##// END OF EJS Templates
Rewrite controller, remove MPDecorator to units (keep for plots an writers) use of queues for interproc comm instead of zmq, self operations are no longer supported
Juan C. Espinoza -
r1287:af11e4aac00c
parent child
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,241 +1,238
1 1 import click
2 import schainpy
3 2 import subprocess
4 3 import os
5 4 import sys
6 5 import glob
7 from multiprocessing import cpu_count
6 import schainpy
8 7 from schainpy.controller import Project
9 8 from schainpy.model import Operation, ProcessingUnit
10 9 from schainpy.utils import log
11 10 from importlib import import_module
12 11 from pydoc import locate
13 12 from fuzzywuzzy import process
14 13 from schainpy.cli import templates
15 14 import inspect
16 15 try:
17 16 from queue import Queue
18 17 except:
19 18 from Queue import Queue
20 19
21 20
22 21 def getProcs():
23 22 modules = dir(schainpy.model)
24 23 procs = check_module(modules, 'processing')
25 24 try:
26 25 procs.remove('ProcessingUnit')
27 26 except Exception as e:
28 27 pass
29 28 return procs
30 29
31 30 def getOperations():
32 31 module = dir(schainpy.model)
33 32 noProcs = [x for x in module if not x.endswith('Proc')]
34 33 operations = check_module(noProcs, 'operation')
35 34 try:
36 35 operations.remove('Operation')
37 36 operations.remove('Figure')
38 37 operations.remove('Plot')
39 38 except Exception as e:
40 39 pass
41 40 return operations
42 41
43 42 def getArgs(op):
44 43 module = locate('schainpy.model.{}'.format(op))
45 44 try:
46 obj = module(1,2,3,Queue(),5,6)
45 obj = module(1, 2, 3, Queue())
47 46 except:
48 47 obj = module()
49 48
50 49 if hasattr(obj, '__attrs__'):
51 50 args = obj.__attrs__
52 51 else:
53 52 if hasattr(obj, 'myrun'):
54 53 args = inspect.getfullargspec(obj.myrun).args
55 54 else:
56 55 args = inspect.getfullargspec(obj.run).args
57 56
58 57 try:
59 58 args.remove('self')
60 59 except Exception as e:
61 60 pass
62 61 try:
63 62 args.remove('dataOut')
64 63 except Exception as e:
65 64 pass
66 65 return args
67 66
68 67 def getDoc(obj):
69 68 module = locate('schainpy.model.{}'.format(obj))
70 69 try:
71 obj = module(1,2,3,Queue(),5,6)
70 obj = module(1, 2, 3, Queue())
72 71 except:
73 72 obj = module()
74 73 return obj.__doc__
75 74
76 75 def getAll():
77 76 modules = getOperations()
78 77 modules.extend(getProcs())
79 78 return modules
80 79
81 80
82 81 def print_version(ctx, param, value):
83 82 if not value or ctx.resilient_parsing:
84 83 return
85 84 click.echo(schainpy.__version__)
86 85 ctx.exit()
87 86
88 87
89 88 PREFIX = 'experiment'
90 89
91 90 @click.command()
92 91 @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str)
93 92 @click.argument('command', default='run', required=True)
94 93 @click.argument('nextcommand', default=None, required=False, type=str)
95 94 def main(command, nextcommand, version):
96 95 """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY V3.0\n
97 Available commands.\n
96 Available commands:\n
98 97 xml: runs a schain XML generated file\n
99 run: runs any python script starting 'experiment_'\n
98 run: runs any python script'\n
100 99 generate: generates a template schain script\n
101 100 list: return a list of available procs and operations\n
102 101 search: return avilable operations, procs or arguments of the given
103 102 operation/proc\n"""
104 103 if command == 'xml':
105 104 runFromXML(nextcommand)
106 105 elif command == 'generate':
107 106 generate()
108 107 elif command == 'test':
109 108 test()
110 109 elif command == 'run':
111 110 runschain(nextcommand)
112 111 elif command == 'search':
113 112 search(nextcommand)
114 113 elif command == 'list':
115 114 cmdlist(nextcommand)
116 115 else:
117 116 log.error('Command {} is not defined'.format(command))
118 117
119 118
120 119 def check_module(possible, instance):
121 120 def check(x):
122 121 try:
123 122 instancia = locate('schainpy.model.{}'.format(x))
124 123 ret = instancia.proc_type == instance
125 124 return ret
126 125 except Exception as e:
127 126 return False
128 127 clean = clean_modules(possible)
129 128 return [x for x in clean if check(x)]
130 129
131 130
132 131 def clean_modules(module):
133 132 noEndsUnder = [x for x in module if not x.endswith('__')]
134 133 noStartUnder = [x for x in noEndsUnder if not x.startswith('__')]
135 134 noFullUpper = [x for x in noStartUnder if not x.isupper()]
136 135 return noFullUpper
137 136
138 137 def cmdlist(nextcommand):
139 138 if nextcommand is None:
140 139 log.error('Missing argument, available arguments: procs, operations', '')
141 140 elif nextcommand == 'procs':
142 141 procs = getProcs()
143 142 log.success(
144 143 'Current ProcessingUnits are:\n {}'.format('\n '.join(procs)), '')
145 144 elif nextcommand == 'operations':
146 145 operations = getOperations()
147 146 log.success('Current Operations are:\n {}'.format(
148 147 '\n '.join(operations)), '')
149 148 else:
150 149 log.error('Wrong argument', '')
151 150
152 151 def search(nextcommand):
153 152 if nextcommand is None:
154 153 log.error('There is no Operation/ProcessingUnit to search', '')
155 154 else:
156 155 try:
157 156 args = getArgs(nextcommand)
158 157 doc = getDoc(nextcommand)
159 if len(args) == 0:
160 log.success('\n{} has no arguments'.format(nextcommand), '')
161 else:
162 158 log.success('{}\n{}\n\narguments:\n {}'.format(
163 nextcommand, doc, ', '.join(args)), '')
159 nextcommand, doc, ', '.join(args)), ''
160 )
164 161 except Exception as e:
165 162 log.error('Module `{}` does not exists'.format(nextcommand), '')
166 163 allModules = getAll()
167 164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
168 165 log.success('Possible modules are: {}'.format(', '.join(similar)), '')
169 166
170 167 def runschain(nextcommand):
171 168 if nextcommand is None:
172 169 currentfiles = glob.glob('./{}_*.py'.format(PREFIX))
173 170 numberfiles = len(currentfiles)
174 171 if numberfiles > 1:
175 172 log.error('There is more than one file to run')
176 173 elif numberfiles == 1:
177 174 subprocess.call(['python ' + currentfiles[0]], shell=True)
178 175 else:
179 176 log.error('There is no file to run')
180 177 else:
181 178 try:
182 179 subprocess.call(['python ' + nextcommand], shell=True)
183 180 except Exception as e:
184 181 log.error("I cannot run the file. Does it exists?")
185 182
186 183
187 184 def basicInputs():
188 185 inputs = {}
189 186 inputs['name'] = click.prompt(
190 187 'Name of the project', default="project", type=str)
191 188 inputs['desc'] = click.prompt(
192 189 'Enter a description', default="A schain project", type=str)
193 190 inputs['multiprocess'] = click.prompt(
194 191 '''Select data type:
195 192
196 193 - Voltage (*.r): [1]
197 194 - Spectra (*.pdata): [2]
198 195 - Voltage and Spectra (*.r): [3]
199 196
200 197 -->''', type=int)
201 198 inputs['path'] = click.prompt('Data path', default=os.getcwd(
202 199 ), type=click.Path(exists=True, resolve_path=True))
203 200 inputs['startDate'] = click.prompt(
204 201 'Start date', default='1970/01/01', type=str)
205 202 inputs['endDate'] = click.prompt(
206 203 'End date', default='2018/12/31', type=str)
207 204 inputs['startHour'] = click.prompt(
208 205 'Start hour', default='00:00:00', type=str)
209 206 inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str)
210 207 inputs['figpath'] = inputs['path'] + '/figs'
211 208 return inputs
212 209
213 210
214 211 def generate():
215 212 inputs = basicInputs()
216 213
217 214 if inputs['multiprocess'] == 1:
218 215 current = templates.voltage.format(**inputs)
219 216 elif inputs['multiprocess'] == 2:
220 217 current = templates.spectra.format(**inputs)
221 218 elif inputs['multiprocess'] == 3:
222 219 current = templates.voltagespectra.format(**inputs)
223 220 scriptname = '{}_{}.py'.format(PREFIX, inputs['name'])
224 221 script = open(scriptname, 'w')
225 222 try:
226 223 script.write(current)
227 224 log.success('Script {} generated'.format(scriptname))
228 225 except Exception as e:
229 226 log.error('I cannot create the file. Do you have writing permissions?')
230 227
231 228
232 229 def test():
233 230 log.warning('testing')
234 231
235 232
236 233 def runFromXML(filename):
237 234 controller = Project()
238 235 if not controller.readXml(filename):
239 236 return
240 237 controller.start()
241 238 return
This diff has been collapsed as it changes many lines, (1158 lines changed) Show them Hide them
@@ -1,1290 +1,648
1 1 '''
2 Updated on January , 2018, for multiprocessing purposes
3 Author: Sergio Cortez
4 Created on September , 2012
2 Main routines to create a Signal Chain project
5 3 '''
6 from platform import python_version
4
5 import re
7 6 import sys
8 7 import ast
9 8 import datetime
10 9 import traceback
11 import math
12 10 import time
13 import zmq
14 from multiprocessing import Process, Queue, Event, Value, cpu_count
11 from multiprocessing import Process, Queue
15 12 from threading import Thread
16 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
17 from xml.dom import minidom
18
13 from xml.etree.ElementTree import ElementTree, Element, SubElement
19 14
20 15 from schainpy.admin import Alarm, SchainWarning
21 16 from schainpy.model import *
22 17 from schainpy.utils import log
23 18
24 19
25 DTYPES = {
26 'Voltage': '.r',
27 'Spectra': '.pdata'
28 }
29
30
31 def MPProject(project, n=cpu_count()):
32 '''
33 Project wrapper to run schain in n processes
34 '''
35
36 rconf = project.getReadUnitObj()
37 op = rconf.getOperationObj('run')
38 dt1 = op.getParameterValue('startDate')
39 dt2 = op.getParameterValue('endDate')
40 tm1 = op.getParameterValue('startTime')
41 tm2 = op.getParameterValue('endTime')
42 days = (dt2 - dt1).days
43
44 for day in range(days + 1):
45 skip = 0
46 cursor = 0
47 processes = []
48 dt = dt1 + datetime.timedelta(day)
49 dt_str = dt.strftime('%Y/%m/%d')
50 reader = JRODataReader()
51 paths, files = reader.searchFilesOffLine(path=rconf.path,
52 startDate=dt,
53 endDate=dt,
54 startTime=tm1,
55 endTime=tm2,
56 ext=DTYPES[rconf.datatype])
57 nFiles = len(files)
58 if nFiles == 0:
59 continue
60 skip = int(math.ceil(nFiles / n))
61 while nFiles > cursor * skip:
62 rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor,
63 skip=skip)
64 p = project.clone()
65 p.start()
66 processes.append(p)
67 cursor += 1
68
69 def beforeExit(exctype, value, trace):
70 for process in processes:
71 process.terminate()
72 process.join()
73 print(traceback.print_tb(trace))
74
75 sys.excepthook = beforeExit
76
77 for process in processes:
78 process.join()
79 process.terminate()
80
81 time.sleep(3)
82
83 def wait(context):
84
85 time.sleep(1)
86 c = zmq.Context()
87 receiver = c.socket(zmq.SUB)
88 receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id))
89 receiver.setsockopt(zmq.SUBSCRIBE, self.id.encode())
90 msg = receiver.recv_multipart()[1]
91 context.terminate()
92
93 class ParameterConf():
94
95 id = None
96 name = None
97 value = None
98 format = None
99
100 __formated_value = None
101
102 ELEMENTNAME = 'Parameter'
103
104 def __init__(self):
105
106 self.format = 'str'
107
108 def getElementName(self):
109
110 return self.ELEMENTNAME
111
112 def getValue(self):
113
114 value = self.value
115 format = self.format
116
117 if self.__formated_value != None:
118
119 return self.__formated_value
120
121 if format == 'obj':
122 return value
123
124 if format == 'str':
125 self.__formated_value = str(value)
126 return self.__formated_value
127
128 if value == '':
129 raise ValueError('%s: This parameter value is empty' % self.name)
130
131 if format == 'list':
132 strList = [s.strip() for s in value.split(',')]
133 self.__formated_value = strList
134
135 return self.__formated_value
136
137 if format == 'intlist':
138 '''
139 Example:
140 value = (0,1,2)
141 '''
142
143 new_value = ast.literal_eval(value)
144
145 if type(new_value) not in (tuple, list):
146 new_value = [int(new_value)]
147
148 self.__formated_value = new_value
149
150 return self.__formated_value
151
152 if format == 'floatlist':
153 '''
154 Example:
155 value = (0.5, 1.4, 2.7)
156 '''
157
158 new_value = ast.literal_eval(value)
159
160 if type(new_value) not in (tuple, list):
161 new_value = [float(new_value)]
162
163 self.__formated_value = new_value
164
165 return self.__formated_value
166
167 if format == 'date':
168 strList = value.split('/')
169 intList = [int(x) for x in strList]
170 date = datetime.date(intList[0], intList[1], intList[2])
171
172 self.__formated_value = date
173
174 return self.__formated_value
175
176 if format == 'time':
177 strList = value.split(':')
178 intList = [int(x) for x in strList]
179 time = datetime.time(intList[0], intList[1], intList[2])
180
181 self.__formated_value = time
182
183 return self.__formated_value
184
185 if format == 'pairslist':
186 '''
187 Example:
188 value = (0,1),(1,2)
189 '''
190
191 new_value = ast.literal_eval(value)
192
193 if type(new_value) not in (tuple, list):
194 raise ValueError('%s has to be a tuple or list of pairs' % value)
195
196 if type(new_value[0]) not in (tuple, list):
197 if len(new_value) != 2:
198 raise ValueError('%s has to be a tuple or list of pairs' % value)
199 new_value = [new_value]
200
201 for thisPair in new_value:
202 if len(thisPair) != 2:
203 raise ValueError('%s has to be a tuple or list of pairs' % value)
204
205 self.__formated_value = new_value
206
207 return self.__formated_value
208
209 if format == 'multilist':
210 '''
211 Example:
212 value = (0,1,2),(3,4,5)
213 '''
214 multiList = ast.literal_eval(value)
215
216 if type(multiList[0]) == int:
217 multiList = ast.literal_eval('(' + value + ')')
218
219 self.__formated_value = multiList
220
221 return self.__formated_value
222
223 if format == 'bool':
224 value = int(value)
225
226 if format == 'int':
227 value = float(value)
228
229 format_func = eval(format)
230
231 self.__formated_value = format_func(value)
232
233 return self.__formated_value
234
235 def updateId(self, new_id):
236
237 self.id = str(new_id)
238
239 def setup(self, id, name, value, format='str'):
240 self.id = str(id)
241 self.name = name
242 if format == 'obj':
243 self.value = value
244 else:
245 self.value = str(value)
246 self.format = str.lower(format)
247
248 self.getValue()
249
250 return 1
251
252 def update(self, name, value, format='str'):
253
254 self.name = name
255 self.value = str(value)
256 self.format = format
257
258 def makeXml(self, opElement):
259 if self.name not in ('queue',):
260 parmElement = SubElement(opElement, self.ELEMENTNAME)
261 parmElement.set('id', str(self.id))
262 parmElement.set('name', self.name)
263 parmElement.set('value', self.value)
264 parmElement.set('format', self.format)
265
266 def readXml(self, parmElement):
267
268 self.id = parmElement.get('id')
269 self.name = parmElement.get('name')
270 self.value = parmElement.get('value')
271 self.format = str.lower(parmElement.get('format'))
272
273 # Compatible with old signal chain version
274 if self.format == 'int' and self.name == 'idfigure':
275 self.name = 'id'
276
277 def printattr(self):
278
279 print('Parameter[%s]: name = %s, value = %s, format = %s, project_id = %s' % (self.id, self.name, self.value, self.format, self.project_id))
280
281 class OperationConf():
282
283 ELEMENTNAME = 'Operation'
20 class ConfBase():
284 21
285 22 def __init__(self):
286 23
287 24 self.id = '0'
288 25 self.name = None
289 26 self.priority = None
290 self.topic = None
291
292 def __getNewId(self):
293
294 return int(self.id) * 10 + len(self.parmConfObjList) + 1
27 self.parameters = {}
28 self.object = None
29 self.operations = []
295 30
296 31 def getId(self):
32
297 33 return self.id
298 34
35 def getNewId(self):
36
37 return int(self.id) * 10 + len(self.operations) + 1
38
299 39 def updateId(self, new_id):
300 40
301 41 self.id = str(new_id)
302 42
303 43 n = 1
304 for parmObj in self.parmConfObjList:
305
306 idParm = str(int(new_id) * 10 + n)
307 parmObj.updateId(idParm)
308
44 for conf in self.operations:
45 conf_id = str(int(new_id) * 10 + n)
46 conf.updateId(conf_id)
309 47 n += 1
310 48
311 def getElementName(self):
312
313 return self.ELEMENTNAME
314
315 def getParameterObjList(self):
49 def getKwargs(self):
316 50
317 return self.parmConfObjList
51 params = {}
318 52
319 def getParameterObj(self, parameterName):
53 for key, value in self.parameters.items():
54 if value not in (None, '', ' '):
55 params[key] = value
320 56
321 for parmConfObj in self.parmConfObjList:
57 return params
322 58
323 if parmConfObj.name != parameterName:
324 continue
59 def update(self, **kwargs):
325 60
326 return parmConfObj
61 for key, value in kwargs.items():
62 self.addParameter(name=key, value=value)
327 63
328 return None
64 def addParameter(self, name, value, format=None):
65 '''
66 '''
329 67
330 def getParameterObjfromValue(self, parameterValue):
68 if isinstance(value, str) and re.search(r'(\d+/\d+/\d+)', value):
69 self.parameters[name] = datetime.date(*[int(x) for x in value.split('/')])
70 elif isinstance(value, str) and re.search(r'(\d+:\d+:\d+)', value):
71 self.parameters[name] = datetime.time(*[int(x) for x in value.split(':')])
72 else:
73 try:
74 self.parameters[name] = ast.literal_eval(value)
75 except:
76 if isinstance(value, str) and ',' in value:
77 self.parameters[name] = value.split(',')
78 else:
79 self.parameters[name] = value
331 80
332 for parmConfObj in self.parmConfObjList:
81 def getParameters(self):
333 82
334 if parmConfObj.getValue() != parameterValue:
335 continue
83 params = {}
84 for key, value in self.parameters.items():
85 s = type(value).__name__
86 if s == 'date':
87 params[key] = value.strftime('%Y/%m/%d')
88 elif s == 'time':
89 params[key] = value.strftime('%H:%M:%S')
90 else:
91 params[key] = str(value)
336 92
337 return parmConfObj.getValue()
93 return params
338 94
339 return None
95 def makeXml(self, element):
340 96
341 def getParameterValue(self, parameterName):
97 xml = SubElement(element, self.ELEMENTNAME)
98 for label in self.xml_labels:
99 xml.set(label, str(getattr(self, label)))
342 100
343 parameterObj = self.getParameterObj(parameterName)
101 for key, value in self.getParameters().items():
102 xml_param = SubElement(xml, 'Parameter')
103 xml_param.set('name', key)
104 xml_param.set('value', value)
344 105
345 # if not parameterObj:
346 # return None
106 for conf in self.operations:
107 conf.makeXml(xml)
347 108
348 value = parameterObj.getValue()
109 def __str__(self):
349 110
350 return value
111 if self.ELEMENTNAME == 'Operation':
112 s = ' {}[id={}]\n'.format(self.name, self.id)
113 else:
114 s = '{}[id={}, inputId={}]\n'.format(self.name, self.id, self.inputId)
351 115
352 def getKwargs(self):
116 for key, value in self.parameters.items():
117 if self.ELEMENTNAME == 'Operation':
118 s += ' {}: {}\n'.format(key, value)
119 else:
120 s += ' {}: {}\n'.format(key, value)
353 121
354 kwargs = {}
122 for conf in self.operations:
123 s += str(conf)
355 124
356 for parmConfObj in self.parmConfObjList:
357 if self.name == 'run' and parmConfObj.name == 'datatype':
358 continue
125 return s
359 126
360 kwargs[parmConfObj.name] = parmConfObj.getValue()
127 class OperationConf(ConfBase):
361 128
362 return kwargs
129 ELEMENTNAME = 'Operation'
130 xml_labels = ['id', 'name']
363 131
364 def setup(self, id, name, priority, type, project_id, err_queue, lock):
132 def setup(self, id, name, priority, project_id, err_queue):
365 133
366 134 self.id = str(id)
367 135 self.project_id = project_id
368 136 self.name = name
369 self.type = type
370 self.priority = priority
137 self.type = 'other'
371 138 self.err_queue = err_queue
372 self.lock = lock
373 self.parmConfObjList = []
374
375 def removeParameters(self):
376
377 for obj in self.parmConfObjList:
378 del obj
379
380 self.parmConfObjList = []
381 139
382 def addParameter(self, name, value, format='str'):
140 def readXml(self, element, project_id, err_queue):
383 141
384 if value is None:
385 return None
386 id = self.__getNewId()
387
388 parmConfObj = ParameterConf()
389 if not parmConfObj.setup(id, name, value, format):
390 return None
391
392 self.parmConfObjList.append(parmConfObj)
393
394 return parmConfObj
395
396 def changeParameter(self, name, value, format='str'):
397
398 parmConfObj = self.getParameterObj(name)
399 parmConfObj.update(name, value, format)
400
401 return parmConfObj
402
403 def makeXml(self, procUnitElement):
404
405 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
406 opElement.set('id', str(self.id))
407 opElement.set('name', self.name)
408 opElement.set('type', self.type)
409 opElement.set('priority', str(self.priority))
410
411 for parmConfObj in self.parmConfObjList:
412 parmConfObj.makeXml(opElement)
413
414 def readXml(self, opElement, project_id):
415
416 self.id = opElement.get('id')
417 self.name = opElement.get('name')
418 self.type = opElement.get('type')
419 self.priority = opElement.get('priority')
142 self.id = element.get('id')
143 self.name = element.get('name')
144 self.type = 'other'
420 145 self.project_id = str(project_id)
146 self.err_queue = err_queue
421 147
422 # Compatible with old signal chain version
423 # Use of 'run' method instead 'init'
424 if self.type == 'self' and self.name == 'init':
425 self.name = 'run'
426
427 self.parmConfObjList = []
428
429 parmElementList = opElement.iter(ParameterConf().getElementName())
430
431 for parmElement in parmElementList:
432 parmConfObj = ParameterConf()
433 parmConfObj.readXml(parmElement)
434
435 # Compatible with old signal chain version
436 # If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER
437 if self.type != 'self' and self.name == 'Plot':
438 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
439 self.name = parmConfObj.value
440 continue
441
442 self.parmConfObjList.append(parmConfObj)
443
444 def printattr(self):
445
446 print('%s[%s]: name = %s, type = %s, priority = %s, project_id = %s' % (self.ELEMENTNAME,
447 self.id,
448 self.name,
449 self.type,
450 self.priority,
451 self.project_id))
452
453 for parmConfObj in self.parmConfObjList:
454 parmConfObj.printattr()
148 for elm in element.iter('Parameter'):
149 self.addParameter(elm.get('name'), elm.get('value'))
455 150
456 151 def createObject(self):
457 152
458 153 className = eval(self.name)
459 154
460 if self.type == 'other':
461 opObj = className()
462 elif self.type == 'external':
155 if 'Plot' in self.name or 'Writer' in self.name:
463 156 kwargs = self.getKwargs()
464 opObj = className(self.id, self.id, self.project_id, self.err_queue, self.lock, 'Operation', **kwargs)
157 opObj = className(self.id, self.id, self.project_id, self.err_queue, **kwargs)
465 158 opObj.start()
466 self.opObj = opObj
159 self.type = 'external'
160 else:
161 opObj = className()
467 162
163 self.object = opObj
468 164 return opObj
469 165
470 class ProcUnitConf():
166 class ProcUnitConf(ConfBase):
471 167
472 168 ELEMENTNAME = 'ProcUnit'
169 xml_labels = ['id', 'inputId', 'name']
473 170
474 def __init__(self):
475
476 self.id = None
477 self.datatype = None
478 self.name = None
479 self.inputId = None
480 self.opConfObjList = []
481 self.procUnitObj = None
482 self.opObjDict = {}
483
484 def __getPriority(self):
485
486 return len(self.opConfObjList) + 1
487
488 def __getNewId(self):
489
490 return int(self.id) * 10 + len(self.opConfObjList) + 1
491
492 def getElementName(self):
493
494 return self.ELEMENTNAME
495
496 def getId(self):
497
498 return self.id
499
500 def updateId(self, new_id):
501 '''
502 new_id = int(parentId) * 10 + (int(self.id) % 10)
503 new_inputId = int(parentId) * 10 + (int(self.inputId) % 10)
504
505 # If this proc unit has not inputs
506 #if self.inputId == '0':
507 #new_inputId = 0
508
509 n = 1
510 for opConfObj in self.opConfObjList:
511
512 idOp = str(int(new_id) * 10 + n)
513 opConfObj.updateId(idOp)
514
515 n += 1
516
517 self.parentId = str(parentId)
518 self.id = str(new_id)
519 #self.inputId = str(new_inputId)
520 '''
521 n = 1
522
523 def getInputId(self):
524
525 return self.inputId
526
527 def getOperationObjList(self):
528
529 return self.opConfObjList
530
531 def getOperationObj(self, name=None):
532
533 for opConfObj in self.opConfObjList:
534
535 if opConfObj.name != name:
536 continue
537
538 return opConfObj
539
540 return None
541
542 def getOpObjfromParamValue(self, value=None):
543
544 for opConfObj in self.opConfObjList:
545 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
546 continue
547 return opConfObj
548 return None
549
550 def getProcUnitObj(self):
551
552 return self.procUnitObj
553
554 def setup(self, project_id, id, name, datatype, inputId, err_queue, lock):
171 def setup(self, project_id, id, name, datatype, inputId, err_queue):
555 172 '''
556 id sera el topico a publicar
557 inputId sera el topico a subscribirse
558 173 '''
559 174
560 # Compatible with old signal chain version
561 175 if datatype == None and name == None:
562 176 raise ValueError('datatype or name should be defined')
563 177
564 #Definir una condicion para inputId cuando sea 0
565
566 178 if name == None:
567 179 if 'Proc' in datatype:
568 180 name = datatype
569 181 else:
570 182 name = '%sProc' % (datatype)
571 183
572 184 if datatype == None:
573 185 datatype = name.replace('Proc', '')
574 186
575 187 self.id = str(id)
576 188 self.project_id = project_id
577 189 self.name = name
578 190 self.datatype = datatype
579 191 self.inputId = inputId
580 192 self.err_queue = err_queue
581 self.lock = lock
582 self.opConfObjList = []
583
584 self.addOperation(name='run', optype='self')
585
586 def removeOperations(self):
587
588 for obj in self.opConfObjList:
589 del obj
193 self.operations = []
194 self.parameters = {}
590 195
591 self.opConfObjList = []
592 self.addOperation(name='run')
196 def removeOperation(self, id):
593 197
594 def addParameter(self, **kwargs):
595 '''
596 Add parameters to 'run' operation
597 '''
598 opObj = self.opConfObjList[0]
198 i = [1 if x.id==id else 0 for x in self.operations]
199 self.operations.pop(i.index(1))
599 200
600 opObj.addParameter(**kwargs)
201 def getOperation(self, id):
601 202
602 return opObj
203 for conf in self.operations:
204 if conf.id == id:
205 return conf
603 206
604 207 def addOperation(self, name, optype='self'):
605 208 '''
606 Actualizacion - > proceso comunicacion
607 En el caso de optype='self', elminar. DEfinir comuncacion IPC -> Topic
608 definir el tipoc de socket o comunicacion ipc++
609
610 209 '''
611 210
612 id = self.__getNewId()
613 priority = self.__getPriority() # Sin mucho sentido, pero puede usarse
614 opConfObj = OperationConf()
615 opConfObj.setup(id, name=name, priority=priority, type=optype, project_id=self.project_id, err_queue=self.err_queue, lock=self.lock)
616 self.opConfObjList.append(opConfObj)
617
618 return opConfObj
211 id = self.getNewId()
212 conf = OperationConf()
213 conf.setup(id, name=name, priority='0', project_id=self.project_id, err_queue=self.err_queue)
214 self.operations.append(conf)
619 215
620 def makeXml(self, projectElement):
216 return conf
621 217
622 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
623 procUnitElement.set('id', str(self.id))
624 procUnitElement.set('name', self.name)
625 procUnitElement.set('datatype', self.datatype)
626 procUnitElement.set('inputId', str(self.inputId))
218 def readXml(self, element, project_id, err_queue):
627 219
628 for opConfObj in self.opConfObjList:
629 opConfObj.makeXml(procUnitElement)
630
631 def readXml(self, upElement, project_id):
632
633 self.id = upElement.get('id')
634 self.name = upElement.get('name')
635 self.datatype = upElement.get('datatype')
636 self.inputId = upElement.get('inputId')
220 self.id = element.get('id')
221 self.name = element.get('name')
222 self.inputId = None if element.get('inputId') == 'None' else element.get('inputId')
223 self.datatype = element.get('datatype', self.name.replace(self.ELEMENTNAME.replace('Unit', ''), ''))
637 224 self.project_id = str(project_id)
225 self.err_queue = err_queue
226 self.operations = []
227 self.parameters = {}
638 228
639 if self.ELEMENTNAME == 'ReadUnit':
640 self.datatype = self.datatype.replace('Reader', '')
641
642 if self.ELEMENTNAME == 'ProcUnit':
643 self.datatype = self.datatype.replace('Proc', '')
644
645 if self.inputId == 'None':
646 self.inputId = '0'
647
648 self.opConfObjList = []
649
650 opElementList = upElement.iter(OperationConf().getElementName())
651
652 for opElement in opElementList:
653 opConfObj = OperationConf()
654 opConfObj.readXml(opElement, project_id)
655 self.opConfObjList.append(opConfObj)
656
657 def printattr(self):
658
659 print('%s[%s]: name = %s, datatype = %s, inputId = %s, project_id = %s' % (self.ELEMENTNAME,
660 self.id,
661 self.name,
662 self.datatype,
663 self.inputId,
664 self.project_id))
665
666 for opConfObj in self.opConfObjList:
667 opConfObj.printattr()
668
669 def getKwargs(self):
670
671 opObj = self.opConfObjList[0]
672 kwargs = opObj.getKwargs()
673
674 return kwargs
229 for elm in element:
230 if elm.tag == 'Parameter':
231 self.addParameter(elm.get('name'), elm.get('value'))
232 elif elm.tag == 'Operation':
233 conf = OperationConf()
234 conf.readXml(elm, project_id, err_queue)
235 self.operations.append(conf)
675 236
676 237 def createObjects(self):
677 238 '''
678 239 Instancia de unidades de procesamiento.
679 240 '''
680 241
681 242 className = eval(self.name)
682 243 kwargs = self.getKwargs()
683 procUnitObj = className(self.id, self.inputId, self.project_id, self.err_queue, self.lock, 'ProcUnit', **kwargs)
244 procUnitObj = className()
245 procUnitObj.name = self.name
684 246 log.success('creating process...', self.name)
685 247
686 for opConfObj in self.opConfObjList:
248 for conf in self.operations:
687 249
688 if opConfObj.type == 'self' and opConfObj.name == 'run':
689 continue
690 elif opConfObj.type == 'self':
691 opObj = getattr(procUnitObj, opConfObj.name)
692 else:
693 opObj = opConfObj.createObject()
250 opObj = conf.createObject()
694 251
695 252 log.success('adding operation: {}, type:{}'.format(
696 opConfObj.name,
697 opConfObj.type), self.name)
698
699 procUnitObj.addOperation(opConfObj, opObj)
700
701 procUnitObj.start()
702 self.procUnitObj = procUnitObj
253 conf.name,
254 conf.type), self.name)
703 255
704 def close(self):
256 procUnitObj.addOperation(conf, opObj)
705 257
706 for opConfObj in self.opConfObjList:
707 if opConfObj.type == 'self':
708 continue
709
710 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
711 opObj.close()
258 self.object = procUnitObj
712 259
713 self.procUnitObj.close()
260 def run(self):
261 '''
262 '''
714 263
715 return
264 return self.object.call(**self.getKwargs())
716 265
717 266
718 267 class ReadUnitConf(ProcUnitConf):
719 268
720 269 ELEMENTNAME = 'ReadUnit'
721 270
722 271 def __init__(self):
723 272
724 273 self.id = None
725 274 self.datatype = None
726 275 self.name = None
727 276 self.inputId = None
728 self.opConfObjList = []
729 self.lock = Event()
730 self.lock.set()
731 self.lock.n = Value('d', 0)
732
733 def getElementName(self):
734
735 return self.ELEMENTNAME
277 self.operations = []
278 self.parameters = {}
736 279
737 280 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
738 281 startTime='', endTime='', server=None, **kwargs):
739 282
740
741 '''
742 *****el id del proceso sera el Topico
743
744 Adicion de {topic}, si no esta presente -> error
745 kwargs deben ser trasmitidos en la instanciacion
746
747 '''
748
749 # Compatible with old signal chain version
750 283 if datatype == None and name == None:
751 284 raise ValueError('datatype or name should be defined')
752 285 if name == None:
753 286 if 'Reader' in datatype:
754 287 name = datatype
755 288 datatype = name.replace('Reader','')
756 289 else:
757 290 name = '{}Reader'.format(datatype)
758 291 if datatype == None:
759 292 if 'Reader' in name:
760 293 datatype = name.replace('Reader','')
761 294 else:
762 295 datatype = name
763 296 name = '{}Reader'.format(name)
764 297
765 298 self.id = id
766 299 self.project_id = project_id
767 300 self.name = name
768 301 self.datatype = datatype
769 if path != '':
770 self.path = os.path.abspath(path)
771 self.startDate = startDate
772 self.endDate = endDate
773 self.startTime = startTime
774 self.endTime = endTime
775 self.server = server
776 302 self.err_queue = err_queue
777 self.addRunOperation(**kwargs)
778
779 def update(self, **kwargs):
780
781 if 'datatype' in kwargs:
782 datatype = kwargs.pop('datatype')
783 if 'Reader' in datatype:
784 self.name = datatype
785 else:
786 self.name = '%sReader' % (datatype)
787 self.datatype = self.name.replace('Reader', '')
788
789 attrs = ('path', 'startDate', 'endDate',
790 'startTime', 'endTime')
791
792 for attr in attrs:
793 if attr in kwargs:
794 setattr(self, attr, kwargs.pop(attr))
795
796 self.updateRunOperation(**kwargs)
797 303
798 def removeOperations(self):
304 self.addParameter(name='path', value=path)
305 self.addParameter(name='startDate', value=startDate)
306 self.addParameter(name='endDate', value=endDate)
307 self.addParameter(name='startTime', value=startTime)
308 self.addParameter(name='endTime', value=endTime)
799 309
800 for obj in self.opConfObjList:
801 del obj
802
803 self.opConfObjList = []
804
805 def addRunOperation(self, **kwargs):
806
807 opObj = self.addOperation(name='run', optype='self')
808
809 if self.server is None:
810 opObj.addParameter(
811 name='datatype', value=self.datatype, format='str')
812 opObj.addParameter(name='path', value=self.path, format='str')
813 opObj.addParameter(
814 name='startDate', value=self.startDate, format='date')
815 opObj.addParameter(
816 name='endDate', value=self.endDate, format='date')
817 opObj.addParameter(
818 name='startTime', value=self.startTime, format='time')
819 opObj.addParameter(
820 name='endTime', value=self.endTime, format='time')
821
822 for key, value in list(kwargs.items()):
823 opObj.addParameter(name=key, value=value,
824 format=type(value).__name__)
825 else:
826 opObj.addParameter(name='server', value=self.server, format='str')
827
828 return opObj
829
830 def updateRunOperation(self, **kwargs):
831
832 opObj = self.getOperationObj(name='run')
833 opObj.removeParameters()
834
835 opObj.addParameter(name='datatype', value=self.datatype, format='str')
836 opObj.addParameter(name='path', value=self.path, format='str')
837 opObj.addParameter(
838 name='startDate', value=self.startDate, format='date')
839 opObj.addParameter(name='endDate', value=self.endDate, format='date')
840 opObj.addParameter(
841 name='startTime', value=self.startTime, format='time')
842 opObj.addParameter(name='endTime', value=self.endTime, format='time')
843
844 for key, value in list(kwargs.items()):
845 opObj.addParameter(name=key, value=value,
846 format=type(value).__name__)
847
848 return opObj
849
850 def readXml(self, upElement, project_id):
851
852 self.id = upElement.get('id')
853 self.name = upElement.get('name')
854 self.datatype = upElement.get('datatype')
855 self.project_id = str(project_id) #yong
856
857 if self.ELEMENTNAME == 'ReadUnit':
858 self.datatype = self.datatype.replace('Reader', '')
859
860 self.opConfObjList = []
861
862 opElementList = upElement.iter(OperationConf().getElementName())
863
864 for opElement in opElementList:
865 opConfObj = OperationConf()
866 opConfObj.readXml(opElement, project_id)
867 self.opConfObjList.append(opConfObj)
868
869 if opConfObj.name == 'run':
870 self.path = opConfObj.getParameterValue('path')
871 self.startDate = opConfObj.getParameterValue('startDate')
872 self.endDate = opConfObj.getParameterValue('endDate')
873 self.startTime = opConfObj.getParameterValue('startTime')
874 self.endTime = opConfObj.getParameterValue('endTime')
310 for key, value in kwargs.items():
311 self.addParameter(name=key, value=value)
875 312
876 313
class Project(Process):
    '''
    A signal-chain project: a dict of unit configurations (one reader plus
    processing units) executed in its own process. Units communicate through
    queues; errors are collected on `err_queue` and reported by `_monitor`.
    '''

    ELEMENTNAME = 'Project'

    def __init__(self):

        Process.__init__(self)
        self.id = None
        self.filename = None
        self.description = None
        self.email = None
        self.alarm = []
        # unit id (str) -> ReadUnitConf / ProcUnitConf
        self.configurations = {}
        # self.err_queue = Queue()
        self.err_queue = None
        self.started = False

    def getNewId(self):
        '''Return the next free unit id (project id * 10 + smallest free slot).'''

        idList = list(self.configurations.keys())
        id = int(self.id) * 10

        while True:
            id += 1
            if str(id) in idList:
                continue
            break

        return str(id)

    def updateId(self, new_id):
        '''Renumber the project and every unit configuration it holds.'''

        self.id = str(new_id)

        keyList = list(self.configurations.keys())
        keyList.sort()

        n = 1
        new_confs = {}

        for procKey in keyList:
            conf = self.configurations[procKey]
            idProcUnit = str(int(self.id) * 10 + n)
            conf.updateId(idProcUnit)
            new_confs[idProcUnit] = conf
            n += 1

        self.configurations = new_confs

    def setup(self, id=1, name='', description='', email=None, alarm=None):
        '''
        Initialize project metadata.

        FIX: the default for `alarm` used to be a shared mutable `[]`; it is
        now None and replaced by a fresh list, which is backward compatible.
        '''

        self.id = str(id)
        self.description = description
        self.email = email
        self.alarm = [] if alarm is None else alarm
        if name:
            self.name = '{} ({})'.format(Process.__name__, name)

    def update(self, **kwargs):
        '''Set arbitrary attributes on the project from keyword arguments.'''

        for key, value in kwargs.items():
            setattr(self, key, value)

    def clone(self):
        '''Return a copy of this project sharing the same unit configurations.'''

        p = Project()
        p.id = self.id
        p.name = self.name
        p.description = self.description
        p.configurations = self.configurations.copy()

        return p

    def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
        '''Create and register a ReadUnitConf; returns the new configuration.'''

        if id is None:
            idReadUnit = self.getNewId()
        else:
            idReadUnit = str(id)

        conf = ReadUnitConf()
        conf.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
        self.configurations[conf.id] = conf

        return conf

    def addProcUnit(self, id=None, inputId='0', datatype=None, name=None):
        '''Create and register a ProcUnitConf fed by `inputId`; returns it.'''

        if id is None:
            idProcUnit = self.getNewId()
        else:
            idProcUnit = id

        conf = ProcUnitConf()
        conf.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue)
        self.configurations[conf.id] = conf

        return conf

    def removeProcUnit(self, id):
        '''Drop the unit configuration with the given id, if present.'''

        if id in self.configurations:
            self.configurations.pop(id)

    def getReadUnit(self):
        '''Return the (single) reader configuration, or None if absent.'''

        for obj in list(self.configurations.values()):
            if obj.ELEMENTNAME == 'ReadUnit':
                return obj

        return None

    def getProcUnit(self, id):
        '''Return the unit configuration with the given id (KeyError if missing).'''

        return self.configurations[id]

    def getUnits(self):
        '''Yield unit configurations in sorted-id order.'''

        keys = list(self.configurations)
        keys.sort()

        for key in keys:
            yield self.configurations[key]

    def updateUnit(self, id, **kwargs):
        '''Forward keyword updates to the unit configuration with this id.'''

        # FIX: result was previously bound to an unused local.
        self.configurations[id].update(**kwargs)

    def makeXml(self):
        '''Build the project's XML element tree into self.xml.'''

        xml = Element('Project')
        xml.set('id', str(self.id))
        xml.set('name', self.name)
        xml.set('description', self.description)

        for conf in self.configurations.values():
            conf.makeXml(xml)

        self.xml = xml

    def writeXml(self, filename=None):
        '''
        Serialize the project to `filename` (default: previous filename or
        'schain.xml'). Returns 1 on success, 0 on any permission problem.
        '''

        if filename == None:
            if self.filename:
                filename = self.filename
            else:
                filename = 'schain.xml'

        if not filename:
            print('filename has not been defined. Use setFilename(filename) for do it.')
            return 0

        abs_file = os.path.abspath(filename)

        if not os.access(os.path.dirname(abs_file), os.W_OK):
            print('No write permission on %s' % os.path.dirname(abs_file))
            return 0

        if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
            print('File %s already exists and it could not be overwriten' % abs_file)
            return 0

        self.makeXml()

        ElementTree(self.xml).write(abs_file, method='xml')

        self.filename = abs_file

        return 1

    def readXml(self, filename):
        '''
        Load a project definition from an XML file, rebuilding the unit
        configurations. Returns 1 on success, 0 on parse failure.
        '''

        abs_file = os.path.abspath(filename)

        self.configurations = {}

        try:
            self.xml = ElementTree().parse(abs_file)
        except Exception:  # FIX: was a bare except; still logs and returns 0
            log.error('Error reading %s, verify file format' % filename)
            return 0

        self.id = self.xml.get('id')
        self.name = self.xml.get('name')
        self.description = self.xml.get('description')

        for element in self.xml:
            if element.tag == 'ReadUnit':
                conf = ReadUnitConf()
                conf.readXml(element, self.id, self.err_queue)
                self.configurations[conf.id] = conf
            elif element.tag == 'ProcUnit':
                conf = ProcUnitConf()
                # validate that the referenced input unit exists (raises KeyError)
                input_proc = self.configurations[element.get('inputId')]
                conf.readXml(element, self.id, self.err_queue)
                self.configurations[conf.id] = conf

        self.filename = abs_file

        return 1

    def __str__(self):

        text = '\nProject[id=%s, name=%s, description=%s]\n\n' % (
            self.id,
            self.name,
            self.description,
        )

        for conf in self.configurations.values():
            text += '{}'.format(conf)

        return text

    def createObjects(self):
        '''Instantiate every unit object and wire each one to its input unit.'''

        keys = list(self.configurations.keys())
        keys.sort()
        for key in keys:
            conf = self.configurations[key]
            conf.createObjects()
            if conf.inputId is not None:
                conf.object.setInput(self.configurations[conf.inputId].object)

    def monitor(self):
        '''Start the error-monitor thread.'''

        t = Thread(target=self._monitor, args=(self.err_queue, self.ctx))
        t.start()

    def _monitor(self, queue, ctx):
        '''
        Consume start/end/error messages from `queue` until all units finished
        or a traceback arrived; then log the error and fire the alarm/email.
        '''

        import socket

        procs = 0
        err_msg = ''

        while True:
            msg = queue.get()
            if '#_start_#' in msg:
                procs += 1
            elif '#_end_#' in msg:
                procs -= 1
            else:
                err_msg = msg

            if procs == 0 or 'Traceback' in err_msg:
                break
            time.sleep(0.1)

        if '|' in err_msg:
            name, err = err_msg.split('|')
            if 'SchainWarning' in err:
                log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
            elif 'SchainError' in err:
                log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
            else:
                log.error(err, name)
        else:
            name, err = self.name, err_msg

        time.sleep(1)

        ctx.term()

        message = ''.join(err)

        if err_msg:
            subject = 'SChain v%s: Error running %s\n' % (
                schainpy.__version__, self.name)

            subtitle = 'Hostname: %s\n' % socket.gethostbyname(
                socket.gethostname())
            subtitle += 'Working directory: %s\n' % os.path.abspath('./')
            subtitle += 'Configuration file: %s\n' % self.filename
            subtitle += 'Time: %s\n' % str(datetime.datetime.now())

            readUnitConfObj = self.getReadUnit()
            if readUnitConfObj:
                subtitle += '\nInput parameters:\n'
                subtitle += '[Data path = %s]\n' % readUnitConfObj.parameters['path']
                subtitle += '[Start date = %s]\n' % readUnitConfObj.parameters['startDate']
                subtitle += '[End date = %s]\n' % readUnitConfObj.parameters['endDate']
                subtitle += '[Start time = %s]\n' % readUnitConfObj.parameters['startTime']
                subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']

            a = Alarm(
                modes=self.alarm,
                email=self.email,
                message=message,
                subject=subject,
                subtitle=subtitle,
                filename=self.filename
            )

            a.start()

    def setFilename(self, filename):

        self.filename = filename

    def runProcs(self):
        '''
        Drive every unit in id order until all have finished or one fails.

        FIX: the completion check used `ok is 'Error'` — identity comparison
        against a string literal, which relies on CPython interning and emits
        a SyntaxWarning on modern Python. Replaced with `==`.
        '''

        err = False
        n = len(self.configurations)

        while not err:
            for conf in self.getUnits():
                ok = conf.run()
                if ok == 'Error':
                    n -= 1
                    continue
                elif not ok:
                    break
            if n == 0:
                err = True

    def run(self):
        '''Process entry point: build unit objects and run them to completion.'''

        log.success('\nStarting Project {} [id={}]'.format(self.name, self.id), tag='')
        self.started = True
        self.start_time = time.time()
        self.createObjects()
        self.runProcs()
        log.success('{} Done (Time: {:4.2f}s)'.format(
            self.name,
            time.time()-self.start_time), '')
@@ -1,407 +1,407
1 1 '''
2 2 Created on Nov 9, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 import schainpy.admin
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator
18 18 from schainpy.model.data.jrodata import Parameters
19 19 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
20 20 from schainpy.utils import log
21 21
# On-disk record layouts of .sswma files as numpy structured dtypes
# ('<u4'/'<f4'/'<i4' are little-endian 32-bit fields).

# File-level header: magic number, record count, offset and site id/name.
FILE_HEADER_STRUCTURE = numpy.dtype([
    ('FMN', '<u4'),
    ('nrec', '<u4'),
    ('fr_offset', '<u4'),
    ('id', '<u4'),
    ('site', 'u1', (32,))
])

# Per-record header: timing, geolocation, radar operating parameters and
# sea-echo processing settings.
REC_HEADER_STRUCTURE = numpy.dtype([
    ('rmn', '<u4'),
    ('rcounter', '<u4'),
    ('nr_offset', '<u4'),
    ('tr_offset', '<u4'),
    ('time', '<u4'),
    ('time_msec', '<u4'),
    ('tag', 'u1', (32,)),
    ('comments', 'u1', (32,)),
    ('lat', '<f4'),
    ('lon', '<f4'),
    ('gps_status', '<u4'),
    ('freq', '<u4'),
    ('freq0', '<u4'),
    ('nchan', '<u4'),
    ('delta_r', '<u4'),
    ('nranges', '<u4'),
    ('r0', '<u4'),
    ('prf', '<u4'),
    ('ncoh', '<u4'),
    ('npoints', '<u4'),
    ('polarization', '<i4'),
    ('rx_filter', '<u4'),
    ('nmodes', '<u4'),
    ('dmode_index', '<u4'),
    ('dmode_rngcorr', '<u4'),
    ('nrxs', '<u4'),
    ('acf_length', '<u4'),
    ('acf_lags', '<u4'),
    ('sea_to_atmos', '<f4'),
    ('sea_notch', '<u4'),
    ('lh_sea', '<u4'),
    ('hh_sea', '<u4'),
    ('nbins_sea', '<u4'),
    ('min_snr', '<f4'),
    ('min_cc', '<f4'),
    ('max_time_diff', '<f4')
])

# Per-range-gate data record: wind components, fading/ellipse diagnostics
# and sea-echo power.
DATA_STRUCTURE = numpy.dtype([
    ('range', '<u4'),
    ('status', '<u4'),
    ('zonal', '<f4'),
    ('meridional', '<f4'),
    ('vertical', '<f4'),
    ('zonal_a', '<f4'),
    ('meridional_a', '<f4'),
    ('corrected_fading', '<f4'), # seconds
    ('uncorrected_fading', '<f4'), # seconds
    ('time_diff', '<f4'),
    ('major_axis', '<f4'),
    ('axial_ratio', '<f4'),
    ('orientation', '<f4'),
    ('sea_power', '<u4'),
    ('sea_algorithm', '<u4')
])
86 86
87 @MPDecorator
87
class BLTRParamReader(JRODataReader, ProcessingUnit):
    '''
    Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR
    from *.sswma files
    '''

    ext = '.sswma'

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = Parameters()
        self.counter_records = 0   # records consumed in the current file
        self.flagNoMoreFiles = 0
        self.isConfig = False
        self.filename = None

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              ext=None,
              startTime=datetime.time(0, 0, 0),
              endTime=datetime.time(23, 59, 59),
              timezone=0,
              status_value=0,
              **kwargs):
        '''
        Configure the reader: validate `path`, build the file iterator for
        [startDate, endDate] and open the first file.

        Keyword args: delay (s between online retries, default 10),
        online (bool, default False), nTries (default 3).

        Raises:
            ValueError: if `path` is None.
        '''
        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.status_value = status_value
        self.datatime = datetime.datetime(1900, 1, 1)
        self.delay = kwargs.get('delay', 10)
        self.online = kwargs.get('online', False)
        self.nTries = kwargs.get('nTries', 3)

        if self.path is None:
            raise ValueError("The path is not valid")

        if ext is None:
            ext = self.ext

        self.fileList = self.search_files(self.path, startDate, endDate, ext)
        self.timezone = timezone
        self.fileIndex = 0

        # NOTE(review): search_files is a generator, so self.fileList is
        # always truthy and this branch can never fire — confirm whether an
        # eager emptiness check was intended.
        if not self.fileList:
            raise Warning("There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % (
                path))

        self.setNextFile()

    def search_last_file(self):
        '''
        Get last file and add it to the list

        In online mode, wait up to nTries * delay seconds for a file newer
        than the one currently open. Returns the filename, or 0 on timeout.
        '''

        for n in range(self.nTries + 1):
            if n > 0:
                log.warning(
                    "Waiting %0.2f seconds for the next file, try %03d ..." % (self.delay, n + 1),
                    self.name
                )
                time.sleep(self.delay)
            file_list = os.listdir(self.path)
            file_list.sort()
            if file_list:
                if self.filename:
                    # only return a file different from the one already open
                    if file_list[-1] not in self.filename:
                        return file_list[-1]
                    else:
                        continue
                return file_list[-1]
        return 0

    def search_files(self, path, startDate, endDate, ext):
        '''
        Searching for BLTR rawdata file in path
        Creating a list of file to proces included in [startDate,endDate]

        Input:
            path - Path to find BLTR rawdata files
            startDate - Select file from this date
            enDate - Select file until this date
            ext - Extension of the file to read

        Yields matching filenames (generator). The date is parsed from the
        fixed positions of the filename (YYYYMMDD before the extension).
        '''
        # FIX: removed unused local `foldercounter`.

        log.success('Searching files in {} '.format(path), 'BLTRParamReader')
        fileList0 = glob.glob1(path, "*%s" % ext)
        fileList0.sort()

        for thisFile in fileList0:
            year = thisFile[-14:-10]
            if not isNumber(year):
                continue

            month = thisFile[-10:-8]
            if not isNumber(month):
                continue

            day = thisFile[-8:-6]
            if not isNumber(day):
                continue

            year, month, day = int(year), int(month), int(day)
            dateFile = datetime.date(year, month, day)

            if (startDate > dateFile) or (endDate < dateFile):
                continue

            yield thisFile

        return

    def setNextFile(self):
        '''
        Open the next file (online: newest file; offline: next in the list),
        read its file header and reset per-file counters.
        Returns 1 on success, 0 when no more files are available.
        '''

        if self.online:
            filename = self.search_last_file()
            if not filename:
                self.flagNoMoreFiles = 1
                return 0
        else:
            try:
                filename = next(self.fileList)
            except StopIteration:
                self.flagNoMoreFiles = 1
                return 0

        log.success('Opening {}'.format(filename), 'BLTRParamReader')

        dirname, name = os.path.split(filename)
        # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
        self.siteFile = filename.split('.')[0]
        if self.filename is not None:
            self.fp.close()
        self.filename = os.path.join(self.path, filename)
        self.fp = open(self.filename, 'rb')
        self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
        self.nrecords = self.header_file['nrec'][0]
        self.sizeOfFile = os.path.getsize(self.filename)
        self.counter_records = 0
        self.flagIsNewFile = 0
        self.fileIndex += 1

        return 1

    def readNextBlock(self):
        '''
        Read records until one inside the [startDate/Time, endDate/Time]
        window is found, advancing files as needed.
        Returns 1 on success, 0 when no more data is available.
        '''

        while True:
            if not self.online and self.counter_records == self.nrecords:
                self.flagIsNewFile = 1
                if not self.setNextFile():
                    return 0

            try:
                pointer = self.fp.tell()
                self.readBlock()
            # FIX: was a bare `except:` (would also swallow KeyboardInterrupt)
            except Exception:
                if self.online and self.waitDataBlock(pointer, 38512) == 1:
                    continue
                else:
                    if not self.setNextFile():
                        return 0

            if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
               (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
                log.warning(
                    'Reading Record No. {}/{} -> {} [Skipping]'.format(
                        self.counter_records,
                        self.nrecords,
                        self.datatime.ctime()),
                    'BLTRParamReader')
                continue
            break

        log.log('Reading Record No. {} -> {}'.format(
            self.counter_records,
            self.datatime.ctime()), 'BLTRParamReader')

        return 1

    def readBlock(self):
        '''
        Read one block (all modes of one record): peek dimensions from the
        record header, then fill height/winds/SNR buffers per mode.
        '''

        pointer = self.fp.tell()
        header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
        self.nchannels = int(header_rec['nchan'][0] / 2)
        self.kchan = header_rec['nrxs'][0]
        self.nmodes = header_rec['nmodes'][0]
        self.nranges = header_rec['nranges'][0]
        # rewind: the full header (with per-channel arrays) is re-read below
        self.fp.seek(pointer)
        self.height = numpy.empty((self.nmodes, self.nranges))
        self.snr = numpy.empty((self.nmodes, int(self.nchannels), self.nranges))
        self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
        self.flagDiscontinuousBlock = 0

        for mode in range(self.nmodes):
            self.readHeader()
            data = self.readData()
            self.height[mode] = (data[0] - self.correction) / 1000.
            self.buffer[mode] = data[1]
            self.snr[mode] = data[2]

        self.counter_records = self.counter_records + self.nmodes

        return

    def readHeader(self):
        '''
        RecordHeader of BLTR rawdata file
        '''

        # header extended with per-channel arrays sized from the peeked nchan
        header_structure = numpy.dtype(
            REC_HEADER_STRUCTURE.descr + [
                ('antenna_coord', 'f4', (2, int(self.nchannels))),
                ('rx_gains', 'u4', (int(self.nchannels),)),
                ('rx_analysis', 'u4', (int(self.nchannels),))
            ]
        )

        self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
        self.lat = self.header_rec['lat'][0]
        self.lon = self.header_rec['lon'][0]
        self.delta = self.header_rec['delta_r'][0]
        self.correction = self.header_rec['dmode_rngcorr'][0]
        self.imode = self.header_rec['dmode_index'][0]
        self.antenna = self.header_rec['antenna_coord']
        self.rx_gains = self.header_rec['rx_gains']
        self.time = self.header_rec['time'][0]
        dt = datetime.datetime.utcfromtimestamp(self.time)
        # a date rollover marks a discontinuity between blocks
        if dt.date() > self.datatime.date():
            self.flagDiscontinuousBlock = 1
        self.datatime = dt

    def readData(self):
        '''
        Reading and filtering data block record of BLTR rawdata file,
        filtering is according to status_value.

        Input:
            status_value - Array data is set to NAN for values that are not
            equal to status_value

        Returns (height, winds, snr); -9999. sentinels become NaN and SNR is
        converted from dB to linear.
        '''
        self.nchannels = int(self.nchannels)

        data_structure = numpy.dtype(
            DATA_STRUCTURE.descr + [
                ('rx_saturation', 'u4', (self.nchannels,)),
                ('chan_offset', 'u4', (2 * self.nchannels,)),
                ('rx_amp', 'u4', (self.nchannels,)),
                ('rx_snr', 'f4', (self.nchannels,)),
                ('cross_snr', 'f4', (self.kchan,)),
                ('sea_power_relative', 'f4', (self.kchan,))]
        )

        data = numpy.fromfile(self.fp, data_structure, self.nranges)

        height = data['range']
        winds = numpy.array(
            (data['zonal'], data['meridional'], data['vertical']))
        snr = data['rx_snr'].T

        winds[numpy.where(winds == -9999.)] = numpy.nan
        winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
        snr[numpy.where(snr == -9999.)] = numpy.nan
        snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
        snr = numpy.power(10, snr / 10)

        return height, winds, snr

    def set_output(self):
        '''
        Storing data from databuffer to dataOut object
        '''

        self.dataOut.data_SNR = self.snr
        self.dataOut.height = self.height
        self.dataOut.data = self.buffer
        self.dataOut.utctimeInit = self.time
        self.dataOut.utctime = self.dataOut.utctimeInit
        self.dataOut.useLocalTime = False
        self.dataOut.paramInterval = 157
        self.dataOut.timezone = self.timezone
        self.dataOut.site = self.siteFile
        self.dataOut.nrecords = self.nrecords / self.nmodes
        self.dataOut.sizeOfFile = self.sizeOfFile
        self.dataOut.lat = self.lat
        self.dataOut.lon = self.lon
        self.dataOut.channelList = list(range(self.nchannels))
        self.dataOut.kchan = self.kchan
        self.dataOut.delta = self.delta
        self.dataOut.correction = self.correction
        self.dataOut.nmodes = self.nmodes
        self.dataOut.imode = self.imode
        self.dataOut.antenna = self.antenna
        self.dataOut.rx_gains = self.rx_gains
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

    def getData(self):
        '''
        Read the next valid block into dataOut; raises SchainError when the
        data source is exhausted.
        '''
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            raise schainpy.admin.SchainError('No More files to read')

        if not self.readNextBlock():
            self.dataOut.flagNoData = True
            raise schainpy.admin.SchainError('Time for wait new file reach!!!')

        self.set_output()

        return 1
407 407 No newline at end of file
@@ -1,462 +1,462
1 1 import os
2 2 import sys
3 3 import glob
4 4 import fnmatch
5 5 import datetime
6 6 import time
7 7 import re
8 8 import h5py
9 9 import numpy
10 10
11 11 import pylab as plb
12 12 from scipy.optimize import curve_fit
13 13 from scipy import asarray as ar, exp
14 14
15 15 SPEED_OF_LIGHT = 299792458
16 16 SPEED_OF_LIGHT = 3e8
17 17
18 18 from .utils import folder_in_range
19 19
20 20 import schainpy.admin
21 21 from schainpy.model.data.jrodata import Spectra
22 22 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
23 23 from schainpy.utils import log
24 24 from schainpy.model.io.jroIO_base import JRODataReader
25 25
26 26 def pol2cart(rho, phi):
27 27 x = rho * numpy.cos(phi)
28 28 y = rho * numpy.sin(phi)
29 29 return(x, y)
30 30
31 31 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
32 32 ('FileMgcNumber', '<u4'), # 0x23020100
33 33 ('nFDTdataRecors', '<u4'),
34 34 ('OffsetStartHeader', '<u4'),
35 35 ('RadarUnitId', '<u4'),
36 36 ('SiteName', 'S32'), # Null terminated
37 37 ])
38 38
39 39
40 40 class FileHeaderBLTR():
41 41
42 42 def __init__(self, fo):
43 43
44 44 self.fo = fo
45 45 self.size = 48
46 46 self.read()
47 47
48 48 def read(self):
49 49
50 50 header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
51 51 self.FileMgcNumber = hex(header['FileMgcNumber'][0])
52 52 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
53 53 self.RadarUnitId = int(header['RadarUnitId'][0])
54 54 self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
55 55 self.SiteName = header['SiteName'][0]
56 56
57 57 def write(self, fp):
58 58
59 59 headerTuple = (self.FileMgcNumber,
60 60 self.nFDTdataRecors,
61 61 self.RadarUnitId,
62 62 self.SiteName,
63 63 self.size)
64 64
65 65 header = numpy.array(headerTuple, FILE_STRUCTURE)
66 66 header.tofile(fp)
67 67 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
68 68
69 69 fid : file or str
70 70 An open file object, or a string containing a filename.
71 71
72 72 sep : str
73 73 Separator between array items for text output. If "" (empty), a binary file is written,
74 74 equivalent to file.write(a.tobytes()).
75 75
76 76 format : str
77 77 Format string for text file output. Each entry in the array is formatted to text by
78 78 first converting it to the closest Python type, and then using "format" % item.
79 79
80 80 '''
81 81
82 82 return 1
83 83
84 84
85 85 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
86 86 ('RecMgcNumber', '<u4'), # 0x23030001
87 87 ('RecCounter', '<u4'), # Record counter(0,1, ...)
88 88 # Offset to start of next record form start of this record
89 89 ('Off2StartNxtRec', '<u4'),
90 90 # Offset to start of data from start of this record
91 91 ('Off2StartData', '<u4'),
92 92 # Epoch time stamp of start of acquisition (seconds)
93 93 ('nUtime', '<i4'),
94 94 # Millisecond component of time stamp (0,...,999)
95 95 ('nMilisec', '<u4'),
96 96 # Experiment tag name (null terminated)
97 97 ('ExpTagName', 'S32'),
98 98 # Experiment comment (null terminated)
99 99 ('ExpComment', 'S32'),
100 100 # Site latitude (from GPS) in degrees (positive implies North)
101 101 ('SiteLatDegrees', '<f4'),
102 102 # Site longitude (from GPS) in degrees (positive implies East)
103 103 ('SiteLongDegrees', '<f4'),
104 104 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
105 105 ('RTCgpsStatus', '<u4'),
106 106 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
107 107 ('ReceiveFrec', '<u4'), # Receive frequency
108 108 # First local oscillator frequency (Hz)
109 109 ('FirstOsciFrec', '<u4'),
110 110 # (0="O", 1="E", 2="linear 1", 3="linear2")
111 111 ('Polarisation', '<u4'),
112 112 # Receiver filter settings (0,1,2,3)
113 113 ('ReceiverFiltSett', '<u4'),
114 114 # Number of modes in use (1 or 2)
115 115 ('nModesInUse', '<u4'),
116 116 # Dual Mode index number for these data (0 or 1)
117 117 ('DualModeIndex', '<u4'),
118 118 # Dual Mode range correction for these data (m)
119 119 ('DualModeRange', '<u4'),
120 120 # Number of digital channels acquired (2*N)
121 121 ('nDigChannels', '<u4'),
122 122 # Sampling resolution (meters)
123 123 ('SampResolution', '<u4'),
124 124 # Number of range gates sampled
125 125 ('nHeights', '<u4'),
126 126 # Start range of sampling (meters)
127 127 ('StartRangeSamp', '<u4'),
128 128 ('PRFhz', '<u4'), # PRF (Hz)
129 129 ('nCohInt', '<u4'), # Integrations
130 130 # Number of data points transformed
131 131 ('nProfiles', '<u4'),
132 132 # Number of receive beams stored in file (1 or N)
133 133 ('nChannels', '<u4'),
134 134 ('nIncohInt', '<u4'), # Number of spectral averages
135 135 # FFT windowing index (0 = no window)
136 136 ('FFTwindowingInd', '<u4'),
137 137 # Beam steer angle (azimuth) in degrees (clockwise from true North)
138 138 ('BeamAngleAzim', '<f4'),
139 139 # Beam steer angle (zenith) in degrees (0=> vertical)
140 140 ('BeamAngleZen', '<f4'),
141 141 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
142 142 ('AntennaCoord0', '<f4'),
143 143 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
144 144 ('AntennaAngl0', '<f4'),
145 145 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
146 146 ('AntennaCoord1', '<f4'),
147 147 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
148 148 ('AntennaAngl1', '<f4'),
149 149 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
150 150 ('AntennaCoord2', '<f4'),
151 151 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
152 152 ('AntennaAngl2', '<f4'),
153 153 # Receiver phase calibration (degrees) - N values
154 154 ('RecPhaseCalibr0', '<f4'),
155 155 # Receiver phase calibration (degrees) - N values
156 156 ('RecPhaseCalibr1', '<f4'),
157 157 # Receiver phase calibration (degrees) - N values
158 158 ('RecPhaseCalibr2', '<f4'),
159 159 # Receiver amplitude calibration (ratio relative to receiver one) - N values
160 160 ('RecAmpCalibr0', '<f4'),
161 161 # Receiver amplitude calibration (ratio relative to receiver one) - N values
162 162 ('RecAmpCalibr1', '<f4'),
163 163 # Receiver amplitude calibration (ratio relative to receiver one) - N values
164 164 ('RecAmpCalibr2', '<f4'),
165 165 # Receiver gains in dB - N values
166 166 ('ReceiverGaindB0', '<i4'),
167 167 # Receiver gains in dB - N values
168 168 ('ReceiverGaindB1', '<i4'),
169 169 # Receiver gains in dB - N values
170 170 ('ReceiverGaindB2', '<i4'),
171 171 ])
172 172
173 173
174 174 class RecordHeaderBLTR():
175 175
176 176 def __init__(self, fo):
177 177
178 178 self.fo = fo
179 179 self.OffsetStartHeader = 48
180 180 self.Off2StartNxtRec = 811248
181 181
182 182 def read(self, block):
183 183 OffRHeader = self.OffsetStartHeader + block * self.Off2StartNxtRec
184 184 self.fo.seek(OffRHeader, os.SEEK_SET)
185 185 header = numpy.fromfile(self.fo, RECORD_STRUCTURE, 1)
186 186 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
187 187 self.RecCounter = int(header['RecCounter'][0])
188 188 self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
189 189 self.Off2StartData = int(header['Off2StartData'][0])
190 190 self.nUtime = header['nUtime'][0]
191 191 self.nMilisec = header['nMilisec'][0]
192 192 self.ExpTagName = '' # str(header['ExpTagName'][0])
193 193 self.ExpComment = '' # str(header['ExpComment'][0])
194 194 self.SiteLatDegrees = header['SiteLatDegrees'][0]
195 195 self.SiteLongDegrees = header['SiteLongDegrees'][0]
196 196 self.RTCgpsStatus = header['RTCgpsStatus'][0]
197 197 self.TransmitFrec = header['TransmitFrec'][0]
198 198 self.ReceiveFrec = header['ReceiveFrec'][0]
199 199 self.FirstOsciFrec = header['FirstOsciFrec'][0]
200 200 self.Polarisation = header['Polarisation'][0]
201 201 self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
202 202 self.nModesInUse = header['nModesInUse'][0]
203 203 self.DualModeIndex = header['DualModeIndex'][0]
204 204 self.DualModeRange = header['DualModeRange'][0]
205 205 self.nDigChannels = header['nDigChannels'][0]
206 206 self.SampResolution = header['SampResolution'][0]
207 207 self.nHeights = header['nHeights'][0]
208 208 self.StartRangeSamp = header['StartRangeSamp'][0]
209 209 self.PRFhz = header['PRFhz'][0]
210 210 self.nCohInt = header['nCohInt'][0]
211 211 self.nProfiles = header['nProfiles'][0]
212 212 self.nChannels = header['nChannels'][0]
213 213 self.nIncohInt = header['nIncohInt'][0]
214 214 self.FFTwindowingInd = header['FFTwindowingInd'][0]
215 215 self.BeamAngleAzim = header['BeamAngleAzim'][0]
216 216 self.BeamAngleZen = header['BeamAngleZen'][0]
217 217 self.AntennaCoord0 = header['AntennaCoord0'][0]
218 218 self.AntennaAngl0 = header['AntennaAngl0'][0]
219 219 self.AntennaCoord1 = header['AntennaCoord1'][0]
220 220 self.AntennaAngl1 = header['AntennaAngl1'][0]
221 221 self.AntennaCoord2 = header['AntennaCoord2'][0]
222 222 self.AntennaAngl2 = header['AntennaAngl2'][0]
223 223 self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
224 224 self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
225 225 self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
226 226 self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
227 227 self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
228 228 self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
229 229 self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
230 230 self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
231 231 self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
232 232 self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
233 233 self.RHsize = 180 + 20 * self.nChannels
234 234 self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
235 235 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
236 236
237 237
238 238 if OffRHeader > endFp:
239 239 sys.stderr.write(
240 240 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp)
241 241 return 0
242 242
243 243 if OffRHeader < endFp:
244 244 sys.stderr.write(
245 245 "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp)
246 246 return 0
247 247
248 248 return 1
249 249
250 @MPDecorator
250
251 251 class BLTRSpectraReader (ProcessingUnit):
252 252
253 253 def __init__(self):
254 254
255 255 ProcessingUnit.__init__(self)
256 256
257 257 self.ext = ".fdt"
258 258 self.optchar = "P"
259 259 self.fpFile = None
260 260 self.fp = None
261 261 self.BlockCounter = 0
262 262 self.fileSizeByHeader = None
263 263 self.filenameList = []
264 264 self.fileSelector = 0
265 265 self.Off2StartNxtRec = 0
266 266 self.RecCounter = 0
267 267 self.flagNoMoreFiles = 0
268 268 self.data_spc = None
269 269 self.data_cspc = None
270 270 self.path = None
271 271 self.OffsetStartHeader = 0
272 272 self.Off2StartData = 0
273 273 self.ipp = 0
274 274 self.nFDTdataRecors = 0
275 275 self.blocksize = 0
276 276 self.dataOut = Spectra()
277 277 self.dataOut.flagNoData = False
278 278
279 279 def search_files(self):
280 280 '''
281 281 Function that indicates the number of .fdt files that exist in the folder to be read.
282 282 It also creates an organized list with the names of the files to read.
283 283 '''
284 284
285 285 files = glob.glob(os.path.join(self.path, '*{}'.format(self.ext)))
286 286 files = sorted(files)
287 287 for f in files:
288 288 filename = f.split('/')[-1]
289 289 if folder_in_range(filename.split('.')[1], self.startDate, self.endDate, '%Y%m%d'):
290 290 self.filenameList.append(f)
291 291
292 292 def run(self, **kwargs):
293 293 '''
294 294 This method will be the one that will initiate the data entry, will be called constantly.
295 295 You should first verify that your Setup () is set up and then continue to acquire
296 296 the data to be processed with getData ().
297 297 '''
298 298 if not self.isConfig:
299 299 self.setup(**kwargs)
300 300 self.isConfig = True
301 301
302 302 self.getData()
303 303
304 304 def setup(self,
305 305 path=None,
306 306 startDate=None,
307 307 endDate=None,
308 308 startTime=None,
309 309 endTime=None,
310 310 walk=True,
311 311 code=None,
312 312 online=False,
313 313 mode=None,
314 314 **kwargs):
315 315
316 316 self.isConfig = True
317 317
318 318 self.path = path
319 319 self.startDate = startDate
320 320 self.endDate = endDate
321 321 self.startTime = startTime
322 322 self.endTime = endTime
323 323 self.walk = walk
324 324 self.mode = int(mode)
325 325 self.search_files()
326 326 if self.filenameList:
327 327 self.readFile()
328 328
329 329 def getData(self):
330 330 '''
331 331 Before starting this function, you should check that there is still an unread file,
332 332 If there are still blocks to read or if the data block is empty.
333 333
334 334 You should call the file "read".
335 335
336 336 '''
337 337
338 338 if self.flagNoMoreFiles:
339 339 self.dataOut.flagNoData = True
340 340 raise schainpy.admin.SchainError('No more files')
341 341
342 342 self.readBlock()
343 343
344 344 def readFile(self):
345 345 '''
346 346 You must indicate if you are reading in Online or Offline mode and load the
347 347 The parameters for this file reading mode.
348 348
349 349 Then you must do 2 actions:
350 350
351 351 1. Get the BLTR FileHeader.
352 352 2. Start reading the first block.
353 353 '''
354 354
355 355 if self.fileSelector < len(self.filenameList):
356 356 log.success('Opening file: {}'.format(self.filenameList[self.fileSelector]), self.name)
357 357 self.fp = open(self.filenameList[self.fileSelector])
358 358 self.fheader = FileHeaderBLTR(self.fp)
359 359 self.rheader = RecordHeaderBLTR(self.fp)
360 360 self.nFDTdataRecors = self.fheader.nFDTdataRecors
361 361 self.fileSelector += 1
362 362 self.BlockCounter = 0
363 363 return 1
364 364 else:
365 365 self.flagNoMoreFiles = True
366 366 self.dataOut.flagNoData = True
367 367 return 0
368 368
369 369 def readBlock(self):
370 370 '''
371 371 It should be checked if the block has data, if it is not passed to the next file.
372 372
373 373 Then the following is done:
374 374
375 375 1. Read the RecordHeader
376 376 2. Fill the buffer with the current block number.
377 377
378 378 '''
379 379
380 380 if self.BlockCounter == self.nFDTdataRecors:
381 381 if not self.readFile():
382 382 return
383 383
384 384 if self.mode == 1:
385 385 self.rheader.read(self.BlockCounter+1)
386 386 elif self.mode == 0:
387 387 self.rheader.read(self.BlockCounter)
388 388
389 389 self.RecCounter = self.rheader.RecCounter
390 390 self.OffsetStartHeader = self.rheader.OffsetStartHeader
391 391 self.Off2StartNxtRec = self.rheader.Off2StartNxtRec
392 392 self.Off2StartData = self.rheader.Off2StartData
393 393 self.nProfiles = self.rheader.nProfiles
394 394 self.nChannels = self.rheader.nChannels
395 395 self.nHeights = self.rheader.nHeights
396 396 self.frequency = self.rheader.TransmitFrec
397 397 self.DualModeIndex = self.rheader.DualModeIndex
398 398 self.pairsList = [(0, 1), (0, 2), (1, 2)]
399 399 self.dataOut.pairsList = self.pairsList
400 400 self.nRdPairs = len(self.dataOut.pairsList)
401 401 self.dataOut.nRdPairs = self.nRdPairs
402 402 self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
403 403 self.dataOut.channelList = range(self.nChannels)
404 404 self.dataOut.nProfiles=self.rheader.nProfiles
405 405 self.dataOut.nIncohInt=self.rheader.nIncohInt
406 406 self.dataOut.nCohInt=self.rheader.nCohInt
407 407 self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
408 408 self.dataOut.PRF=self.rheader.PRFhz
409 409 self.dataOut.nFFTPoints=self.rheader.nProfiles
410 410 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
411 411 self.dataOut.timeZone = 0
412 412 self.dataOut.useLocalTime = False
413 413 self.dataOut.nmodes = 2
414 414 log.log('Reading block {} - {}'.format(self.BlockCounter, self.dataOut.datatime), self.name)
415 415 OffDATA = self.OffsetStartHeader + self.RecCounter * \
416 416 self.Off2StartNxtRec + self.Off2StartData
417 417 self.fp.seek(OffDATA, os.SEEK_SET)
418 418
419 419 self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
420 420 self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
421 421 self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
422 422 self.data_block = numpy.transpose(self.data_block, (1,2,0))
423 423 copy = self.data_block.copy()
424 424 spc = copy * numpy.conjugate(copy)
425 425 self.data_spc = numpy.absolute(spc) # valor absoluto o magnitud
426 426 self.dataOut.data_spc = self.data_spc
427 427
428 428 cspc = self.data_block.copy()
429 429 self.data_cspc = self.data_block.copy()
430 430
431 431 for i in range(self.nRdPairs):
432 432
433 433 chan_index0 = self.dataOut.pairsList[i][0]
434 434 chan_index1 = self.dataOut.pairsList[i][1]
435 435
436 436 self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :])
437 437
438 438 '''Getting Eij and Nij'''
439 439 (AntennaX0, AntennaY0) = pol2cart(
440 440 self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180)
441 441 (AntennaX1, AntennaY1) = pol2cart(
442 442 self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180)
443 443 (AntennaX2, AntennaY2) = pol2cart(
444 444 self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180)
445 445
446 446 E01 = AntennaX0 - AntennaX1
447 447 N01 = AntennaY0 - AntennaY1
448 448
449 449 E02 = AntennaX0 - AntennaX2
450 450 N02 = AntennaY0 - AntennaY2
451 451
452 452 E12 = AntennaX1 - AntennaX2
453 453 N12 = AntennaY1 - AntennaY2
454 454
455 455 self.ChanDist = numpy.array(
456 456 [[E01, N01], [E02, N02], [E12, N12]])
457 457
458 458 self.dataOut.ChanDist = self.ChanDist
459 459
460 460 self.BlockCounter += 2
461 461 self.dataOut.data_spc = self.data_spc
462 462 self.dataOut.data_cspc =self.data_cspc
@@ -1,1575 +1,1580
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 from schainpy.model.proc.jroproc_base import Operation
17 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
18 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
19 20 from schainpy.utils import log
20 21 import schainpy.admin
21 22
22 23 LOCALTIME = True
23 24 DT_DIRECTIVES = {
24 25 '%Y': 4,
25 26 '%y': 2,
26 27 '%m': 2,
27 28 '%d': 2,
28 29 '%j': 3,
29 30 '%H': 2,
30 31 '%M': 2,
31 32 '%S': 2,
32 33 '%f': 6
33 34 }
34 35
35 36
36 37 def isNumber(cad):
37 38 """
38 39 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
39 40
40 41 Excepciones:
41 42 Si un determinado string no puede ser convertido a numero
42 43 Input:
43 44 str, string al cual se le analiza para determinar si convertible a un numero o no
44 45
45 46 Return:
46 47 True : si el string es uno numerico
47 48 False : no es un string numerico
48 49 """
49 50 try:
50 51 float(cad)
51 52 return True
52 53 except:
53 54 return False
54 55
55 56
56 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
57 58 """
58 59 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
59 60
60 61 Inputs:
61 62 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
62 63
63 64 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
64 65 segundos contados desde 01/01/1970.
65 66 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
66 67 segundos contados desde 01/01/1970.
67 68
68 69 Return:
69 70 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
70 71 fecha especificado, de lo contrario retorna False.
71 72
72 73 Excepciones:
73 74 Si el archivo no existe o no puede ser abierto
74 75 Si la cabecera no puede ser leida.
75 76
76 77 """
77 78 basicHeaderObj = BasicHeader(LOCALTIME)
78 79
79 80 try:
80 81 fp = open(filename, 'rb')
81 82 except IOError:
82 83 print("The file %s can't be opened" % (filename))
83 84 return 0
84 85
85 86 sts = basicHeaderObj.read(fp)
86 87 fp.close()
87 88
88 89 if not(sts):
89 90 print("Skipping the file %s because it has not a valid header" % (filename))
90 91 return 0
91 92
92 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
93 94 return 0
94 95
95 96 return 1
96 97
97 98
98 99 def isTimeInRange(thisTime, startTime, endTime):
99 100 if endTime >= startTime:
100 101 if (thisTime < startTime) or (thisTime > endTime):
101 102 return 0
102 103 return 1
103 104 else:
104 105 if (thisTime < startTime) and (thisTime > endTime):
105 106 return 0
106 107 return 1
107 108
108 109
109 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
110 111 """
111 112 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
112 113
113 114 Inputs:
114 115 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
115 116
116 117 startDate : fecha inicial del rango seleccionado en formato datetime.date
117 118
118 119 endDate : fecha final del rango seleccionado en formato datetime.date
119 120
120 121 startTime : tiempo inicial del rango seleccionado en formato datetime.time
121 122
122 123 endTime : tiempo final del rango seleccionado en formato datetime.time
123 124
124 125 Return:
125 126 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
126 127 fecha especificado, de lo contrario retorna False.
127 128
128 129 Excepciones:
129 130 Si el archivo no existe o no puede ser abierto
130 131 Si la cabecera no puede ser leida.
131 132
132 133 """
133 134
134 135 try:
135 136 fp = open(filename, 'rb')
136 137 except IOError:
137 138 print("The file %s can't be opened" % (filename))
138 139 return None
139 140
140 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
141 142 systemHeaderObj = SystemHeader()
142 143 radarControllerHeaderObj = RadarControllerHeader()
143 144 processingHeaderObj = ProcessingHeader()
144 145
145 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
146 147
147 148 sts = firstBasicHeaderObj.read(fp)
148 149
149 150 if not(sts):
150 151 print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
151 152 return None
152 153
153 154 if not systemHeaderObj.read(fp):
154 155 return None
155 156
156 157 if not radarControllerHeaderObj.read(fp):
157 158 return None
158 159
159 160 if not processingHeaderObj.read(fp):
160 161 return None
161 162
162 163 filesize = os.path.getsize(filename)
163 164
164 165 offset = processingHeaderObj.blockSize + 24 # header size
165 166
166 167 if filesize <= offset:
167 168 print("[Reading] %s: This file has not enough data" % filename)
168 169 return None
169 170
170 171 fp.seek(-offset, 2)
171 172
172 173 sts = lastBasicHeaderObj.read(fp)
173 174
174 175 fp.close()
175 176
176 177 thisDatetime = lastBasicHeaderObj.datatime
177 178 thisTime_last_block = thisDatetime.time()
178 179
179 180 thisDatetime = firstBasicHeaderObj.datatime
180 181 thisDate = thisDatetime.date()
181 182 thisTime_first_block = thisDatetime.time()
182 183
183 184 # General case
184 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
185 186 #-----------o----------------------------o-----------
186 187 # startTime endTime
187 188
188 189 if endTime >= startTime:
189 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
190 191 return None
191 192
192 193 return thisDatetime
193 194
194 195 # If endTime < startTime then endTime belongs to the next day
195 196
196 197 #<<<<<<<<<<<o o>>>>>>>>>>>
197 198 #-----------o----------------------------o-----------
198 199 # endTime startTime
199 200
200 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
201 202 return None
202 203
203 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
204 205 return None
205 206
206 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
207 208 return None
208 209
209 210 return thisDatetime
210 211
211 212
212 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
213 214 """
214 215 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
215 216
216 217 Inputs:
217 218 folder : nombre completo del directorio.
218 219 Su formato deberia ser "/path_root/?YYYYDDD"
219 220
220 221 siendo:
221 222 YYYY : Anio (ejemplo 2015)
222 223 DDD : Dia del anio (ejemplo 305)
223 224
224 225 startDate : fecha inicial del rango seleccionado en formato datetime.date
225 226
226 227 endDate : fecha final del rango seleccionado en formato datetime.date
227 228
228 229 Return:
229 230 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
230 231 fecha especificado, de lo contrario retorna False.
231 232 Excepciones:
232 233 Si el directorio no tiene el formato adecuado
233 234 """
234 235
235 236 basename = os.path.basename(folder)
236 237
237 238 if not isRadarFolder(basename):
238 239 print("The folder %s has not the rigth format" % folder)
239 240 return 0
240 241
241 242 if startDate and endDate:
242 243 thisDate = getDateFromRadarFolder(basename)
243 244
244 245 if thisDate < startDate:
245 246 return 0
246 247
247 248 if thisDate > endDate:
248 249 return 0
249 250
250 251 return 1
251 252
252 253
253 254 def isFileInDateRange(filename, startDate=None, endDate=None):
254 255 """
255 256 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
256 257
257 258 Inputs:
258 259 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
259 260
260 261 Su formato deberia ser "?YYYYDDDsss"
261 262
262 263 siendo:
263 264 YYYY : Anio (ejemplo 2015)
264 265 DDD : Dia del anio (ejemplo 305)
265 266 sss : set
266 267
267 268 startDate : fecha inicial del rango seleccionado en formato datetime.date
268 269
269 270 endDate : fecha final del rango seleccionado en formato datetime.date
270 271
271 272 Return:
272 273 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
273 274 fecha especificado, de lo contrario retorna False.
274 275 Excepciones:
275 276 Si el archivo no tiene el formato adecuado
276 277 """
277 278
278 279 basename = os.path.basename(filename)
279 280
280 281 if not isRadarFile(basename):
281 282 print("The filename %s has not the rigth format" % filename)
282 283 return 0
283 284
284 285 if startDate and endDate:
285 286 thisDate = getDateFromRadarFile(basename)
286 287
287 288 if thisDate < startDate:
288 289 return 0
289 290
290 291 if thisDate > endDate:
291 292 return 0
292 293
293 294 return 1
294 295
295 296
296 297 def getFileFromSet(path, ext, set):
297 298 validFilelist = []
298 299 fileList = os.listdir(path)
299 300
300 301 # 0 1234 567 89A BCDE
301 302 # H YYYY DDD SSS .ext
302 303
303 304 for thisFile in fileList:
304 305 try:
305 306 year = int(thisFile[1:5])
306 307 doy = int(thisFile[5:8])
307 308 except:
308 309 continue
309 310
310 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
311 312 continue
312 313
313 314 validFilelist.append(thisFile)
314 315
315 316 myfile = fnmatch.filter(
316 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
317 318
318 319 if len(myfile) != 0:
319 320 return myfile[0]
320 321 else:
321 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
322 323 print('the filename %s does not exist' % filename)
323 324 print('...going to the last file: ')
324 325
325 326 if validFilelist:
326 327 validFilelist = sorted(validFilelist, key=str.lower)
327 328 return validFilelist[-1]
328 329
329 330 return None
330 331
331 332
332 333 def getlastFileFromPath(path, ext):
333 334 """
334 335 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
335 336 al final de la depuracion devuelve el ultimo file de la lista que quedo.
336 337
337 338 Input:
338 339 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
339 340 ext : extension de los files contenidos en una carpeta
340 341
341 342 Return:
342 343 El ultimo file de una determinada carpeta, no se considera el path.
343 344 """
344 345 validFilelist = []
345 346 fileList = os.listdir(path)
346 347
347 348 # 0 1234 567 89A BCDE
348 349 # H YYYY DDD SSS .ext
349 350
350 351 for thisFile in fileList:
351 352
352 353 year = thisFile[1:5]
353 354 if not isNumber(year):
354 355 continue
355 356
356 357 doy = thisFile[5:8]
357 358 if not isNumber(doy):
358 359 continue
359 360
360 361 year = int(year)
361 362 doy = int(doy)
362 363
363 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
364 365 continue
365 366
366 367 validFilelist.append(thisFile)
367 368
368 369 if validFilelist:
369 370 validFilelist = sorted(validFilelist, key=str.lower)
370 371 return validFilelist[-1]
371 372
372 373 return None
373 374
374 375
375 376 def isRadarFolder(folder):
376 377 try:
377 378 year = int(folder[1:5])
378 379 doy = int(folder[5:8])
379 380 except:
380 381 return 0
381 382
382 383 return 1
383 384
384 385
385 386 def isRadarFile(file):
386 387 try:
387 388 year = int(file[1:5])
388 389 doy = int(file[5:8])
389 390 set = int(file[8:11])
390 391 except:
391 392 return 0
392 393
393 394 return 1
394 395
395 396
396 397 def getDateFromRadarFile(file):
397 398 try:
398 399 year = int(file[1:5])
399 400 doy = int(file[5:8])
400 401 set = int(file[8:11])
401 402 except:
402 403 return None
403 404
404 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
405 406 return thisDate
406 407
407 408
408 409 def getDateFromRadarFolder(folder):
409 410 try:
410 411 year = int(folder[1:5])
411 412 doy = int(folder[5:8])
412 413 except:
413 414 return None
414 415
415 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
416 417 return thisDate
417 418
418 419 def parse_format(s, fmt):
419 420
420 421 for i in range(fmt.count('%')):
421 422 x = fmt.index('%')
422 423 d = DT_DIRECTIVES[fmt[x:x+2]]
423 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
424 425 return fmt
425 426
class Reader(object):
    """Abstract base class for schainpy file readers and writers.

    Holds the state shared by all file-based units (current file handle,
    header sizes, search parameters) and implements the folder/file search
    logic used in both offline and online (realtime) modes.  Concrete
    subclasses must implement `run` and, for readers, `checkForRealPath`
    and `readFirstHeader`.
    """

    c = 3E8
    isConfig = False
    dtype = None
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None
    ext = None
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None
    firstHeaderSize = 0
    basicHeaderSize = 24
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False
    delay = 60
    nTries = 3  # quantity tries
    nFiles = 3  # number of files for searching
    walk = True
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open
    open_mode = 'rb'

    def run(self):
        """Entry point of the unit; must be provided by subclasses."""

        raise NotImplementedError

    def getAllowedArgs(self):
        """Return the names of the keyword arguments accepted by `run`.

        Uses the explicit `__attrs__` list when the subclass declares one,
        otherwise introspects the `run` signature.
        """
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            # FIX: inspect.getargspec was deprecated and removed in
            # Python 3.11; getfullargspec is the supported equivalent
            # (already used elsewhere in this project).
            return inspect.getfullargspec(self.run).args

    def set_kwargs(self, **kwargs):
        """Copy every keyword argument into a same-named instance attribute."""

        for key, value in kwargs.items():
            setattr(self, key, value)

    def find_folders(self, path, startDate, endDate, folderfmt, last=False):
        """Yield full paths of subfolders whose encoded date is in range.

        Arguments:
            path      -- one directory, or several separated by commas
            startDate -- first date (inclusive)
            endDate   -- last date (inclusive)
            folderfmt -- strftime-like pattern used to parse folder names
            last      -- if True only the (lexicographically) last folder
                         is considered (online mode)

        FIX: folders found under a comma-separated multi-path were joined
        back to the raw comma-joined string, producing invalid paths; each
        folder is now joined to the directory it was actually listed from.
        """
        candidates = sorted(
            (name, parent)
            for parent in path.split(',')
            for name in os.listdir(parent)
            if os.path.isdir(os.path.join(parent, name)))

        if last:
            candidates = [candidates[-1]]

        for folder, parent in candidates:
            try:
                dt = datetime.datetime.strptime(
                    parse_format(folder, folderfmt), folderfmt).date()
                if startDate <= dt <= endDate:
                    yield os.path.join(parent, folder)
                else:
                    log.log('Skiping folder {}'.format(folder), self.name)
            except Exception as e:
                # folder name did not match the expected date pattern
                log.log('Skiping folder {}'.format(folder), self.name)
                continue
        return

    def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
                   expLabel='', last=False):
        """Yield full paths of data files in `folders` whose date is in range.

        When `last` is True only the last file of the first folder is
        yielded (online mode); otherwise every matching file is yielded in
        sorted order.
        """
        for path in folders:
            files = glob.glob1(path, '*{}'.format(ext))
            files.sort()
            if last:
                if files:
                    fo = files[-1]
                    try:
                        dt = datetime.datetime.strptime(
                            parse_format(fo, filefmt), filefmt).date()
                        yield os.path.join(path, expLabel, fo)
                    except Exception as e:
                        pass
                    return
                else:
                    return

            for fo in files:
                try:
                    dt = datetime.datetime.strptime(
                        parse_format(fo, filefmt), filefmt).date()
                    if startDate <= dt <= endDate:
                        yield os.path.join(path, expLabel, fo)
                    else:
                        log.log('Skiping file {}'.format(fo), self.name)
                except Exception as e:
                    log.log('Skiping file {}'.format(fo), self.name)
                    continue

    def searchFilesOffLine(self, path, startDate, endDate,
                           expLabel, ext, walk,
                           filefmt, folderfmt):
        """Search files in offline mode for the given arguments

        Return:
            Generator of files
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel)

    def searchFilesOnLine(self, path, startDate, endDate,
                          expLabel, ext, walk,
                          filefmt, folderfmt):
        """Search for the last file of the last folder

        Arguments:
            path     -- folder containing the data files
            expLabel -- name of the sub-experiment (subfolder)
            ext      -- file extension
            walk     -- if False, do not search inside subdirectories (doypath)

        Return:
            generator with the full path of last filename
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt, last=True)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel, last=True)

    def setNextFile(self):
        """Set the next file to be readed open it and parse de file header"""

        while True:
            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):
                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    if self.fileIndex == -1:
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0

    def setNextFileOnline(self):
        """Check for the next file to be readed in online mode.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean
        """
        nextFile = True
        nextDay = False

        # retry loop: first over tries within the day, then over next days
        for nFiles in range(self.nFiles + 1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                nextFile = False
                continue

            if fullfilename is not None:
                break

            self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)
            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0

    def setNextFileOffline(self):
        """Open the next file to be readed in offline mode"""

        try:
            filename = next(self.filenameList)
            self.fileIndex += 1
        except StopIteration:
            self.flagNoMoreFiles = 1
            return 0

        self.filename = filename
        self.fileSize = os.path.getsize(filename)
        self.fp = self.open_file(filename, self.open_mode)
        self.flagIsNewFile = 1

        return 1

    @staticmethod
    def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
        """Check if the given datetime is in range"""

        if startDate <= dt.date() <= endDate:
            if startTime <= dt.time() <= endTime:
                return True
        return False

    def verifyFile(self, filename):
        """Check for a valid file

        Arguments:
            filename -- full path filename

        Return:
            boolean
        """

        return True

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists"""

        raise NotImplementedError

    def readFirstHeader(self):
        """Parse the file header"""

        pass
718 719
class JRODataReader(Reader):
    """Reader for JRO binary data files (.r voltage / .pdata spectra).

    Extends `Reader` with the JRO file layout: a first header (basic +
    system + radar controller + processing headers) followed by fixed-size
    data blocks, each preceded by a basic header.
    """

    utc = 0
    nReadBlocks = 0
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24
    __isFirstTimeOnline = 1
    filefmt = "*%Y%j***"
    folderfmt = "*%Y%j"
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']

    def getDtypeWidth(self):
        """Return the width in bytes of the current sample data type."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists.

        Example:
            the correct file name is .../.../D2009307/P2009307367.ext

        So this method tries the following combinations:
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last combination being identical to the file searched for.

        Return:
            str -- fullpath of the file
        """

        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']

        # sweep over every possible combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the folder name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # sweep over both case variants
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # reopen to pick up data appended by the acquisition process
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0

    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait (retrying) until a whole data block is available at
        `pointer_location`; return 1 on success, 0 after exhausting tries."""

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize  # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0

    def __setNewBlock(self):
        """Position the file pointer at the next block, switching files and
        flagging time discontinuities as needed; return 1 on success."""

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

    def readNextBlock(self):
        """Read the next block within the configured date/time range;
        return 1 on success, 0 when no block could be read."""

        while True:
            self.__setNewBlock()

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        return 1

    def readFirstHeader(self):
        """Parse the four leading headers and derive dtype and file size."""

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)
        self.firstHeaderSize = self.basicHeaderObj.size

        datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
                                   PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
        else:
            raise ValueError('Data type was not defined')

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
            self.firstHeaderSize + self.basicHeaderSize * \
            (self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()

    def verifyFile(self, filename, msgFlag=True):
        """Validate that `filename` has readable headers and (offline) that
        its first/last block times intersect the configured range."""

        msg = None

        try:
            fp = open(filename, 'rb')
        except IOError:

            if msgFlag:
                print("[Reading] File %s can't be opened" % (filename))

            return False

        if self.waitDataBlock(0):
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            if not(basicHeaderObj.read(fp)):
                fp.close()
                return False

            if not(systemHeaderObj.read(fp)):
                fp.close()
                return False

            if not(radarControllerHeaderObj.read(fp)):
                fp.close()
                return False

            if not(processingHeaderObj.read(fp)):
                fp.close()
                return False

            if not self.online:
                dt1 = basicHeaderObj.datatime
                # jump to the last basic header of the file
                fp.seek(self.fileSize - processingHeaderObj.blockSize - 24)
                if not(basicHeaderObj.read(fp)):
                    fp.close()
                    return False
                dt2 = basicHeaderObj.datatime
                if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                   self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                    return False

        fp.close()

        return True

    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """Return the sorted list of dates (and optionally paths) that have
        data files under `path` (comma-separated multi-path supported)."""

        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*" + ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    if thisDate in dateList or single_path in pathList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*" + ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
        else:
            if not dateList:
                raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))

        if include_path:
            return dateList, pathList

        return dateList

    def setup(self, **kwargs):
        """Configure the reader from keyword arguments and open the first
        file (offline search or online polling, or a ZMQ server socket)."""

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.server is not None:
            if 'tcp://' in self.server:
                # FIX: was `address = server`, a NameError — the value lives
                # on the instance, not in a local variable.
                address = self.server
            else:
                address = 'ipc:///tmp/%s' % self.server
            self.server = address
            self.context = zmq.Context()
            self.receiver = self.context.socket(zmq.PULL)
            self.receiver.connect(self.server)
            time.sleep(0.5)
            print('[Starting] ReceiverData from {}'.format(self.server))
        else:
            self.server = None
            if self.path == None:
                raise ValueError("[Reading] The path is not valid")

            if self.online:
                log.log("[Reading] Searching files in online mode...", self.name)

                for nTries in range(self.nTries):
                    fullpath = self.searchFilesOnLine(self.path, self.startDate,
                                                      self.endDate, self.expLabel, self.ext, self.walk,
                                                      self.filefmt, self.folderfmt)

                    try:
                        fullpath = next(fullpath)
                    except:
                        fullpath = None

                    if fullpath:
                        break

                    log.warning(
                        'Waiting {} sec for a valid file in {}: try {} ...'.format(
                            self.delay, self.path, nTries + 1),
                        self.name)
                    time.sleep(self.delay)

                if not(fullpath):
                    raise schainpy.admin.SchainError(
                        'There isn\'t any valid file in {}'.format(self.path))

                pathname, filename = os.path.split(fullpath)
                self.year = int(filename[1:5])
                self.doy = int(filename[5:8])
                self.set = int(filename[8:11]) - 1
            else:
                log.log("Searching files in {}".format(self.path), self.name)
                self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                                                            self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

            self.setNextFile()

        return

    def getBasicHeader(self):
        """Copy per-block basic header fields into the dataOut object."""

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
            1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs

        # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs

    def getFirstHeader(self):

        raise NotImplementedError

    def getData(self):

        raise NotImplementedError

    def hasNotDataInBuffer(self):

        raise NotImplementedError

    def readBlock(self):

        raise NotImplementedError

    def isEndProcess(self):
        """Return True when there are no more files to read."""

        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)

    def printTotalBlocks(self):

        print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)

    def run(self, **kwargs):
        """

        Arguments:
            path :
            startDate :
            endDate :
            startTime :
            endTime :
            set :
            expLabel :
            ext :
            online :
            delay :
            walk :
            getblock :
            nTxs :
            realtime :
            blocksize :
            blocktime :
            skip :
            cursor :
            warnings :
            server :
            verbose :
            format :
            oneDDict :
            twoDDict :
            independentParam :
        """

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True
        if self.server is None:
            self.getData()
        else:
            self.getFromServer()
1275 1256
1276 1257
class JRODataWriter(Reader):

    """
    Writer of processed data files (.r or .pdata).  Data is always written
    block by block, each file holding `blocksPerFile` blocks preceded by the
    JRO headers.
    """

    setFile = None
    profilesPerBlock = None
    blocksPerFile = None
    nWriteBlocks = 0
    fileDate = None

    def __init__(self, dataOut=None):
        raise NotImplementedError

    def hasAllDataInBuffer(self):
        raise NotImplementedError

    def setBlockDimension(self):
        raise NotImplementedError

    def writeBlock(self):
        raise NotImplementedError

    def putData(self):
        raise NotImplementedError

    def getDtypeWidth(self):
        """Return the width in bytes of the current sample data type."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getProcessFlags(self):
        """Build the PROCFLAG bitmask describing dtype and applied processing."""

        processFlags = 0

        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

            if self.dataOut.flagShiftFFT:
                processFlags += PROCFLAG.SHIFT_FFT_DATA

        return processFlags

    def setBasicHeader(self):
        """Fill the basic header fields from the current dataOut state."""

        self.basicHeaderObj.size = self.basicHeaderSize  # bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0
        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Build a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise NotImplementedError

    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute total header size before writing
        sizeLongHeader = self.systemHeaderObj.size + \
            self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

    def __setNewBlock(self):
        """
        Write the First Header if this is a new file, otherwise only the
        Basic Header.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not(self.setNextFile()):
            return 0

        return 1

    def writeNextBlock(self):
        """
        Select the next data block and write it to file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not(self.__setNewBlock()):
            return 0

        self.writeBlock()

        print("[Writing] Block No. %d/%d" % (self.blockIndex,
                                             self.processingHeaderObj.dataBlocksPerFile))

        return 1

    def setNextFile(self):
        """Determine the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        # FIX: makedirs(exist_ok=True) also creates missing parents and
        # avoids the exists()/mkdir race of the original code.
        os.makedirs(path, exist_ok=True)

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

        fullpath = os.path.join(path, subfolder)
        setFile = self.setFile

        if not(os.path.exists(fullpath)):
            os.makedirs(fullpath, exist_ok=True)
            setFile = -1  # initialize the set counter
        else:
            filesList = os.listdir(fullpath)
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # continue from the set of the last existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # initialize the set counter

        setFile += 1

        # If this is a new day it resets some values
        if self.dataOut.datatime.date() > self.fileDate:
            setFile = 0
            self.nTotalBlocks = 0

        filen = '{}{:04d}{:03d}{:03d}{}'.format(
            self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)

        filename = os.path.join(path, subfolder, filen)

        fp = open(filename, 'wb')

        self.blockIndex = 0
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1
        self.fileDate = self.dataOut.datatime.date()
        self.setFirstHeader()

        print('[Writing] Opening file: %s' % self.filename)

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
        """
        Set the output format and write the First Header.

        Inputs:
            path : directory where data will be saved
            profilesPerBlock : number of profiles per block
            set : initial file set
            datatype : An integer number that defines data type:
                0 : int8 (1 byte)
                1 : int16 (2 bytes)
                2 : int32 (4 bytes)
                3 : int64 (8 bytes)
                4 : float32 (4 bytes)
                5 : double64 (8 bytes)

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        self.ext = ext.lower()

        self.path = path

        if set is None:
            self.setFile = -1
        else:
            self.setFile = set - 1

        self.blocksPerFile = blocksPerFile
        self.profilesPerBlock = profilesPerBlock
        self.dataOut = dataOut
        self.fileDate = self.dataOut.datatime.date()
        self.dtype = self.dataOut.dtype

        if datatype is not None:
            self.dtype = get_numpy_dtype(datatype)

        if not(self.setNextFile()):
            print("[Writing] There isn't a next file")
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
        """Operation entry point: configure on first call, then write the
        incoming dataOut and return it unchanged."""

        if not(self.isConfig):

            self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
                       set=set, ext=ext, datatype=datatype, **kwargs)
            self.isConfig = True

        self.dataOut = dataOut
        self.putData()
        return self.dataOut
1557
class printInfo(Operation):
    """One-shot operation that prints the header objects attached to dataOut.

    The headers are printed only on the first call; subsequent calls pass
    dataOut through untouched.
    """

    def __init__(self):

        Operation.__init__(self)
        # becomes False after the first run so headers print only once
        self.__printInfo = True

    def run(self, dataOut, headers=('systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj')):
        """Print each named header of dataOut (once) and return dataOut.

        Arguments:
            dataOut -- data object carrying the header attributes
            headers -- attribute names to print (FIX: default is now a
                       tuple instead of a mutable list default argument)
        """
        if self.__printInfo == False:
            return dataOut

        for header in headers:
            if hasattr(dataOut, header):
                obj = getattr(dataOut, header)
                if hasattr(obj, 'printInfo'):
                    obj.printInfo()
                else:
                    print(obj)
            else:
                log.warning('Header {} Not found in object'.format(header))

        self.__printInfo = False
        return dataOut
@@ -1,794 +1,794
1 1
2 2 '''
3 3 Created on Jul 3, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 # SUBCHANNELS EN VEZ DE CHANNELS
8 8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
9 9 # ACTUALIZACION DE VERSION
10 10 # HEADERS
11 11 # MODULO DE ESCRITURA
12 12 # METADATA
13 13
14 14 import os
15 15 import time
16 16 import datetime
17 17 import numpy
18 18 import timeit
19 19 from fractions import Fraction
20 20 from time import time
21 21 from time import sleep
22 22
23 23 import schainpy.admin
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27
28 28 import pickle
29 29 try:
30 30 import digital_rf
31 31 except:
32 32 pass
33 33
34 @MPDecorator
34
35 35 class DigitalRFReader(ProcessingUnit):
36 36 '''
37 37 classdocs
38 38 '''
39 39
    def __init__(self):
        '''
        Constructor: initialize reader state; no I/O happens here.
        '''

        ProcessingUnit.__init__(self)

        self.dataOut = Voltage()                  # output data container
        self.__printInfo = True                   # print headers only once
        self.__flagDiscontinuousBlock = False     # set when a read gap is detected
        self.__bufferIndex = 9999999              # sentinel: forces a first block read
        self.__codeType = 0
        self.__ippKm = None
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
        self.dtype = None
        self.oldAverage = None                    # running average of read time (seconds)
        self.path = None
59 59
60 60 def close(self):
61 61 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
62 62 return
63 63
    def __getCurrentSecond(self):
        # Current read position expressed in Unix seconds (samples / rate).

        return self.__thisUnixSample / self.__sample_rate

    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69 69
    def __setFileHeader(self):
        '''
        Initialize every header parameter of the dataOut object (no data yet).
        Values come from the Digital RF fixed metadata when available,
        otherwise from defaults computed out of the sampling parameters.
        '''
        ippSeconds = 1.0 * self.__nSamples / self.__sample_rate

        nProfiles = 1.0 / ippSeconds # Number of profiles in one second

        try:
            # Preferred: rebuild the header from the stored metadata dict.
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
                self.__radarControllerHeader)
        except:
            # Fallback: synthesize a minimal header from setup() parameters.
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
                txA=0,
                txB=0,
                nWindows=1,
                nHeights=self.__nSamples,
                firstHeight=self.__firstHeigth,
                deltaHeight=self.__deltaHeigth,
                codeType=self.__codeType,
                nCode=self.__nCode, nBaud=self.__nBaud,
                code=self.__code)

        try:
            self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
        except:
            self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                        nProfiles=nProfiles,
                                                        nChannels=len(
                                                            self.__channelList),
                                                        adcResolution=14)
        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = self.dtype

        # self.dataOut.nChannels = 0

        # self.dataOut.nHeights = 0

        self.dataOut.nProfiles = int(nProfiles)

        # NOTE(review): numpy.float is deprecated in modern numpy — verify target version.
        self.dataOut.heightList = self.__firstHeigth + \
            numpy.arange(self.__nSamples, dtype=numpy.float) * \
            self.__deltaHeigth

        self.dataOut.channelList = list(range(self.__num_subchannels))

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        # self.dataOut.channelIndexList = None

        self.dataOut.flagNoData = True

        self.dataOut.flagDataAsBlock = False
        # Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        # timezone like jroheader, difference in minutes between UTC and localtime
        self.dataOut.timeZone = self.__timezone / 60

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        try:
            self.dataOut.nCohInt = self.fixed_metadata_dict.get(
                'nCohInt', self.nCohInt)

            # assume the data is already decoded
            self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
                'flagDecodeData', self.flagDecodeData)

            # assume the data has not been flipped
            self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']

            self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']

            self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
        except:
            pass

        self.dataOut.ippSeconds = ippSeconds

        # Time interval between profiles
        # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
163 163
164 164 def findDatafiles(self, path, startDate=None, endDate=None):
165 165
166 166 if not os.path.isdir(path):
167 167 return []
168 168
169 169 try:
170 170 digitalReadObj = digital_rf.DigitalRFReader(
171 171 path, load_all_metadata=True)
172 172 except:
173 173 digitalReadObj = digital_rf.DigitalRFReader(path)
174 174
175 175 channelNameList = digitalReadObj.get_channels()
176 176
177 177 if not channelNameList:
178 178 return []
179 179
180 180 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
181 181
182 182 sample_rate = metadata_dict['sample_rate'][0]
183 183
184 184 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
185 185
186 186 try:
187 187 timezone = this_metadata_file['timezone'].value
188 188 except:
189 189 timezone = 0
190 190
191 191 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
192 192 channelNameList[0]) / sample_rate - timezone
193 193
194 194 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
195 195 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
196 196
197 197 if not startDate:
198 198 startDate = startDatetime.date()
199 199
200 200 if not endDate:
201 201 endDate = endDatatime.date()
202 202
203 203 dateList = []
204 204
205 205 thisDatetime = startDatetime
206 206
207 207 while(thisDatetime <= endDatatime):
208 208
209 209 thisDate = thisDatetime.date()
210 210
211 211 if thisDate < startDate:
212 212 continue
213 213
214 214 if thisDate > endDate:
215 215 break
216 216
217 217 dateList.append(thisDate)
218 218 thisDatetime += datetime.timedelta(1)
219 219
220 220 return dateList
221 221
    def setup(self, path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0, 0, 0),
              endTime=datetime.time(23, 59, 59),
              channelList=None,
              nSamples=None,
              online=False,
              delay=60,
              buffer_size=1024,
              ippKm=None,
              nCohInt=1,
              nCode=1,
              nBaud=1,
              flagDecodeData=False,
              code=numpy.ones((1, 1), dtype=numpy.int),
              **kwargs):
        '''
        Set all initial parameters and open the Digital RF dataset.

        Inputs:
            path : Digital RF top-level directory (must exist).
            startDate / endDate : date bounds of the data to read.
            startTime / endTime : time-of-day bounds.
            channelList : channel indices to read (default: all).
            nSamples : samples per profile (default: from metadata or ippKm).
            online : when True, follow newly arriving data in real time.
            delay : seconds to wait between online read retries.
            ippKm / nCohInt / nCode / nBaud / code : radar parameters used
                when the stored metadata does not provide them.

        Raises ValueError when the path does not exist, has no channels,
        or neither nSamples nor ippKm can be determined.

        NOTE(review): the `code` default is a mutable numpy array shared
        across calls, and numpy.int/numpy.complex are deprecated aliases —
        confirm against the supported numpy version.
        '''
        self.path = path
        self.nCohInt = nCohInt
        self.flagDecodeData = flagDecodeData
        self.i = 0
        if not os.path.isdir(path):
            raise ValueError("[Reading] Directory %s does not exist" % path)

        try:
            self.digitalReadObj = digital_rf.DigitalRFReader(
                path, load_all_metadata=True)
        except:
            self.digitalReadObj = digital_rf.DigitalRFReader(path)

        channelNameList = self.digitalReadObj.get_channels()

        if not channelNameList:
            raise ValueError("[Reading] Directory %s does not have any files" % path)

        if not channelList:
            channelList = list(range(len(channelNameList)))

        ########## Reading metadata ######################

        top_properties = self.digitalReadObj.get_properties(
            channelNameList[channelList[0]])

        self.__num_subchannels = top_properties['num_subchannels']
        self.__sample_rate = 1.0 * \
            top_properties['sample_rate_numerator'] / \
            top_properties['sample_rate_denominator']
        # self.__samples_per_file = top_properties['samples_per_file'][0]
        self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate  # why 0.15?

        this_metadata_file = self.digitalReadObj.get_digital_metadata(
            channelNameList[channelList[0]])
        metadata_bounds = this_metadata_file.get_bounds()
        self.fixed_metadata_dict = this_metadata_file.read(
            metadata_bounds[0])[metadata_bounds[0]]  # GET FIRST HEADER

        try:
            self.__processingHeader = self.fixed_metadata_dict['processingHeader']
            self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
            self.__systemHeader = self.fixed_metadata_dict['systemHeader']
            self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
        except:
            pass

        self.__frequency = None

        self.__frequency = self.fixed_metadata_dict.get('frequency', 1)

        self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)

        try:
            nSamples = self.fixed_metadata_dict['nSamples']
        except:
            nSamples = None

        self.__firstHeigth = 0

        try:
            codeType = self.__radarControllerHeader['codeType']
        except:
            codeType = 0

        try:
            if codeType:
                nCode = self.__radarControllerHeader['nCode']
                nBaud = self.__radarControllerHeader['nBaud']
                code = self.__radarControllerHeader['code']
        except:
            pass

        if not ippKm:
            try:
                # seconds to km
                ippKm = self.__radarControllerHeader['ipp']
            except:
                ippKm = None
        ####################################################
        self.__ippKm = ippKm
        startUTCSecond = None
        endUTCSecond = None

        if startDate:
            startDatetime = datetime.datetime.combine(startDate, startTime)
            startUTCSecond = (
                startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone

        if endDate:
            endDatetime = datetime.datetime.combine(endDate, endTime)
            endUTCSecond = (endDatetime - datetime.datetime(1970,
                                                            1, 1)).total_seconds() + self.__timezone

        start_index, end_index = self.digitalReadObj.get_bounds(
            channelNameList[channelList[0]])

        # Clamp the requested range to the data actually on disk.
        if not startUTCSecond:
            startUTCSecond = start_index / self.__sample_rate

        if start_index > startUTCSecond * self.__sample_rate:
            startUTCSecond = start_index / self.__sample_rate

        if not endUTCSecond:
            endUTCSecond = end_index / self.__sample_rate

        if end_index < endUTCSecond * self.__sample_rate:
            endUTCSecond = end_index / self.__sample_rate
        if not nSamples:
            if not ippKm:
                raise ValueError("[Reading] nSamples or ippKm should be defined")
            nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
        channelBoundList = []
        channelNameListFiltered = []

        for thisIndexChannel in channelList:
            thisChannelName = channelNameList[thisIndexChannel]
            start_index, end_index = self.digitalReadObj.get_bounds(
                thisChannelName)
            channelBoundList.append((start_index, end_index))
            channelNameListFiltered.append(thisChannelName)

        self.profileIndex = 0
        self.i = 0
        self.__delay = delay

        self.__codeType = codeType
        self.__nCode = nCode
        self.__nBaud = nBaud
        self.__code = code

        self.__datapath = path
        self.__online = online
        self.__channelList = channelList
        self.__channelNameList = channelNameListFiltered
        self.__channelBoundList = channelBoundList
        self.__nSamples = nSamples
        self.__samples_to_read = int(nSamples)  # FIXED: currently 40
        self.__nChannels = len(self.__channelList)

        self.__startUTCSecond = startUTCSecond
        self.__endUTCSecond = endUTCSecond

        self.__timeInterval = 1.0 * self.__samples_to_read / \
            self.__sample_rate  # Time interval

        if online:
            # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
            startUTCSecond = numpy.floor(endUTCSecond)

        # why does the other method first add samples_to_read?
        self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read

        self.__data_buffer = numpy.zeros(
            (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)

        self.__setFileHeader()
        self.isConfig = True

        print("[Reading] Digital RF Data was found from %s to %s " % (
            datetime.datetime.utcfromtimestamp(
                self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(
                self.__endUTCSecond - self.__timezone)
        ))

        print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
                                                            datetime.datetime.utcfromtimestamp(
                                                                endUTCSecond - self.__timezone)
                                                            ))
        self.oldAverage = None
        self.count = 0
        self.executionTime = 0
427 427
    def __reload(self):
        # Refresh the Digital RF index (online mode) and widen the usable
        # time range when new data has arrived on disk.
        # Returns True when the end bound moved forward, False otherwise.
        # print
        # print "%s not in range [%s, %s]" %(
        # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
        # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
        # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        # )
        print("[Reading] reloading metadata ...")

        try:
            self.digitalReadObj.reload(complete_update=True)
        except:
            # Fallback: recreate the reader from scratch.
            self.digitalReadObj = digital_rf.DigitalRFReader(self.path)

        start_index, end_index = self.digitalReadObj.get_bounds(
            self.__channelNameList[self.__channelList[0]])

        if start_index > self.__startUTCSecond * self.__sample_rate:
            self.__startUTCSecond = 1.0 * start_index / self.__sample_rate

        if end_index > self.__endUTCSecond * self.__sample_rate:
            self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
            print()
            print("[Reading] New timerange found [%s, %s] " % (
                datetime.datetime.utcfromtimestamp(
                    self.__startUTCSecond - self.__timezone),
                datetime.datetime.utcfromtimestamp(
                    self.__endUTCSecond - self.__timezone)
            ))

            return True

        return False
461 461
462 462 def timeit(self, toExecute):
463 463 t0 = time.time()
464 464 toExecute()
465 465 self.executionTime = time.time() - t0
466 466 if self.oldAverage is None:
467 467 self.oldAverage = self.executionTime
468 468 self.oldAverage = (self.executionTime + self.count *
469 469 self.oldAverage) / (self.count + 1.0)
470 470 self.count = self.count + 1.0
471 471 return
472 472
    def __readNextBlock(self, seconds=30, volt_scale=1):
        '''
        Advance the read cursor one block and fill self.__data_buffer with
        `samples_to_read` complex samples per subchannel.

        Returns True when a block was read, False when the time range is
        exhausted (or no channel produced data). Sets
        self.__flagDiscontinuousBlock when a gap or short read occurs.
        '''

        # Set the next data
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
            print ("[Reading] There are no more data into selected time-range")
            if self.__online:
                sleep(3)
                self.__reload()
            else:
                return False

            # After the reload, re-check whether new data became available.
            if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
                return False
            self.__thisUnixSample -= self.__samples_to_read

        indexChannel = 0

        dataOk = False

        for thisChannelName in self.__channelNameList:  # TODO SEVERAL CHANNELS?
            for indexSubchannel in range(self.__num_subchannels):
                try:
                    t0 = time()
                    result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                                  self.__samples_to_read,
                                                                  thisChannelName, sub_channel=indexSubchannel)
                    self.executionTime = time() - t0
                    if self.oldAverage is None:
                        self.oldAverage = self.executionTime
                    self.oldAverage = (
                        self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
                    self.count = self.count + 1.0

                except IOError as e:
                    # read next profile
                    self.__flagDiscontinuousBlock = True
                    print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
                    break

                if result.shape[0] != self.__samples_to_read:
                    # Short read: treat as a discontinuity and stop this channel.
                    self.__flagDiscontinuousBlock = True
                    print("[Reading] %s: Too few samples were found, just %d/%d  samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                             result.shape[0],
                                                                                             self.__samples_to_read))
                    break

                self.__data_buffer[indexSubchannel, :] = result * volt_scale
                indexChannel+=1

                dataOk = True

        self.__utctime = self.__thisUnixSample / self.__sample_rate

        if not dataOk:
            return False

        print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                      self.__samples_to_read,
                                                      self.__timeInterval))

        self.__bufferIndex = 0

        return True
541 541
    def __isBufferEmpty(self):
        # True when fewer than nSamples remain unread in the block buffer.
        return self.__bufferIndex > self.__samples_to_read - self.__nSamples  # 40960 - 40
544 544
545 545 def getData(self, seconds=30, nTries=5):
546 546 '''
547 547 This method gets the data from files and put the data into the dataOut object
548 548
549 549 In addition, increase el the buffer counter in one.
550 550
551 551 Return:
552 552 data : retorna un perfil de voltages (alturas * canales) copiados desde el
553 553 buffer. Si no hay mas archivos a leer retorna None.
554 554
555 555 Affected:
556 556 self.dataOut
557 557 self.profileIndex
558 558 self.flagDiscontinuousBlock
559 559 self.flagIsNewBlock
560 560 '''
561 561 #print("getdata")
562 562 err_counter = 0
563 563 self.dataOut.flagNoData = True
564 564
565 565 if self.__isBufferEmpty():
566 566 #print("hi")
567 567 self.__flagDiscontinuousBlock = False
568 568
569 569 while True:
570 570 #print ("q ha pasado")
571 571 if self.__readNextBlock():
572 572 break
573 573 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
574 574 raise schainpy.admin.SchainError('Error')
575 575 return
576 576
577 577 if self.__flagDiscontinuousBlock:
578 578 raise schainpy.admin.SchainError('discontinuous block found')
579 579 return
580 580
581 581 if not self.__online:
582 582 raise schainpy.admin.SchainError('Online?')
583 583 return
584 584
585 585 err_counter += 1
586 586 if err_counter > nTries:
587 587 raise schainpy.admin.SchainError('Max retrys reach')
588 588 return
589 589
590 590 print('[Reading] waiting %d seconds to read a new block' % seconds)
591 591 time.sleep(seconds)
592 592
593 593 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
594 594 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
595 595 self.dataOut.flagNoData = False
596 596 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
597 597 self.dataOut.profileIndex = self.profileIndex
598 598
599 599 self.__bufferIndex += self.__nSamples
600 600 self.profileIndex += 1
601 601
602 602 if self.profileIndex == self.dataOut.nProfiles:
603 603 self.profileIndex = 0
604 604
605 605 return True
606 606
    def printInfo(self):
        '''
        Print reader headers once; subsequent calls are no-ops.
        (Header printing itself is currently commented out.)
        '''
        if self.__printInfo == False:
            return

        # self.systemHeaderObj.printInfo()
        # self.radarControllerHeaderObj.printInfo()

        self.__printInfo = False
617 617
    def printNumberOfBlock(self):
        '''
        Intentionally a no-op (kept for interface compatibility).
        '''
        return
        # print self.profileIndex
623 623
    def run(self, **kwargs):
        '''
        Entry point called repeatedly by the controller: configure on the
        first call, then deliver one profile per call via getData().
        '''

        if not self.isConfig:
            self.setup(**kwargs)
        #self.i = self.i+1
        self.getData(seconds=self.__delay)

        return
635 635
636
636 @MPDecorator
637 637 class DigitalRFWriter(Operation):
638 638 '''
639 639 classdocs
640 640 '''
641 641
    def __init__(self, **kwargs):
        '''
        Constructor: initialize writer state; files are opened in setup().
        '''
        Operation.__init__(self, **kwargs)
        self.metadata_dict = {}     # metadata written alongside the RF data
        self.dataOut = None
        self.dtype = None
        self.oldAverage = 0         # running average of write time (seconds)
651 651
    def setHeader(self):
        # Collect the dataOut header fields that are persisted as
        # Digital Metadata next to the RF samples.

        self.metadata_dict['frequency'] = self.dataOut.frequency
        self.metadata_dict['timezone'] = self.dataOut.timeZone
        self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
        self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
        self.metadata_dict['heightList'] = self.dataOut.heightList
        self.metadata_dict['channelList'] = self.dataOut.channelList
        self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
        self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
        self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
        self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
        self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
        self.metadata_dict['type'] = self.dataOut.type
        self.metadata_dict['flagDataAsBlock']= getattr(
            self.dataOut, 'flagDataAsBlock', None)  # check this
668 668
669 669 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
670 670 '''
671 671 In this method we should set all initial parameters.
672 672 Input:
673 673 dataOut: Input data will also be outputa data
674 674 '''
675 675 self.setHeader()
676 676 self.__ippSeconds = dataOut.ippSeconds
677 677 self.__deltaH = dataOut.getDeltaH()
678 678 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
679 679 self.__dtype = dataOut.dtype
680 680 if len(dataOut.dtype) == 2:
681 681 self.__dtype = dataOut.dtype[0]
682 682 self.__nSamples = dataOut.systemHeaderObj.nSamples
683 683 self.__nProfiles = dataOut.nProfiles
684 684
685 685 if self.dataOut.type != 'Voltage':
686 686 raise 'Digital RF cannot be used with this data type'
687 687 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
688 688 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
689 689 else:
690 690 self.arr_data = numpy.ones((self.__nSamples, len(
691 691 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
692 692
693 693 file_cadence_millisecs = 1000
694 694
695 695 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
696 696 sample_rate_numerator = int(sample_rate_fraction.numerator)
697 697 sample_rate_denominator = int(sample_rate_fraction.denominator)
698 698 start_global_index = dataOut.utctime * self.__sample_rate
699 699
700 700 uuid = 'prueba'
701 701 compression_level = 0
702 702 checksum = False
703 703 is_complex = True
704 704 num_subchannels = len(dataOut.channelList)
705 705 is_continuous = True
706 706 marching_periods = False
707 707
708 708 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
709 709 fileCadence, start_global_index,
710 710 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
711 711 is_complex, num_subchannels, is_continuous, marching_periods)
712 712 metadata_dir = os.path.join(path, 'metadata')
713 713 os.system('mkdir %s' % (metadata_dir))
714 714 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
715 715 sample_rate_numerator, sample_rate_denominator,
716 716 metadataFile)
717 717 self.isConfig = True
718 718 self.currentSample = 0
719 719 self.oldAverage = 0
720 720 self.count = 0
721 721 return
722 722
    def writeMetadata(self):
        # Serialize the current dataOut headers and write them as Digital
        # Metadata at the sample index corresponding to dataOut.utctime.
        start_idx = self.__sample_rate * self.dataOut.utctime

        self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
        )
        self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
        )
        self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
        )
        self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
        return
734 734
    def timeit(self, toExecute):
        # Execute the callable and fold its wall-clock duration into the
        # running average (self.oldAverage) and counter (self.count).
        t0 = time()
        toExecute()
        self.executionTime = time() - t0
        if self.oldAverage is None:
            self.oldAverage = self.executionTime
        self.oldAverage = (self.executionTime + self.count *
                           self.oldAverage) / (self.count + 1.0)
        self.count = self.count + 1.0
        return
745 745
746 746 def writeData(self):
747 747 if self.dataOut.type != 'Voltage':
748 748 raise 'Digital RF cannot be used with this data type'
749 749 for channel in self.dataOut.channelList:
750 750 for i in range(self.dataOut.nFFTPoints):
751 751 self.arr_data[1][channel * self.dataOut.nFFTPoints +
752 752 i]['r'] = self.dataOut.data[channel][i].real
753 753 self.arr_data[1][channel * self.dataOut.nFFTPoints +
754 754 i]['i'] = self.dataOut.data[channel][i].imag
755 755 else:
756 756 for i in range(self.dataOut.systemHeaderObj.nSamples):
757 757 for channel in self.dataOut.channelList:
758 758 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
759 759 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
760 760
761 761 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
762 762 self.timeit(f)
763 763
764 764 return
765 765
    def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
        '''
        Entry point called repeatedly by the controller: configure and write
        the metadata on the first call, then write one data block per call.

        Inputs:
            dataOut: object with the data (returned unchanged).
        '''
        # print dataOut.__dict__
        self.dataOut = dataOut
        if not self.isConfig:
            self.setup(dataOut, path, frequency, fileCadence,
                       dirCadence, metadataCadence, **kwargs)
            self.writeMetadata()

        self.writeData()

        ## self.currentSample += 1
        # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
        # self.writeMetadata()
        ## if self.currentSample == self.__nProfiles: self.currentSample = 0

        return dataOut  # this return is not present in the 2.7 version
787 787
    def close(self):
        # Report the running average write time (ms) and close the writer.
        print('[Writing] - Closing files ')
        print('Average of writing to digital rf format is ', self.oldAverage * 1000)
        try:
            # Best-effort close: the writer may never have been opened.
            self.digitalWriteObj.close()
        except:
            pass
@@ -1,849 +1,850
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import os, sys
8 8 import time, datetime
9 9 import numpy
10 10 import fnmatch
11 11 import glob
12 12 from time import sleep
13 13
14 14 try:
15 15 import pyfits
16 16 except ImportError as e:
17 17 pass
18 18
19 19 from xml.etree.ElementTree import ElementTree
20 20
21 21 from .jroIO_base import isRadarFolder, isNumber
22 22 from schainpy.model.data.jrodata import Fits
23 23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit, MPDecorator
24 24 from schainpy.utils import log
25 25
26 26
class PyFits(object):
    """Thin helper around ``pyfits`` to assemble and write FITS files.

    Callers build a primary HDU (``cFImage``), float32 columns (``setColF``,
    ``writeData``), a binary table (``Ctable``), combine them (``CFile``)
    and dump the result to disk (``wFile``). References to the last created
    objects are kept on the instance.
    """
    name = None
    format = None
    array = None
    data = None
    thdulist = None
    prihdr = None
    hdu = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        """Create (and remember) a float32 FITS column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=values)
        return self.col1

    def writeData(self, name, format, data):
        """Create (and remember) a float32 data column; same layout as setColF."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=values)
        return self.col2

    def cFImage(self, idblock, year, month, day, hour, minute, second):
        """Build the primary HDU and stamp the acquisition timestamp fields."""
        self.hdu = pyfits.PrimaryHDU(idblock)
        for key, value in (("Year", year), ("Month", month), ("Day", day),
                           ("Hour", hour), ("Minute", minute), ("Second", second)):
            self.hdu.header.set(key, value)
        return self.hdu

    def Ctable(self, colList):
        """Build a binary-table HDU from a list of columns."""
        self.cols = pyfits.ColDefs(colList)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list that ``wFile`` will write out."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list, replacing any pre-existing file."""
        if os.path.isfile(filename):
            os.remove(filename)
        self.thdulist.writeto(filename)
89 89
90 90
class ParameterConf:
    """One ``<Parameter name="..." value="..."/>`` entry of a metadata XML file."""

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        # Both attributes start empty until readXml() fills them in.
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        """Load ``name``/``value`` from the XML element's attributes."""
        self.name = parmElement.get('name')
        self.value = parmElement.get('value')

    def getElementName(self):
        """Return the XML tag name this class maps to."""
        return self.ELEMENTNAME
103 103
class Metadata(object):
    """Parse a metadata XML file into a list of ParameterConf objects.

    The file is parsed on construction; every ``<Parameter>`` element found
    anywhere in the tree ends up in ``self.parmConfObjList``.
    """

    def __init__(self, filename):
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        """Parse *filename* and collect all Parameter elements."""
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # Fix: Element.getiterator() was deprecated since Python 3.2 and
        # removed in 3.9; Element.iter() is the supported equivalent with
        # the same document-order traversal.
        parmElementList = self.projectElement.iter(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
122 122
@MPDecorator
class FitsWriter(Operation):
    """Operation that writes spectra blocks to FITS files.

    Output files are created under ``<path>/dYYYYDDD/`` and named
    ``PYYYYDDDSSS.fits`` (SSS = per-day set number); a new file is started
    every ``dataBlocksPerFile`` blocks. Each block is appended as an
    ImageHDU carrying its UTC time, and the primary header tracks the block
    count in ``NBLOCK``.
    """

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.isConfig = False
        self.dataBlocksPerFile = None   # blocks per output file (set in setup)
        self.blockIndex = 0             # blocks written into the current file
        self.flagIsNewFile = 1          # forces setNextFile() on next putData()
        self.fitsObj = None
        self.optchar = 'P'              # filename prefix character
        self.ext = '.fits'
        self.setFile = 0                # per-day file (set) counter

    def setFitsHeader(self, dataOut, metadatafile=None):
        """Create the primary HDU with acquisition metadata and write it.

        Extra key/value pairs from *metadatafile* (a Metadata XML file) are
        merged into the header before writing. Also appends the height list
        as a HEIGHTLIST extension.
        """
        header_data = pyfits.PrimaryHDU()

        header_data.header['EXPNAME'] = "RADAR DATA"
        header_data.header['DATATYPE'] = "SPECTRA"
        header_data.header['COMMENT'] = ""

        if metadatafile:

            metadata4fits = Metadata(metadatafile)

            for parameter in metadata4fits.parmConfObjList:
                parm_name = parameter.name
                parm_value = parameter.value

                header_data.header[parm_name] = parm_value

        header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
        header_data.header['CHANNELLIST'] = str(dataOut.channelList)
        header_data.header['NCHANNELS'] = dataOut.nChannels
        #header_data.header['HEIGHTS'] = dataOut.heightList
        header_data.header['NHEIGHTS'] = dataOut.nHeights

        header_data.header['IPPSECONDS'] = dataOut.ippSeconds
        header_data.header['NCOHINT'] = dataOut.nCohInt
        header_data.header['NINCOHINT'] = dataOut.nIncohInt
        header_data.header['TIMEZONE'] = dataOut.timeZone
        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)

        self.addExtension(dataOut.heightList,'HEIGHTLIST')


    def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
        """Store output path and per-file block count; called once from run()."""
        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        # Re-open the current file in update mode so extensions can be appended.
        self.fitsObj = pyfits.open(self.filename, mode='update')


    def addExtension(self, data, tagname):
        """Append a named ImageHDU extension with *data* and flush to disk."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=tagname)
        #extension.header['TAG'] = tagname
        self.fitsObj.append(extension)
        self.write()

    def addData(self, data):
        """Append one data block as an ImageHDU, stamping its UTC time
        and updating the NBLOCK counter in the primary header."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex

        self.write()

    def write(self):
        # Flush pending HDU changes and close the handle (reopened per write).
        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()


    def setNextFile(self):
        """Start a new output file in the day folder, continuing the set
        numbering from the highest-numbered existing file.

        NOTE(review): uses os.mkdir, so the parent `path` must already
        exist -- confirm callers guarantee this.
        """
        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue set numbering from the last existing file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )

        filename = os.path.join( path, subfolder, thisFile )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        print('Writing the file: %s'%self.filename)

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        # Write the current spectra block and clear the new-file flag.
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0


    def __setNewBlock(self):
        """Return 1 if the current file can take another block, rotating
        to a new file when dataBlocksPerFile is reached; 0 on failure."""
        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        # Rotate file if needed, then write; returns 1 on success, 0 otherwise.
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        # Entry point per block: open the first file lazily, then write.
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None, **kwargs):
        """Operation entry point: configure on first call, then write the block."""
        if not(self.isConfig):
            self.setup(dataOut, path, dataBlocksPerFile=dataBlocksPerFile, metadatafile=metadatafile, **kwargs)
            self.isConfig = True
        self.putData()
285 286
286 @MPDecorator
287
class FitsReader(ProcessingUnit):
    """ProcessingUnit that reads radar data blocks from FITS files.

    Offline mode walks a tree of ``dYYYYDDD`` day folders under *path*,
    filters files by date/time range, and delivers one block (one ImageHDU)
    per call to ``run()`` through ``self.dataOut`` (a Fits data object).
    Online mode is declared but not implemented.
    """

#    __TIMEZONE = time.timezone

    # Metadata read from the FITS primary header (filled by readHeader)
    expName = None
    datetimestr = None
    utc = None
    nChannels = None
    nSamples = None
    dataBlocksPerFile = None
    comments = None
    lastUTTime = None
    header_dict = None
    data = None
    data_header_dict = None

    def __init__(self):#, **kwargs):
        """Initialize reader state and create the default Fits output object."""
        ProcessingUnit.__init__(self)#, **kwargs)
        self.isConfig = False
        self.ext = '.fits'
        self.setFile = 0
        self.flagNoMoreFiles = 0
        self.flagIsNewFile = 1
        self.flagDiscontinuousBlock = None
        self.fileIndex = None
        self.filename = None
        self.fileSize = None
        self.fitsObj = None
        self.timeZone = None
        self.nReadBlocks = 0
        self.nTotalBlocks = 0
        self.dataOut = self.createObjByDefault()
        self.maxTimeStep = 10# should be defined by the user through the setup() method
        self.blockIndex = 1

    def createObjByDefault(self):
        """Return the default output container (a Fits data object)."""
        dataObj = Fits()

        return dataObj

    def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
        """Return the file's datetime if its header DATETIME falls inside
        [startTime, endTime); return None otherwise or if unreadable."""
        try:
            fitsObj = pyfits.open(filename,'readonly')
        except:
            print("File %s can't be opened" %(filename))
            return None

        header = fitsObj[0].header
        struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
        utc = time.mktime(struct_time) - time.timezone #TIMEZONE should be a parameter of the FITS header

        ltc = utc
        if useLocalTime:
            ltc -= time.timezone
        thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
        thisTime = thisDatetime.time()

        if not ((startTime <= thisTime) and (endTime > thisTime)):
            return None

        return thisDatetime

    def __setNextFileOnline(self):
        # Online (realtime) reading is not supported for FITS input.
        raise NotImplementedError

    def __setNextFileOffline(self):
        """Open the next readable file from filenameList.

        Returns 1 on success; 0 (and sets flagNoMoreFiles) when the list
        is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print("No more Files")
                return 0

            filename = self.filenameList[idFile]

#            if not(self.__verifyFile(filename)):
#                continue

            fileSize = os.path.getsize(filename)
            fitsObj = pyfits.open(filename,'readonly')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fitsObj = fitsObj
        self.blockIndex = 0
        print("Setting the file: %s"%self.filename)

        return 1

    def __setValuesFromHeader(self):
        """Copy header-derived metadata into the output data object."""
        self.dataOut.header = self.header_dict
        self.dataOut.expName = self.expName

        self.dataOut.timeZone = self.timeZone
        self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
        self.dataOut.comments = self.comments
#        self.dataOut.timeInterval = self.timeInterval
        self.dataOut.channelList = self.channelList
        self.dataOut.heightList = self.heightList

        self.dataOut.nCohInt = self.nCohInt
        self.dataOut.nIncohInt = self.nIncohInt
        self.dataOut.ipp_sec = self.ippSeconds

    def readHeader(self):
        """Parse the primary HDU header and then the height-list extension.

        CHANNELLIST is stored as the str() of a Python list (e.g. "[0, 1]"),
        hence the split-based parsing below.
        """
        headerObj = self.fitsObj[0]

        self.header_dict = headerObj.header
        if 'EXPNAME' in list(headerObj.header.keys()):
            self.expName = headerObj.header['EXPNAME']

        if 'DATATYPE' in list(headerObj.header.keys()):
            self.dataType = headerObj.header['DATATYPE']

        self.datetimestr = headerObj.header['DATETIME']
        channelList = headerObj.header['CHANNELLIST']
        channelList = channelList.split('[')
        channelList = channelList[1].split(']')
        channelList = channelList[0].split(',')
        channelList = [int(ch) for ch in channelList]
        self.channelList = channelList
        self.nChannels = headerObj.header['NCHANNELS']
        self.nHeights = headerObj.header['NHEIGHTS']
        self.ippSeconds = headerObj.header['IPPSECONDS']
        self.nCohInt = headerObj.header['NCOHINT']
        self.nIncohInt = headerObj.header['NINCOHINT']
        self.dataBlocksPerFile = headerObj.header['NBLOCK']
        self.timeZone = headerObj.header['TIMEZONE']

#        self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt

        if 'COMMENT' in list(headerObj.header.keys()):
            self.comments = headerObj.header['COMMENT']

        self.readHeightList()

    def readHeightList(self):
        """Read the HEIGHTLIST extension that follows the primary HDU and
        leave blockIndex pointing at the first data extension."""
        self.blockIndex = self.blockIndex + 1
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def readExtension(self):
        """Read the extension at the current block index (height list)."""
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def setNextFile(self):
        """Advance to the next file (online/offline) and parse its header."""
        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.readHeader()
        self.__setValuesFromHeader()
        self.nReadBlocks = 0
#        self.blockIndex = 1
        return 1

    def searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.fits',
                            walk=True):
        """Collect FITS files inside [startDate, endDate] x [startTime, endTime).

        With walk=True, scans dYYYYDDD-style day folders under *path*;
        otherwise uses *path* directly. Returns (pathList, filenameList) or
        (None, None) when nothing matches. Also stores filenameList and
        datetimeList on the instance.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                if len(matchlist) == 0:
                    thisDate += datetime.timedelta(1)
                    continue
                for match in matchlist:
                    pathList.append(os.path.join(path,match,expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print("Any folder was found for the date range: %s-%s" %(startDate, endDate))
            return None, None

        print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate))

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for thisFile in fileList:

                filename = os.path.join(thisPath,thisFile)
                thisDatetime = self.isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print("Any file was found for the time range %s - %s" %(startTime, endTime))
            return None, None

        print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime))
        print()

        for i in range(len(filenameList)):
            print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()))

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def setup(self, path=None,
                    startDate=None,
                    endDate=None,
                    startTime=datetime.time(0,0,0),
                    endTime=datetime.time(23,59,59),
                    set=0,
                    expLabel = "",
                    ext = None,
                    online = False,
                    delay = 60,
                    walk = True):
        """Search the input files and open the first one.

        Exits the process (sys.exit) when no files are found -- legacy
        behavior kept for CLI compatibility.
        """
        if path == None:
            raise ValueError("The path is not valid")

        if ext == None:
            ext = self.ext

        if not(online):
            print("Searching files in offline mode ...")
            pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                            startTime=startTime, endTime=endTime,
                                                            set=set, expLabel=expLabel, ext=ext,
                                                            walk=walk)

            if not(pathList):
                print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                datetime.datetime.combine(startDate,startTime).ctime(),
                                                datetime.datetime.combine(endDate,endTime).ctime()))

                sys.exit(-1)

            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
            elif startDate != None:
                print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()))
            else:
                print("No files")

            sys.exit(-1)



    def readBlock(self):
        """Read the HDU at blockIndex into self.data and advance the counters."""
        dataObj = self.fitsObj[self.blockIndex]

        self.data = dataObj.data
        self.data_header_dict = dataObj.header
        self.utc = self.data_header_dict['UTCTIME']

        self.flagIsNewFile = 0
        self.blockIndex += 1
        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def __jumpToLastBlock(self):
        # Online-only fast-forward; not supported for FITS input.
        raise NotImplementedError

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        If the reading mode is offline it always returns 0.

        NOTE(review): this path references self.fp, self.processingHeaderObj,
        self.basicHeaderSize, self.nTries and self.__rdBasicHeader, none of
        which are set anywhere in this class -- it appears copied from the
        raw-data reader and is effectively dead while online mode raises
        NotImplementedError. Verify before enabling online reading.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1))
            sleep( self.delay )


        return 0

    def __setNewBlock(self):
        """Position the reader on the next block, rotating files when the
        current one is exhausted; flags time discontinuities between files."""
        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.utc

        if self.online:
            if self.__waitNewBlock():
                return 1

        if self.nReadBlocks < self.dataBlocksPerFile:
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.utc - self.lastUTTime

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1


    def readNextBlock(self):
        # Advance to the next block (possibly a new file) and read it.
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def printInfo(self):

        pass

    def getData(self):
        """Read the next block into self.dataOut.

        Returns a (code, message) tuple on exhaustion or error; on success
        just fills dataOut and clears flagNoData.
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return (0, 'No more files')

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if not(self.readNextBlock()):
            return (1, 'Error reading data')

        if self.data is None:
            self.dataOut.flagNoData = True
            return (0, 'No more data')

        self.dataOut.data = self.data
        self.dataOut.data_header = self.data_header_dict
        self.dataOut.utctime = self.utc

#        self.dataOut.header = self.header_dict
#        self.dataOut.expName = self.expName
#        self.dataOut.nChannels = self.nChannels
#        self.dataOut.timeZone = self.timeZone
#        self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
#        self.dataOut.comments = self.comments
#        #        self.dataOut.timeInterval = self.timeInterval
#        self.dataOut.channelList = self.channelList
#        self.dataOut.heightList = self.heightList
        self.dataOut.flagNoData = False
#        return self.dataOut.data

    def run(self, **kwargs):
        """ProcessingUnit entry point: configure on first call, then read."""
        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
@MPDecorator
class SpectraHeisWriter(Operation):
    """Write SpectraHeis power spectra to per-block FITS files.

    One FITS file is produced per processed block under
    ``<wrpath>/F<year><doy>_<timestamp>/``, containing a frequency column
    plus one power column (in dB) per channel.
    """

    setFile = None      # sequential file number within the output folder
    idblock = None      # global block counter stamped into the primary HDU
    doypath = None
    subfolder = None

    def __init__(self):
        Operation.__init__(self)
        self.wrObj = PyFits()
        self.nTotalBlocks = 0
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(str):
        """Return True if *str* can be converted to a float, False otherwise.

        Fix: this was originally declared as an instance method without a
        ``self`` parameter, so ``self.isNumber(x)`` raised TypeError. It is
        now a staticmethod, callable both on the class and on instances.
        (The parameter keeps its original name for compatibility, even
        though it shadows the builtin.)
        """
        try:
            float(str)
            return True
        except:
            return False

    def setup(self, dataOut, wrpath):
        """Create the output root folder if needed and keep references."""
        if not (os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.dataOut = dataOut

    def putData(self):
        """Dump the current block to a new FITS file and bump the counters."""
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"

        # First block: create a unique day folder F<year><doy>_<timestamp>.
        if self.doypath == None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year, name.tm_yday, time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join(self.wrpath, self.subfolder)
            os.mkdir(self.doypath)

        if self.setFile == None:
            self.setFile = 0

        # Output filename: D<year><doy>_<set>.fits
        thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)
        filename = os.path.join(self.wrpath, self.subfolder, thisFile)

        idblock = numpy.array([self.idblock], dtype="int64")
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=time.gmtime(self.dataOut.utctime).tm_year,
                                    month=time.gmtime(self.dataOut.utctime).tm_mon,
                                    day=time.gmtime(self.dataOut.utctime).tm_mday,
                                    hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                    minute=time.gmtime(self.dataOut.utctime).tm_min,
                                    second=time.gmtime(self.dataOut.utctime).tm_sec)

        # Frequency axis derived from the height (range) sampling.
        c = 3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) * (c / (2 * deltaHeight * 1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints) + 'E', array=freq)
        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        for i in range(nchannel):
            # Power spectrum per channel, converted to dB.
            col = self.wrObj.writeData(name="PCh" + str(i + 1),
                                       format=str(self.dataOut.nFFTPoints) + 'E',
                                       data=10 * numpy.log10(self.dataOut.data_spc[i, :]))
            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header, data)

        self.wrObj.wFile(filename)

        # Advance the per-folder file counter and the global block counter.
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Operation entry point: configure on first call, then write."""
        if not (self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
        return dataOut
@@ -1,595 +1,595
1 1 '''
2 2 Created on Aug 1, 2017
3 3
4 4 @author: Juan C. Espinoza
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import time
10 10 import json
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15 import h5py
16 16
17 17 import schainpy.admin
18 18 from schainpy.model.io.jroIO_base import LOCALTIME, Reader
19 19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
20 20 from schainpy.model.data.jrodata import Parameters
21 21 from schainpy.utils import log
22 22
23 23 try:
24 24 import madrigal.cedar
25 25 except:
26 26 pass
27 27
28 28 try:
29 29 basestring
30 30 except:
31 31 basestring = str
32 32
# Default experiment-catalog record written to Madrigal files when the user
# supplies no catalog information of their own.
DEF_CATALOG = {
    'principleInvestigator': 'Marco Milla',
    'expPurpose': '',
    'cycleTime': '',
    'correlativeExp': '',
    'sciRemarks': '',
    'instRemarks': ''
}

# Default header record fields for Madrigal output.
DEF_HEADER = {
    'kindatDesc': '',
    'analyst': 'Jicamarca User',
    'comments': '',
    'history': ''
}

# Madrigal instrument code -> three-letter mnemonic used in output filenames.
MNEMONICS = {
    10: 'jro',
    11: 'jbr',
    840: 'jul',
    13: 'jas',
    1000: 'pbr',
    1001: 'hbr',
    1002: 'obr',
    400: 'clr'

}

# Unix epoch shifted by the local timezone offset, used to convert local
# datetimes to UT seconds. NOTE(review): time.timezone ignores DST -- verify
# this is the intended behavior for deployments outside UTC-5.
UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)
62 62
def load_json(obj):
    '''
    Parse json as string instead of unicode.

    Accepts either a JSON string or an already-decoded object. Dict keys are
    coerced to str, string values are coerced to str, and lists/tuples are
    returned as lists with string elements coerced. Nested dicts are
    processed recursively; other values pass through unchanged.

    Fix: uses ``str`` directly instead of the Python 2 ``basestring`` shim
    declared at module level (this is a Python 3 code base), and iterates
    ``items()`` without materializing an intermediate list.
    '''

    if isinstance(obj, str):
        iterable = json.loads(obj)
    else:
        iterable = obj

    if isinstance(iterable, dict):
        return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, str) else v
                for k, v in iterable.items()}
    elif isinstance(iterable, (list, tuple)):
        return [str(v) if isinstance(v, str) else v for v in iterable]

    return iterable
80 80
81 @MPDecorator
81
82 82 class MADReader(Reader, ProcessingUnit):
83 83
84 84 def __init__(self):
85 85
86 86 ProcessingUnit.__init__(self)
87 87
88 88 self.dataOut = Parameters()
89 89 self.counter_records = 0
90 90 self.nrecords = None
91 91 self.flagNoMoreFiles = 0
92 92 self.filename = None
93 93 self.intervals = set()
94 94 self.datatime = datetime.datetime(1900,1,1)
95 95 self.format = None
96 96 self.filefmt = "***%Y%m%d*******"
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 self.oneDDict = load_json(self.oneDDict)
102 102 self.twoDDict = load_json(self.twoDDict)
103 103 self.ind2DList = load_json(self.ind2DList)
104 104 self.independentParam = self.ind2DList[0]
105 105
106 106 if self.path is None:
107 107 raise ValueError('The path is not valid')
108 108
109 109 self.open_file = open
110 110 self.open_mode = 'rb'
111 111
112 112 if self.format is None:
113 113 raise ValueError('The format is not valid choose simple or hdf5')
114 114 elif self.format.lower() in ('simple', 'txt'):
115 115 self.ext = '.txt'
116 116 elif self.format.lower() in ('cedar',):
117 117 self.ext = '.001'
118 118 else:
119 119 self.ext = '.hdf5'
120 120 self.open_file = h5py.File
121 121 self.open_mode = 'r'
122 122
123 123 if self.online:
124 124 log.log("Searching files in online mode...", self.name)
125 125
126 126 for nTries in range(self.nTries):
127 127 fullpath = self.searchFilesOnLine(self.path, self.startDate,
128 128 self.endDate, self.expLabel, self.ext, self.walk,
129 129 self.filefmt, self.folderfmt)
130 130
131 131 try:
132 132 fullpath = next(fullpath)
133 133 except:
134 134 fullpath = None
135 135
136 136 if fullpath:
137 137 break
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
145 145 if not(fullpath):
146 146 raise schainpy.admin.SchainError(
147 147 'There isn\'t any valid file in {}'.format(self.path))
148 148
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153
154 154 self.setNextFile()
155 155
156 156 def readFirstHeader(self):
157 157 '''Read header and data'''
158 158
159 159 self.parseHeader()
160 160 self.parseData()
161 161 self.blockIndex = 0
162 162
163 163 return
164 164
165 165 def parseHeader(self):
166 166 '''
167 167 '''
168 168
169 169 self.output = {}
170 170 self.version = '2'
171 171 s_parameters = None
172 172 if self.ext == '.txt':
173 173 self.parameters = [s.strip().lower() for s in self.fp.readline().decode().strip().split(' ') if s]
174 174 elif self.ext == '.hdf5':
175 175 self.metadata = self.fp['Metadata']
176 176 if '_record_layout' in self.metadata:
177 177 s_parameters = [s[0].lower().decode() for s in self.metadata['Independent Spatial Parameters']]
178 178 self.version = '3'
179 179 self.parameters = [s[0].lower().decode() for s in self.metadata['Data Parameters']]
180 180
181 181 log.success('Parameters found: {}'.format(self.parameters),
182 182 'MADReader')
183 183 if s_parameters:
184 184 log.success('Spatial parameters found: {}'.format(s_parameters),
185 185 'MADReader')
186 186
187 187 for param in list(self.oneDDict.keys()):
188 188 if param.lower() not in self.parameters:
189 189 log.warning(
190 190 'Parameter {} not found will be ignored'.format(
191 191 param),
192 192 'MADReader')
193 193 self.oneDDict.pop(param, None)
194 194
195 195 for param, value in list(self.twoDDict.items()):
196 196 if param.lower() not in self.parameters:
197 197 log.warning(
198 198 'Parameter {} not found, it will be ignored'.format(
199 199 param),
200 200 'MADReader')
201 201 self.twoDDict.pop(param, None)
202 202 continue
203 203 if isinstance(value, list):
204 204 if value[0] not in self.output:
205 205 self.output[value[0]] = []
206 206 self.output[value[0]].append([])
207 207
    def parseData(self):
        '''
        Load the file payload and initialize record bookkeeping.

        Sets self.data, self.ranges (unique values of the independent
        parameter, e.g. altitude), self.counter_records and self.nrecords.
        HDF5 data lives in the 'Data'/'Table Layout' table; record numbers
        come from its 'recno' column.
        '''

        if self.ext == '.txt':
            # 'missing' tokens in the text table become NaN
            self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
            self.nrecords = self.data.shape[0]
            self.ranges = numpy.unique(self.data[:,self.parameters.index(self.independentParam.lower())])
            self.counter_records = 0
        elif self.ext == '.hdf5':
            self.data = self.fp['Data']
            self.ranges = numpy.unique(self.data['Table Layout'][self.independentParam.lower()])
            self.times = numpy.unique(self.data['Table Layout']['ut1_unix'])
            self.counter_records = int(self.data['Table Layout']['recno'][0])
            self.nrecords = int(self.data['Table Layout']['recno'][-1])
223 223
    def readNextBlock(self):
        '''
        Advance to the next record that falls inside the configured window.

        Records dated outside [startDate+startTime, endDate+endTime] are read
        and skipped with a warning; when the current file is exhausted,
        setNextFile() is invoked to open the next one.  Returns 1 once a
        valid record has been loaded into self.buffer by readBlock().
        '''

        while True:
            self.flagDiscontinuousBlock = 0
            if self.counter_records == self.nrecords:
                # Current file exhausted: move on to the next one
                self.setNextFile()

            self.readBlock()

            if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
               (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
                log.warning(
                    'Reading Record No. {}/{} -> {} [Skipping]'.format(
                        self.counter_records,
                        self.nrecords,
                        self.datatime.ctime()),
                    'MADReader')
                continue
            break

        log.log(
            'Reading Record No. {}/{} -> {}'.format(
                self.counter_records,
                self.nrecords,
                self.datatime.ctime()),
            'MADReader')

        return 1
252 252
    def readBlock(self):
        '''
        Read one record (all rows sharing a timestamp) into self.buffer.

        For '.txt' files consecutive rows are grouped while their timestamp
        (first six columns: Y M D h m s) matches; for '.hdf5' files the rows
        with the current 'recno' are selected.  The gap between consecutive
        timestamps is accumulated in self.intervals, and a date change raises
        self.flagDiscontinuousBlock.
        '''
        dum = []
        if self.ext == '.txt':
            dt = self.data[self.counter_records][:6].astype(int)
            if datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5]).date() > self.datatime.date():
                self.flagDiscontinuousBlock = 1
            self.datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
            while True:
                dt = self.data[self.counter_records][:6].astype(int)
                datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
                if datatime == self.datatime:
                    # Same timestamp: keep accumulating rows for this record
                    dum.append(self.data[self.counter_records])
                    self.counter_records += 1
                    if self.counter_records == self.nrecords:
                        break
                    continue
                # Timestamp changed: note the interval and stop this record
                self.intervals.add((datatime-self.datatime).seconds)
                break
        elif self.ext == '.hdf5':
            datatime = datetime.datetime.utcfromtimestamp(
                self.times[self.counter_records])
            dum = self.data['Table Layout'][self.data['Table Layout']['recno']==self.counter_records]
            self.intervals.add((datatime-self.datatime).seconds)
            if datatime.date()>self.datatime.date():
                self.flagDiscontinuousBlock = 1
            self.datatime = datatime
            self.counter_records += 1

        self.buffer = numpy.array(dum)
        return
285 285
    def set_output(self):
        '''
        Copy the current record from self.buffer into self.dataOut.

        1D parameters become scalar attributes; 2D parameters are regridded
        onto self.ranges (positions without data stay NaN).  List-valued
        twoDDict entries are gathered into self.output arrays and their
        parameter names collected in dataOut.parameters.  Also updates
        timing/flag attributes on dataOut.
        '''

        parameters = [None for __ in self.parameters]

        # Scalars taken from the first row of the record
        for param, attr in list(self.oneDDict.items()):
            x = self.parameters.index(param.lower())
            setattr(self.dataOut, attr, self.buffer[0][x])

        for param, value in list(self.twoDDict.items()):
            # NaN-filled profile over the full independent-parameter grid
            dummy = numpy.zeros(self.ranges.shape) + numpy.nan
            if self.ext == '.txt':
                x = self.parameters.index(param.lower())
                y = self.parameters.index(self.independentParam.lower())
                ranges = self.buffer[:,y]
                #if self.ranges.size == ranges.size:
                #    continue
                index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
                dummy[index] = self.buffer[:,x]
            else:
                ranges = self.buffer[self.independentParam.lower()]
                index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
                dummy[index] = self.buffer[param.lower()]

            if isinstance(value, str):
                if value not in self.independentParam:
                    setattr(self.dataOut, value, dummy.reshape(1,-1))
            elif isinstance(value, list):
                # Slot pre-created by parseHeader(); index = position in array
                self.output[value[0]][value[1]] = dummy
                parameters[value[1]] = param
        for key, value in list(self.output.items()):
            setattr(self.dataOut, key, numpy.array(value))

        self.dataOut.parameters = [s for s in parameters if s]
        self.dataOut.heightList = self.ranges
        self.dataOut.utctime = (self.datatime - datetime.datetime(1970, 1, 1)).total_seconds()
        self.dataOut.utctimeInit = self.dataOut.utctime
        self.dataOut.paramInterval = min(self.intervals)
        self.dataOut.useLocalTime = False
        self.dataOut.flagNoData = False
        self.dataOut.nrecords = self.nrecords
        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
330 330
331 331 def getData(self):
332 332 '''
333 333 Storing data from databuffer to dataOut object
334 334 '''
335 335
336 336 if not self.readNextBlock():
337 337 self.dataOut.flagNoData = True
338 338 return 0
339 339
340 340 self.set_output()
341 341
342 342 return 1
343 343
344 344 def run(self, **kwargs):
345 345
346 346 if not(self.isConfig):
347 347 self.setup(**kwargs)
348 348 self.isConfig = True
349 349
350 350 self.getData()
351 351
352 352 return
353 353
@MPDecorator
class MADWriter(Operation):
    '''Writing module for Madrigal files

    type: external

    Inputs:
        path path where files will be created
        oneDDict json of one-dimensional parameters in record where keys
            are Madrigal codes (integers or mnemonics) and values the corresponding
            dataOut attribute e.g: {
                'gdlatr': 'lat',
                'gdlonr': 'lon',
                'gdlat2':'lat',
                'glon2':'lon'}
        ind2DList list of independent spatial two-dimensional parameters e.g:
            ['heigthList']
        twoDDict json of two-dimensional parameters in record where keys
            are Madrigal codes (integers or mnemonics) and values the corresponding
            dataOut attribute if multidimensional array specify as tupple
            ('attr', pos) e.g: {
                'gdalt': 'heightList',
                'vn1p2': ('data_output', 0),
                'vn2p2': ('data_output', 1),
                'vn3': ('data_output', 2),
                'snl': ('data_SNR', 'db')
                }
        metadata json of madrigal metadata (kinst, kindat, catalog and header)
        format hdf5, cedar
        blocks number of blocks per file'''

    __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
    # Madrigal fill value used in place of NaN samples
    missing = -32767

    def __init__(self):
        # counter tracks blocks written to the currently open file
        Operation.__init__(self)
        self.dataOut = Parameters()
        self.counter = 0
        self.path = None
        self.fp = None

    def run(self, dataOut, path, oneDDict, ind2DList='[]', twoDDict='{}',
            metadata='{}', format='cedar', **kwargs):
        '''Entry point: configure on first call, then write each block received.'''

        if not self.isConfig:
            self.setup(path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs)
            self.isConfig = True

        self.dataOut = dataOut
        self.putData()
        return 1

    def setup(self, path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs):
        '''
        Configure Operation: decode the JSON parameter maps, pick the output
        extension from `format` and build self.keys (2D parameter names
        excluding the range/altitude axis itself).
        '''

        self.path = path
        self.blocks = kwargs.get('blocks', None)
        self.counter = 0
        self.oneDDict = load_json(oneDDict)
        self.twoDDict = load_json(twoDDict)
        self.ind2DList = load_json(ind2DList)
        meta = load_json(metadata)
        self.kinst = meta.get('kinst')
        self.kindat = meta.get('kindat')
        self.catalog = meta.get('catalog', DEF_CATALOG)
        self.header = meta.get('header', DEF_HEADER)
        if format == 'cedar':
            self.ext = '.dat'
            self.extra_args = {}
        elif format == 'hdf5':
            self.ext = '.hdf5'
            self.extra_args = {'ind2DList': self.ind2DList}

        # 2D data columns; 'range'/'gdalt' are the independent axis, not data
        self.keys = [k.lower() for k in self.twoDDict]
        if 'range' in self.keys:
            self.keys.remove('range')
        if 'gdalt' in self.keys:
            self.keys.remove('gdalt')

    def setFile(self):
        '''
        Create new cedar file object.

        Builds the output name from the instrument mnemonic and the block's
        UTC timestamp, deleting any pre-existing file.  Returns 1 on success,
        None if the cedar object could not be created.
        '''

        self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
        date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)

        filename = '{}{}{}'.format(self.mnemonic,
                                   date.strftime('%Y%m%d_%H%M%S'),
                                   self.ext)

        self.fullname = os.path.join(self.path, filename)

        if os.path.isfile(self.fullname) :
            log.warning(
                'Destination file {} already exists, previous file deleted.'.format(
                    self.fullname),
                'MADWriter')
            os.remove(self.fullname)

        try:
            log.success(
                'Creating file: {}'.format(self.fullname),
                'MADWriter')
            if not os.path.exists(self.path):
                os.makedirs(self.path)
            self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
        except ValueError as e:
            log.error(
                'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
                'MADWriter')
            return

        return 1

    def writeBlock(self):
        '''
        Add data records to cedar file taking data from oneDDict and twoDDict
        attributes.
        Allowed parameters in: parcodes.tab

        NaNs are replaced by self.missing for '.dat' output; '_db'-suffixed
        attributes are channel-averaged and converted to dB.  Only ranges
        with at least one non-NaN value are written to the record.
        '''

        startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
        endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
        heights = self.dataOut.heightList

        if self.ext == '.dat':
            # In-place NaN -> missing substitution required by the cedar format
            for key, value in list(self.twoDDict.items()):
                if isinstance(value, str):
                    data = getattr(self.dataOut, value)
                    invalid = numpy.isnan(data)
                    data[invalid] = self.missing
                elif isinstance(value, (tuple, list)):
                    attr, key = value
                    data = getattr(self.dataOut, attr)
                    invalid = numpy.isnan(data)
                    data[invalid] = self.missing

        out = {}
        for key, value in list(self.twoDDict.items()):
            key = key.lower()
            if isinstance(value, str):
                if 'db' in value.lower():
                    # '<attr>_db': average over channels, then convert to dB
                    tmp = getattr(self.dataOut, value.replace('_db', ''))
                    SNRavg = numpy.average(tmp, axis=0)
                    tmp = 10*numpy.log10(SNRavg)
                else:
                    tmp = getattr(self.dataOut, value)
                out[key] = tmp.flatten()[:len(heights)]
            elif isinstance(value, (tuple, list)):
                attr, x = value
                data = getattr(self.dataOut, attr)
                out[key] = data[int(x)][:len(heights)]

        # Keep only ranges where at least one parameter has data
        a = numpy.array([out[k] for k in self.keys])
        nrows = numpy.array([numpy.isnan(a[:, x]).all() for x in range(len(heights))])
        index = numpy.where(nrows == False)[0]

        # NOTE(review): microsecond/10000 yields a float (centiseconds) in
        # Python 3 — confirm madrigal.cedar accepts a non-integer here
        rec = madrigal.cedar.MadrigalDataRecord(
            self.kinst,
            self.kindat,
            startTime.year,
            startTime.month,
            startTime.day,
            startTime.hour,
            startTime.minute,
            startTime.second,
            startTime.microsecond/10000,
            endTime.year,
            endTime.month,
            endTime.day,
            endTime.hour,
            endTime.minute,
            endTime.second,
            endTime.microsecond/10000,
            list(self.oneDDict.keys()),
            list(self.twoDDict.keys()),
            len(index),
            **self.extra_args
        )

        # Setting 1d values
        for key in self.oneDDict:
            rec.set1D(key, getattr(self.dataOut, self.oneDDict[key]))

        # Setting 2d values
        nrec = 0
        for n in index:
            for key in out:
                rec.set2D(key, nrec, out[key][n])
            nrec += 1

        self.fp.append(rec)
        # Flush periodically for hdf5 output; log progress every 20 records
        if self.ext == '.hdf5' and self.counter % 500 == 0 and self.counter > 0:
            self.fp.dump()
        if self.counter % 20 == 0 and self.counter > 0:
            log.log(
                'Writing {} records'.format(
                    self.counter),
                'MADWriter')

    def setHeader(self):
        '''
        Create an add catalog and header to cedar file

        Finalizes the current output file (write/dump + close) and appends
        the configured catalog and header records to it.
        '''

        log.success('Closing file {}'.format(self.fullname), 'MADWriter')

        if self.ext == '.dat':
            self.fp.write()
        else:
            self.fp.dump()
            self.fp.close()

        header = madrigal.cedar.CatalogHeaderCreator(self.fullname)
        header.createCatalog(**self.catalog)
        header.createHeader(**self.header)
        header.write()

    def putData(self):
        '''
        Write the current block, rotating to a new file when a discontinuity
        occurs or the per-file block budget (self.blocks) is reached.
        '''

        if self.dataOut.flagNoData:
            return 0

        if self.dataOut.flagDiscontinuousBlock or self.counter == self.blocks:
            if self.counter > 0:
                # Close out the previous file before starting a new one
                self.setHeader()
            self.counter = 0

        if self.counter == 0:
            self.setFile()

        self.writeBlock()
        self.counter += 1

    def close(self):
        # Finalize the last (possibly partial) file on shutdown
        if self.counter > 0:
            self.setHeader()
@@ -1,1435 +1,1435
1 1 import numpy
2 2 import time
3 3 import os
4 4 import h5py
5 5 import re
6 6 import datetime
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 @MPDecorator
14
class ParamReader(JRODataReader,ProcessingUnit):
    '''
    Reads HDF5 format files written by ParamWriter.

    Parameters (via setup kwargs):
        path       base directory to search
        startDate  first date to read (datetime.date)
        endDate    last date to read (datetime.date)
        startTime  daily window start (datetime.time)
        endTime    daily window end (datetime.time)
        walk       whether to descend into per-day subfolders
        ext        file extension (default '.hdf5')
        timezone   'lt' shifts file times by -5 h (local time); anything else = UTC
    '''

    ext = ".hdf5"
    optchar = "D"
    timezone = None
    startTime = None
    endTime = None
    fileIndex = None
    utcList = None #To select data in the utctime list
    blockList = None #List to blocks to be read from the file
    blocksPerFile = None #Number of blocks to be read
    blockIndex = None
    path = None
    #List of Files
    filenameList = None
    datetimeList = None
    #Hdf5 File
    listMetaname = None
    listMeta = None
    listDataname = None
    listData = None
    listShapes = None
    fp = None
    #dataOut reconstruction
    dataOut = None

    def __init__(self):#, **kwargs):
        ProcessingUnit.__init__(self) #, **kwargs)
        self.dataOut = Parameters()
        return

    def setup(self, **kwargs):
        '''
        Configure the reader: locate files in the requested date/time range,
        read the metadata of the first file and open the first data file.
        Exits the process if no file matches.
        '''

        path = kwargs['path']
        startDate = kwargs['startDate']
        endDate = kwargs['endDate']
        startTime = kwargs['startTime']
        endTime = kwargs['endTime']
        walk = kwargs['walk']
        if 'ext' in kwargs:
            ext = kwargs['ext']
        else:
            ext = '.hdf5'
        if 'timezone' in kwargs:
            self.timezone = kwargs['timezone']
        else:
            self.timezone = 'lt'

        print("[Reading] Searching files in offline mode ...")
        pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                         startTime=startTime, endTime=endTime,
                                                         ext=ext, walk=walk)

        if not(filenameList):
            print("There is no files into the folder: %s"%(path))
            sys.exit(-1)

        self.fileIndex = -1
        self.startTime = startTime
        self.endTime = endTime

        self.__readMetadata()

        self.__setNextFileOffline()

        return

    def searchFilesOffLine(self,
                           path,
                           startDate=None,
                           endDate=None,
                           startTime=datetime.time(0,0,0),
                           endTime=datetime.time(23,59,59),
                           ext='.hdf5',
                           walk=True):
        '''
        Find data files inside `path` matching the date range, then filter
        them by the daily time window.  Returns (pathList, filenameList),
        or (None, None) when nothing matches.
        '''

        expLabel = ''
        self.filenameList = []
        self.datetimeList = []

        pathList = []

        JRODataObj = JRODataReader()
        dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

        if dateList == []:
            print("[Reading] No *%s files in %s from %s to %s)"%(ext, path,
                                                                 datetime.datetime.combine(startDate,startTime).ctime(),
                                                                 datetime.datetime.combine(endDate,endTime).ctime()))

            return None, None

        if len(dateList) > 1:
            print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate))
        else:
            print("[Reading] data was found for the date %s" %(dateList[0]))

        filenameList = []
        datetimeList = []

        #----------------------------------------------------------------------------------

        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                # Fast date check first, then the more expensive time check
                if not isFileInDateRange(filename, startDate, endDate):
                    continue

                thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print("[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
            return None, None

        print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime))
        print()

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime):

        """
        Return the file's first datetime if it holds data within the
        requested daily time window, otherwise None.

        Inputs:
            filename : full path of the HDF5 data file
            startDate : start date of the selected range (datetime.date)
            endDate : end date of the selected range (datetime.date)
            startTime : start time of the selected range (datetime.time)
            endTime : end time of the selected range (datetime.time)

        Raises:
            IOError if the file does not exist or cannot be opened.
        """

        try:
            fp = h5py.File(filename,'r')
            grp1 = fp['Data']

        except IOError:
            traceback.print_exc()
            raise IOError("The file %s can't be opened" %(filename))

        #In case has utctime attribute
        grp2 = grp1['utctime']
        # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time
        thisUtcTime = grp2.value[0]

        fp.close()

        # 'lt' timezone: shift stored times by -5 h (local time offset)
        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds()

        #General case
        # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
        #-----------o----------------------------o-----------
        # startTime endTime

        if endTime >= startTime:
            thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
            if numpy.any(thisUtcLog): #If there is one block between the hours mentioned
                return thisDatetime
            return None

        #If endTime < startTime then endTime belongs to the next day
        #<<<<<<<<<<<o o>>>>>>>>>>>
        #-----------o----------------------------o-----------
        # endTime startTime

        if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
            return None

        if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
            return None

        if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
            return None

        return thisDatetime

    def __setNextFileOffline(self):
        # Open the next file from filenameList; raises SchainError when the
        # list is exhausted.  Returns 1 on success.

        self.fileIndex += 1
        idFile = self.fileIndex

        if not(idFile < len(self.filenameList)):
            raise schainpy.admin.SchainError("No more Files")
            return 0  # NOTE(review): unreachable — the raise above always fires

        filename = self.filenameList[idFile]
        filePointer = h5py.File(filename,'r')
        self.filename = filename
        self.fp = filePointer

        print("Setting the file: %s"%self.filename)

        self.__setBlockList()
        self.__readData()
        self.blockIndex = 0
        return 1

    def __setBlockList(self):
        '''
        Selects the data within the times defined

        self.fp
        self.startTime
        self.endTime

        self.blockList
        self.blocksPerFile

        '''
        fp = self.fp
        startTime = self.startTime
        endTime = self.endTime

        grp = fp['Data']
        thisUtcTime = grp['utctime'].value.astype(numpy.float)[0]

        #ERROOOOR
        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        # Indices of the blocks inside [startTime, endTime)
        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)

        return

    def __readMetadata(self):
        '''
        Reads Metadata

        Reads the 'Metadata' group of the first file: the 'array dimensions'
        table becomes self.listShapes (name -> 5-element shape descriptor);
        every other dataset is stored in self.listMetaname/self.listMeta.
        '''

        filename = self.filenameList[0]
        fp = h5py.File(filename,'r')
        gp = fp['Metadata']

        listMetaname = []
        listMetadata = []
        for item in list(gp.items()):
            name = item[0]

            if name=='array dimensions':
                table = gp[name][:]
                listShapes = {}
                for shapes in table:
                    listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4],shapes[5]])
            else:
                data = gp[name].value
                listMetaname.append(name)
                listMetadata.append(data)

        self.listShapes = listShapes
        self.listMetaname = listMetaname
        self.listMeta = listMetadata

        fp.close()
        return

    def __readData(self):
        # Load every dataset in the 'Data' group, reshaped per its
        # 'array dimensions' descriptor
        grp = self.fp['Data']
        listdataname = []
        listdata = []

        for item in list(grp.items()):
            name = item[0]
            listdataname.append(name)

            array = self.__setDataArray(grp[name],self.listShapes[name])
            listdata.append(array)

        self.listDataname = listdataname
        self.listData = listdata
        return

    def __setDataArray(self, dataset, shapes):
        '''
        Rebuild a numpy array from an HDF5 dataset according to its shape
        descriptor: [nDims, dim2, dim1, dim0, mode].  mode 0 = one dataset
        per channel, mode 1 = one 'tableN' dataset per parameter,
        mode 2 = single concatenated 'table0' (meteors).  Only the blocks
        selected in self.blockList are returned.
        '''

        nDims = shapes[0]
        nDim2 = shapes[1] #Dimension 0
        nDim1 = shapes[2] #Dimension 1, number of Points or Parameters
        nDim0 = shapes[3] #Dimension 2, number of samples or ranges
        mode = shapes[4] #Mode of storing
        blockList = self.blockList
        blocksPerFile = self.blocksPerFile

        #Depending on what mode the data was stored
        if mode == 0: #Divided in channels
            arrayData = dataset.value.astype(numpy.float)[0][blockList]
        if mode == 1: #Divided in parameter
            strds = 'table'
            nDatas = nDim1
            newShapes = (blocksPerFile,nDim2,nDim0)
        elif mode==2: #Concatenated in a table
            strds = 'table0'
            arrayData = dataset[strds].value
            #Selecting part of the dataset
            utctime = arrayData[:,0]
            u, indices = numpy.unique(utctime, return_index=True)

            if blockList.size != indices.size:
                indMin = indices[blockList[0]]
                if blockList[1] + 1 >= indices.size:
                    arrayData = arrayData[indMin:,:]
                else:
                    indMax = indices[blockList[1] + 1]
                    arrayData = arrayData[indMin:indMax,:]
            return arrayData

        # One dimension
        if nDims == 0:
            arrayData = dataset.value.astype(numpy.float)[0][blockList]

        # Two dimensions
        elif nDims == 2:
            arrayData = numpy.zeros((blocksPerFile,nDim1,nDim0))
            newShapes = (blocksPerFile,nDim0)
            nDatas = nDim1

            for i in range(nDatas):
                data = dataset[strds + str(i)].value
                arrayData[:,i,:] = data[blockList,:]

        # Three dimensions
        else:
            arrayData = numpy.zeros((blocksPerFile,nDim2,nDim1,nDim0))
            for i in range(nDatas):

                data = dataset[strds + str(i)].value

                for b in range(blockList.size):
                    arrayData[b,:,i,:] = data[:,:,blockList[b]]

        return arrayData

    def __setDataOut(self):
        # Publish metadata and the current block of every dataset as
        # attributes of self.dataOut
        listMeta = self.listMeta
        listMetaname = self.listMetaname
        listDataname = self.listDataname
        listData = self.listData
        listShapes = self.listShapes

        blockIndex = self.blockIndex
        # blockList = self.blockList

        for i in range(len(listMeta)):
            setattr(self.dataOut,listMetaname[i],listMeta[i])

        for j in range(len(listData)):
            nShapes = listShapes[listDataname[j]][0]
            mode = listShapes[listDataname[j]][4]
            if nShapes == 1:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
            elif nShapes > 1:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex,:])
            elif mode==0:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
            #Mode Meteors
            elif mode ==2:
                selectedData = self.__selectDataMode2(listData[j], blockIndex)
                setattr(self.dataOut, listDataname[j], selectedData)
        return

    def __selectDataMode2(self, data, blockIndex):
        # Mode-2 (meteor) tables: rows belonging to block `blockIndex` are
        # those sharing the blockIndex-th unique utctime (column 0)
        utctime = data[:,0]
        aux, indices = numpy.unique(utctime, return_inverse=True)
        selInd = numpy.where(indices == blockIndex)[0]
        selData = data[selInd,:]

        return selData

    def getData(self):
        # Deliver one block per call, advancing to the next file when the
        # current one is exhausted
        if self.blockIndex==self.blocksPerFile:
            if not( self.__setNextFileOffline() ):
                self.dataOut.flagNoData = True
                return 0

        self.__setDataOut()
        self.dataOut.flagNoData = False

        self.blockIndex += 1

        return

    def run(self, **kwargs):
        # Processing-unit entry point: configure once, then read one block
        # per invocation
        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

        return
461 461
462 462 @MPDecorator
463 463 class ParamWriter(Operation):
464 464 '''
465 465 HDF5 Writer, stores parameters data in HDF5 format files
466 466
467 467 path: path where the files will be stored
468 468 blocksPerFile: number of blocks that will be saved in per HDF5 format file
469 469 mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
470 470 metadataList: list of attributes that will be stored as metadata
471 471 dataList: list of attributes that will be stores as data
472 472 '''
473 473
474 474 ext = ".hdf5"
475 475 optchar = "D"
476 476 metaoptchar = "M"
477 477 metaFile = None
478 478 filename = None
479 479 path = None
480 480 setFile = None
481 481 fp = None
482 482 grp = None
483 483 ds = None
484 484 firsttime = True
485 485 #Configurations
486 486 blocksPerFile = None
487 487 blockIndex = None
488 488 dataOut = None
489 489 #Data Arrays
490 490 dataList = None
491 491 metadataList = None
492 492 dsList = None #List of dictionaries with dataset properties
493 493 tableDim = None
494 494 dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
495 495 currentDay = None
496 496 lastTime = None
497 497 setType = None
498 498
    def __init__(self):
        """Create an unconfigured writer; setup() must run before putData()."""
        Operation.__init__(self)
        return
503 503
    def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
        '''
        Configure the writer and build the dataset layout.

        For every attribute in dataList a descriptor dict (dsDict) and a row
        of the 'array dimensions' table are produced.  Storage mode per
        attribute: 0 = scalar, 2 = meteor table, otherwise mode 1 with one
        'tableN' dataset per parameter.  Returns 0 (unconfigured) when any
        requested attribute is currently None on dataOut.
        '''
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = dataList
        self.dataOut = dataOut
        self.mode = mode
        # A scalar `mode` applies to every entry of dataList; default is mode 1
        if self.mode is not None:
            self.mode = numpy.zeros(len(self.dataList)) + mode
        else:
            self.mode = numpy.ones(len(self.dataList))

        self.setType = setType

        # One row per attribute: [nDims, dim2, dim1, dim0, mode]
        arrayDim = numpy.zeros((len(self.dataList),5))

        #Table dimensions
        dtype0 = self.dtype
        tableList = []

        #Dictionary and list of tables
        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            dataAux = getattr(self.dataOut, self.dataList[i])
            dsDict['variable'] = self.dataList[i]
            #--------------------- Conditionals ------------------------
            #There is no data

            if dataAux is None:

                return 0

            if isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
                dsDict['mode'] = 0
                dsDict['nDim'] = 0
                arrayDim[i,0] = 0
                dsList.append(dsDict)

            #Mode 2: meteors
            elif self.mode[i] == 2:
                dsDict['dsName'] = 'table0'
                dsDict['mode'] = 2 # Mode meteors
                dsDict['shape'] = dataAux.shape[-1]
                dsDict['nDim'] = 0
                dsDict['dsNumber'] = 1
                arrayDim[i,3] = dataAux.shape[-1]
                arrayDim[i,4] = self.mode[i] #Mode the data was stored
                dsList.append(dsDict)

            #Mode 1
            else:
                arrayDim0 = dataAux.shape #Data dimensions
                arrayDim[i,0] = len(arrayDim0) #Number of array dimensions
                arrayDim[i,4] = self.mode[i] #Mode the data was stored
                strtable = 'table'
                dsDict['mode'] = 1 # Mode parameters

                # Three-dimension arrays
                if len(arrayDim0) == 3:
                    arrayDim[i,1:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,2:4]
                    dsDict['nDim'] = 3

                    # dsDict.copy() gives each table its own descriptor
                    for j in range(nTables):
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # Two-dimension arrays
                elif len(arrayDim0) == 2:
                    arrayDim[i,2:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,3]
                    dsDict['nDim'] = 2

                    for j in range(nTables):
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # One-dimension arrays
                elif len(arrayDim0) == 1:
                    arrayDim[i,3] = arrayDim0[0]
                    dsDict['shape'] = arrayDim0[0]
                    dsDict['dsNumber'] = 1
                    dsDict['dsName'] = strtable + str(0)
                    dsDict['nDim'] = 1
                    dsList.append(dsDict)

            table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0)
            tableList.append(table)

        self.dsList = dsList
        self.tableDim = numpy.array(tableList, dtype = dtype0)
        self.blockIndex = 0
        # Track the day-of-year so a day change can trigger a new file set
        timeTuple = time.localtime(dataOut.utctime)
        self.currentDay = timeTuple.tm_yday
606 606
607 607 def putMetadata(self):
608 608
609 609 fp = self.createMetadataFile()
610 610 self.writeMetadata(fp)
611 611 fp.close()
612 612 return
613 613
    def createMetadataFile(self):
        '''
        Create and return an open h5py.File for this set's metadata.

        Output lives under path/dYYYYDDD/; the set number in the filename is
        derived from the last existing metadata file (setType is None) or
        from the hour*60+minute of the block (otherwise).
        '''
        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.localtime(self.dataOut.utctime)

        subfolder = ''
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1 # initialize the set-number counter

        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1 # initialize the set-number counter

        else:
            filesList = os.listdir( fullpath )
            filesList = sorted( filesList, key=str.lower )
            if len( filesList ) > 0:
                filesList = [k for k in filesList if k.startswith(self.metaoptchar)]
                filen = filesList[-1]
                # the filename is expected to have the following layout
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] ) # resume numbering from the last file's set
                else:
                    setFile = -1
            else:
                setFile = -1 # initialize the set-number counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            # Time-based set number: minutes since midnight
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        filename = os.path.join( path, subfolder, file )
        self.metaFile = file
        #Setting HDF5 File
        fp = h5py.File(filename,'w')

        return fp
672 672
673 673 def writeMetadata(self, fp):
674 674
675 675 grp = fp.create_group("Metadata")
676 676 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
677 677
678 678 for i in range(len(self.metadataList)):
679 679 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
680 680 return
681 681
682 682 def timeFlag(self):
683 683 currentTime = self.dataOut.utctime
684 684
685 685 if self.lastTime is None:
686 686 self.lastTime = currentTime
687 687
688 688 #Day
689 689 timeTuple = time.localtime(currentTime)
690 690 dataDay = timeTuple.tm_yday
691 691
692 692 #Time
693 693 timeDiff = currentTime - self.lastTime
694 694
695 695 #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
696 696 if dataDay != self.currentDay:
697 697 self.currentDay = dataDay
698 698 return True
699 699 elif timeDiff > 3*60*60:
700 700 self.lastTime = currentTime
701 701 return True
702 702 else:
703 703 self.lastTime = currentTime
704 704 return False
705 705
    def setNextFile(self):
        """Open the next HDF5 data file and lay out its dataset structure.

        Determines the next set number in the day subfolder, creates the
        file, writes the metadata group, then pre-creates every resizable
        dataset described in ``self.dsList`` (one per table for modes 1/2,
        one scalar series per variable for mode 0).  Leaves the handles in
        ``self.ds`` and resets ``blockIndex``.
        """
        ext = self.ext
        path = self.path
        setFile = self.setFile
        mode = self.mode

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )

        if os.path.exists(fullpath):
            filesList = os.listdir( fullpath )
            filesList = [k for k in filesList if 'M' in k]
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # The filename is expected to have the following format:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] ) #resume the set counter from the last file's set number
                else:
                    setFile = -1
            else:
                setFile = -1 #initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1 #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            # Minute-of-day based set number (4 digits instead of 3).
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        filename = os.path.join( path, subfolder, file )

        # Create the HDF5 file and its fixed structure.
        fp = h5py.File(filename,'w')
        # Write metadata first so the file is self-describing.
        self.writeMetadata(fp)
        # Pre-create the (resizable) data datasets.
        grp = fp.create_group("Data")
        ds = []
        data = []
        dsList = self.dsList
        i = 0
        while i < len(dsList):
            dsInfo = dsList[i]
            # Mode 0: scalar per block, grows along the second axis.
            if dsInfo['mode'] == 0:
                ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64)
                ds.append(ds0)
                data.append([])
                i += 1
                continue

            # Mode 2: table-style data (e.g. meteors), grows along axis 0.
            elif dsInfo['mode'] == 2:
                grp0 = grp.create_group(dsInfo['variable'])
                ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data = numpy.zeros((1,dsInfo['shape'])) , maxshape=(None,dsInfo['shape']), chunks=True)
                ds.append(ds0)
                data.append([])
                i += 1
                continue

            # Mode 1: one sub-dataset ('table') per channel/parameter.
            elif dsInfo['mode'] == 1:
                grp0 = grp.create_group(dsInfo['variable'])

                for j in range(dsInfo['dsNumber']):
                    dsInfo = dsList[i]
                    tableName = dsInfo['dsName']


                    if dsInfo['nDim'] == 3:
                        shape = dsInfo['shape'].astype(int)
                        ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1) , data = numpy.zeros((shape[0],shape[1],1)), maxshape = (None,shape[1],None), chunks=True)
                    else:
                        shape = int(dsInfo['shape'])
                        ds0 = grp0.create_dataset(tableName, (1,shape), data = numpy.zeros((1,shape)) , maxshape=(None,shape), chunks=True)

                    ds.append(ds0)
                    data.append([])
                    i += 1

        fp.flush()
        fp.close()

        log.log('creating file: {}'.format(filename), 'Writing')
        self.filename = filename
        self.ds = ds
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return
811 811
    def putData(self):
        """Append the current dataOut block to the output file, rolling over
        to a new file when the block quota or the time window is reached."""

        if self.blockIndex == self.blocksPerFile or self.timeFlag():
            self.setNextFile()

        self.readBlock()
        self.setBlock() #Prepare data to be written
        self.writeBlock() #Write data

        return
822 822
    def readBlock(self):
        """Re-open the current HDF5 file in r+ mode and refresh the dataset
        handles in ``self.ds`` (the file is closed after every writeBlock,
        so handles must be re-acquired before each write).

        Leaves the open file in ``self.fp`` and the 'Data' group in
        ``self.grp``.
        """
        dsList = self.dsList
        ds = self.ds
        # Re-open the HDF5 file for update.
        fp = h5py.File(self.filename,'r+')
        grp = fp["Data"]
        ind = 0

        while ind < len(dsList):
            dsInfo = dsList[ind]

            if dsInfo['mode'] == 0:
                # Scalar series live directly under the 'Data' group.
                ds0 = grp[dsInfo['variable']]
                ds[ind] = ds0
                ind += 1
            else:

                # Modes 1/2 keep their tables inside a per-variable subgroup.
                grp0 = grp[dsInfo['variable']]

                for j in range(dsInfo['dsNumber']):
                    dsInfo = dsList[ind]
                    ds0 = grp0[dsInfo['dsName']]
                    ds[ind] = ds0
                    ind += 1

        self.fp = fp
        self.grp = grp
        self.ds = ds

        return
860 860
861 861 def setBlock(self):
862 862 '''
863 863 data Array configured
864 864
865 865
866 866 self.data
867 867 '''
868 868 #Creating Arrays
869 869 dsList = self.dsList
870 870 data = self.data
871 871 ind = 0
872 872
873 873 while ind < len(dsList):
874 874 dsInfo = dsList[ind]
875 875 dataAux = getattr(self.dataOut, dsInfo['variable'])
876 876
877 877 mode = dsInfo['mode']
878 878 nDim = dsInfo['nDim']
879 879
880 880 if mode == 0 or mode == 2 or nDim == 1:
881 881 data[ind] = dataAux
882 882 ind += 1
883 883 # elif nDim == 1:
884 884 # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
885 885 # ind += 1
886 886 elif nDim == 2:
887 887 for j in range(dsInfo['dsNumber']):
888 888 data[ind] = dataAux[j,:]
889 889 ind += 1
890 890 elif nDim == 3:
891 891 for j in range(dsInfo['dsNumber']):
892 892 data[ind] = dataAux[:,j,:]
893 893 ind += 1
894 894
895 895 self.data = data
896 896 return
897 897
    def writeBlock(self):
        '''
        Saves the staged block (``self.data``) into the open HDF5 file.

        On the first block of a file the pre-created datasets are resized to
        the real data shape; on subsequent blocks each dataset is grown by
        one block along its append axis.  Flushes and closes the file so the
        data on disk is always consistent.
        '''
        dsList = self.dsList

        for i in range(len(self.ds)):
            dsInfo = dsList[i]
            nDim = dsInfo['nDim']
            mode = dsInfo['mode']

            # First block written to this file
            if self.firsttime:
                if type(self.data[i]) == numpy.ndarray:

                    if nDim == 3:
                        # Add a trailing block axis before the first resize.
                        self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1))
                        self.ds[i].resize(self.data[i].shape)
                    if mode == 2:
                        self.ds[i].resize(self.data[i].shape)
                    self.ds[i][:] = self.data[i]
            else:

                # From the second block onwards, grow along the append axis.
                # Mode 2: table data (e.g. meteors), append rows.
                if mode == 2:
                    dataShape = self.data[i].shape
                    dsShape = self.ds[i].shape
                    self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1]))
                    self.ds[i][dsShape[0]:,:] = self.data[i]
                # Mode 0: scalar series, append one column.
                elif mode == 0:
                    self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
                    self.ds[i][0,-1] = self.data[i]
                # One-dimensional data: append one row.
                elif nDim == 1:
                    self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
                    self.ds[i][-1,:] = self.data[i]
                # Two-dimensional data: append one row at blockIndex.
                elif nDim == 2:
                    self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1]))
                    self.ds[i][self.blockIndex,:] = self.data[i]
                # Three-dimensional data: append one slice on the last axis.
                elif nDim == 3:
                    self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1))
                    self.ds[i][:,:,-1] = self.data[i]

        self.firsttime = False
        self.blockIndex += 1

        # Close to persist changes (handles are re-acquired in readBlock).
        self.fp.flush()
        self.fp.close()
        return
952 952
    def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
        """Operation entry point: configure the writer on first call, then
        append the current dataOut block to the HDF5 output.

        :param dataOut: data object holding the attributes to store
        :param path: output directory for the HDF5 files
        :param blocksPerFile: blocks stored per file before rolling over
        :param metadataList: attribute names stored as metadata
        :param dataList: attribute names stored as data
        :param mode: data stacking mode per variable
        :param setType: when set, file set numbers use minute-of-day
        """
        self.dataOut = dataOut
        if not(self.isConfig):
            self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList, mode=mode,
                       setType=setType)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return
966 966
967 967
968 @MPDecorator
968
class ParameterReader(Reader, ProcessingUnit):
    '''
    Reads HDF5 format files written by ParameterWriter.

    Loads the 'Metadata' and 'Data' groups of each file, selects the blocks
    inside the configured time window and serves them one block per call
    through ``self.dataOut`` (a Parameters object).
    '''

    def __init__(self):
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()
        self.ext = ".hdf5"
        self.optchar = "D"
        self.timezone = "lt"        # 'lt' shifts timestamps by -5h (local time)
        self.listMetaname = []
        self.listMeta = []
        self.listDataname = []
        self.listData = []
        self.listShapes = []
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.metadata = False       # optional JSON description of the file layout
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"

    def setup(self, **kwargs):
        """Resolve the list of files to read (online or offline search) and
        open the first one.  Raises SchainError when no valid file appears
        within the configured retries (online mode)."""

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt)

                try:
                    fullpath = next(fullpath)
                except:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not(fullpath):
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            pathname, filename = os.path.split(fullpath)
            # Filename layout: xYYYYDDDSSS.ext
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return

    def readFirstHeader(self):
        '''Read metadata and data of the freshly opened file and select the
        blocks inside the time window.'''

        self.__readMetadata()
        self.__readData()
        self.__setBlockList()
        self.blockIndex = 0

        return

    def __setBlockList(self):
        '''
        Select the blocks whose timestamps fall inside [startTime, endTime).

        Reads:  self.fp, self.startTime, self.endTime
        Sets:   self.blockList, self.blocksPerFile, self.interval
        '''

        startTime = self.startTime
        endTime = self.endTime

        index = self.listDataname.index('utctime')
        thisUtcTime = self.listData[index]
        self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])

        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)
        return

    def __readMetadata(self):
        '''
        Read the 'Metadata' group (or the user-supplied JSON metadata) into
        listMeta/listMetaname and the per-variable shape table (listShapes).
        Raises IOError when neither source is available.
        '''

        listMetaname = []
        listMetadata = []
        if 'Metadata' in self.fp:
            gp = self.fp['Metadata']
            for item in list(gp.items()):
                name = item[0]

                if name=='variables':
                    table = gp[name][:]
                    listShapes = {}
                    for shapes in table:
                        listShapes[shapes[0].decode()] = numpy.array([shapes[1]])
                else:
                    # h5py >= 3 removed Dataset.value; [()] is the
                    # supported way to read a whole dataset.
                    data = gp[name][()]
                    listMetaname.append(name)
                    listMetadata.append(data)
        elif self.metadata:
            metadata = json.loads(self.metadata)
            listShapes = {}
            for tup in metadata:
                name, values, dim = tup
                if dim == -1:
                    listMetaname.append(name)
                    listMetadata.append(self.fp[values][()])
                else:
                    listShapes[name] = numpy.array([dim])
        else:
            raise IOError('Missing Metadata group in file or metadata info')

        self.listShapes = listShapes
        self.listMetaname = listMetaname
        self.listMeta = listMetadata

        return

    def __readData(self):
        """Read the 'Data' group (or the datasets named in the JSON
        metadata) into listData/listDataname, stacking per-table datasets
        into single numpy arrays.  Raises IOError when neither source is
        available."""

        listdataname = []
        listdata = []

        if 'Data' in self.fp:
            grp = self.fp['Data']
            for item in list(grp.items()):
                name = item[0]
                listdataname.append(name)
                dim = self.listShapes[name][0]
                if dim == 0:
                    # Scalar series stored directly as a dataset.
                    array = grp[name][()]
                else:
                    # One 'tableNN' sub-dataset per channel; stack them.
                    array = []
                    for i in range(dim):
                        array.append(grp[name]['table{:02d}'.format(i)][()])
                    array = numpy.array(array)

                listdata.append(array)
        elif self.metadata:
            metadata = json.loads(self.metadata)
            for tup in metadata:
                name, values, dim = tup
                listdataname.append(name)
                if dim == -1:
                    continue
                elif dim == 0:
                    array = self.fp[values][()]
                else:
                    array = []
                    for var in values:
                        array.append(self.fp[var][()])
                    array = numpy.array(array)
                listdata.append(array)
        else:
            raise IOError('Missing Data group in file or metadata info')

        self.listDataname = listdataname
        self.listData = listdata
        return

    def getData(self):
        """Publish one block: copy metadata attributes and the current
        block's slice of every data variable into ``self.dataOut``."""

        for i in range(len(self.listMeta)):
            setattr(self.dataOut, self.listMetaname[i], self.listMeta[i])

        for j in range(len(self.listData)):
            dim = self.listShapes[self.listDataname[j]][0]
            if dim == 0:
                setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex])
            else:
                setattr(self.dataOut, self.listDataname[j], self.listData[j][:,self.blockIndex])

        self.dataOut.paramInterval = self.interval
        self.dataOut.flagNoData = False
        self.blockIndex += 1

        return

    def run(self, **kwargs):
        """Processing-unit entry point: configure on first call, advance to
        the next file when the current one is exhausted, then emit a block."""

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        if self.blockIndex == self.blocksPerFile:
            self.setNextFile()

        self.getData()

        return
1194 1194
@MPDecorator
class ParameterWriter(Operation):
    '''
    HDF5 Writer, stores parameters data in HDF5 format files

    path: path where the files will be stored
    blocksPerFile: number of blocks that will be saved in per HDF5 format file
    mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
    metadataList: list of attributes that will be stored as metadata
    dataList: list of attributes that will be stores as data
    '''


    ext = ".hdf5"
    optchar = "D"
    metaoptchar = "M"
    metaFile = None
    filename = None
    path = None
    setFile = None
    fp = None
    grp = None
    ds = None
    firsttime = True
    #Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    #Data Arrays
    dataList = None
    metadataList = None
    dsList = None #List of dictionaries with dataset properties
    tableDim = None
    dtype = [('name', 'S20'),('nDim', 'i')]
    currentDay = None
    lastTime = None

    def __init__(self):

        Operation.__init__(self)
        return

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
        """Inspect the listed dataOut attributes and build the dataset
        descriptors (dsList) and the name/nDim table (tableDim) used to lay
        out each output file.  Attributes that are currently None are
        skipped."""
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = dataList
        self.setType = setType

        tableList = []
        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            dataAux = getattr(self.dataOut, self.dataList[i])
            dsDict['variable'] = self.dataList[i]

            if dataAux is None:
                continue
            # numpy.float was removed in NumPy 1.24; numpy.floating is the
            # abstract scalar base class that covers all float dtypes.
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]

            dsList.append(dsDict)
            tableList.append((self.dataList[i], dsDict['nDim']))

        self.dsList = dsList
        self.tableDim = numpy.array(tableList, dtype=self.dtype)
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        """Return True when a new output file should be started: either the
        local day-of-year changed, or more than 3 hours elapsed since the
        previous sample."""
        currentTime = self.dataOut.utctime
        timeTuple = time.localtime(currentTime)
        dataDay = timeTuple.tm_yday

        if self.lastTime is None:
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # New file if the day changed or the gap between samples exceeds 3h.
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
        """Operation entry point: configure the writer on first call, then
        append the current dataOut block to the HDF5 output.

        :param dataOut: data object holding the attributes to store
        :param path: output directory for the HDF5 files
        :param blocksPerFile: blocks stored per file before rolling over
        :param metadataList: attribute names stored as metadata
        :param dataList: attribute names stored as data
        :param setType: when set, file set numbers use minute-of-day
        """

        self.dataOut = dataOut
        if not(self.isConfig):
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList,
                       setType=setType)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        """Open the next HDF5 output file in the day subfolder (resuming or
        restarting the set counter), write its metadata group and pre-create
        its data datasets."""

        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len( filesList ) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # The filename is expected to have the following format:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    setFile = int(filen[8:11]) #resume the set counter from the last file's set number
                else:
                    setFile = -1
            else:
                setFile = -1 #initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1 #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            # Minute-of-day based set number (4 digits instead of 3).
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        self.filename = os.path.join( path, subfolder, file )

        # Open the HDF5 file; stays open until closeFile().
        self.fp = h5py.File(self.filename, 'w')
        # Write metadata first so the file is self-describing.
        self.writeMetadata(self.fp)
        # Pre-create the data datasets.
        self.writeData(self.fp)

    def writeMetadata(self, fp):
        """Write the 'Metadata' group: the variables table plus one dataset
        per listed metadata attribute (missing attributes are logged and
        skipped)."""

        grp = fp.create_group("Metadata")
        grp.create_dataset('variables', data=self.tableDim, dtype=self.dtype)

        for i in range(len(self.metadataList)):
            if not hasattr(self.dataOut, self.metadataList[i]):
                log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                continue
            value = getattr(self.dataOut, self.metadataList[i])
            grp.create_dataset(self.metadataList[i], data=value)
        return

    def writeData(self, fp):
        """Create the 'Data' group with one pre-sized dataset per variable
        (scalars) or per table (arrays), and record the (attribute, channel)
        mapping used by putData."""

        grp = fp.create_group("Data")
        dtsets = []
        data = []

        for dsInfo in self.dsList:
            if dsInfo['nDim'] == 0:
                # Scalar series: one value per block.
                ds = grp.create_dataset(
                    dsInfo['variable'],
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                data.append((dsInfo['variable'], -1))
            else:
                # Array data: one 'tableNN' dataset per first-axis entry.
                sgrp = grp.create_group(dsInfo['variable'])
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        'table{:02d}'.format(i),
                        (self.blocksPerFile, ) + dsInfo['shape'][1:],
                        chunks=True)
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        """Write the current dataOut block into the open file, rolling over
        to a new file when the block quota or the time window is reached."""

        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                # Scalar series.
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                # One channel of an array variable.
                ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)

        return

    def closeFile(self):
        """Trim every dataset to the number of blocks actually written and
        close the file."""

        if self.blockIndex != self.blocksPerFile:
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        self.fp.flush()
        self.fp.close()

    def close(self):
        """Finalize the current output file."""

        self.closeFile()
@@ -1,527 +1,527
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12 from schainpy.utils import log
13 13
14 @MPDecorator
14
class SpectraReader(JRODataReader, ProcessingUnit):
    """
    Reader for processed spectra files (.pdata).

    Data is always read one block at a time.  Each block (a 3-D array) is
    split into three buffers: self spectra, cross spectra and DC channels:

        equal channel pairs     * heights * profiles   (self spectra)
        different channel pairs * heights * profiles   (cross spectra)
        channels * heights                             (DC channels)

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra.  The first three store the file
    metadata; the Spectra object (self.dataOut) receives one block of
    data every time getData() is called.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print(readerObj.data_spc)

            print(readerObj.data_cspc)

            print(readerObj.data_dc)

            if readerObj.flagNoMoreFiles:
                break

    """

    def __init__(self):#, **kwargs):
        """
        Initialize headers, per-block read sizes and the output Spectra
        object (self.dataOut).

        self.dataOut is refilled on every getData() call; a new block is
        read from disk whenever the internal buffer is exhausted.

        Return : None
        """

        ProcessingUnit.__init__(self)

        # points to read per block, per buffer (set by getBlockDimension)
        self.pts2read_SelfSpectra = 0
        self.pts2read_CrossSpectra = 0
        self.pts2read_DCchannels = 0
        self.ext = ".pdata"
        self.optchar = "P"
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()
        self.lastUTTime = 0
        self.maxTimeStep = 30
        self.dataOut = Spectra()
        self.profileIndex = 1
        # channel/pair bookkeeping filled by getBlockDimension()
        self.nRdChannels = None
        self.nRdPairs = None
        self.rdPairList = []

    def createObjByDefault(self):
        # Build a default (empty) Spectra container.
        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Spectra blocks are consumed whole, so a new block is always needed.
        return 1


    def getBlockDimension(self):
        """
        Compute how many points must be read for each data block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb stores flattened (chA, chB) pairs; equal members mean
        # a self spectrum, different members a cross-spectrum pair.
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 # pair of equal channels (self spectrum)
            else:
                self.nRdPairs = self.nRdPairs + 1 # pair of different channels (cross spectrum)
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and
        update every block-related attribute (metadata + data).  The data
        is stored in the internal buffers.

        Return: 1 on success

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If the block read is not a valid data block
        """

        fpointer = self.fp.tell()

        # self spectra are stored as plain (real) values ...
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3-D array

        # ... while cross spectra and DC use the structured (real, imag) dtype.
        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3-D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) # nChannels * nHeights points
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) # reshape into a 2-D array

        if not self.processingHeaderObj.shif_fft:
            # roll right along the profiles axis to apply the FFT shift
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # same shift for the cross spectra
                cspc = numpy.roll( cspc, shift, axis=2 )

        #Dimensions : nChannels, nProfiles, nSamples
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getFirstHeader(self):
        # Copy per-file header information into the output Spectra object.

        self.getBasicHeader()
        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
        self.dataOut.dtype = self.dtype
        self.dataOut.pairsList = self.rdPairList
        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
        # height axis reconstructed from first height + uniform spacing
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
        self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
        self.dataOut.flagShiftFFT = True #Data is always shifted
        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data is not decoded
        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data has no flip applied

    def getData(self):
        """
        Copy the read buffers into the output Spectra object (self.dataOut)
        together with all associated metadata.  When the buffer is empty a
        new block is read with readNextBlock().

        Return:
            0                     : no more files available / no data
            self.dataOut.data_spc : on a successful copy

        Affected:
            self.dataOut
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return 0

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        # data is a 3-D numpy array (profiles, heights, channels)

        if self.data_spc is None:
            self.dataOut.flagNoData = True
            return 0

        self.getBasicHeader()
        self.getFirstHeader()
        self.dataOut.data_spc = self.data_spc
        self.dataOut.data_cspc = self.data_cspc
        self.dataOut.data_dc = self.data_dc
        self.dataOut.flagNoData = False
        self.dataOut.realtime = self.online

        return self.dataOut.data_spc
275 275
276 276
@MPDecorator
class SpectraWriter(JRODataWriter, Operation):
    """
    Writer for processed spectra files (.pdata).

    Data is always written one block at a time, in the same on-disk layout
    that SpectraReader reads back: self spectra, then optional cross
    spectra, then optional DC channels.
    """

    def __init__(self):
        """
        Initialize headers, block buffers and file-state counters.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        Operation.__init__(self)

        self.ext = ".pdata"
        self.optchar = "P"
        # expected sub-buffer shapes, filled by setBlockDimension()
        self.shape_spc_Buffer = None
        self.shape_cspc_Buffer = None
        self.shape_dc_Buffer = None
        # copies of the current block taken from dataOut in putData()
        self.data_spc = None
        self.data_cspc = None
        self.data_dc = None
        self.setFile = None
        self.noMoreFiles = 0
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # One full block is buffered on every putData() call.
        return 1

    def setBlockDimension(self):
        """
        Compute the shapes of the sub-buffers that compose one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)

    def writeBlock(self):
        """
        Write the buffered block (spc, optional cspc, optional dc) to the
        current file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose(self.data_spc, (0, 2, 1))
        if not self.processingHeaderObj.shif_fft:
            # Undo the FFT shift before writing.  Floor division is
            # required: under Python 3 `profilesPerBlock/2` is a float and
            # numpy.roll rejects non-integer shifts (SpectraReader.readBlock
            # already uses int() for the same shift).
            spc = numpy.roll(spc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        if self.data_cspc is not None:

            cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
            # structured (real, imag) layout expected by the file format
            data = numpy.zeros(numpy.shape(cspc), self.dtype)
            if not self.processingHeaderObj.shif_fft:
                # same integer shift fix as for the self spectra above
                cspc = numpy.roll(cspc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:

            dc = self.data_dc
            data = numpy.zeros(numpy.shape(dc), self.dtype)
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1

    def putData(self):
        """
        Copy the current block from self.dataOut into the internal buffers
        and write it to the current file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : when there is no data to write (None otherwise)
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # A gap in the input invalidates the buffered block: zero it
            # and start a new file.
            self.data_spc.fill(0)
            if self.dataOut.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.dataOut.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()

        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()

        if self.dataOut.data_dc is not None:
            self.data_dc = self.dataOut.data_dc.copy()

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

    def __getBlockSize(self):
        '''
        Return the size in bytes of one Spectra data block.
        '''

        dtype_width = self.getDtypeWidth()

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra * dtype_width)

        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            # complex values: real + imag
            blocksize += (pts2write_CrossSpectra * dtype_width * 2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels * dtype_width * 2)

        return blocksize

    def setFirstHeader(self):
        """
        Build the first header of the output file from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.processingHeaderObj.dtype = 1  # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt  # required to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            # spectraComb encodes each self spectrum as a (ch, ch) pair
            # followed by the cross-spectra (chA, chB) pairs, flattened.
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

        self.processingHeaderObj.processFlags = self.getProcessFlags()

        self.setBasicHeader()
@@ -1,600 +1,602
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import datetime
8 8 import numpy
9 9
10 10 try:
11 11 from gevent import sleep
12 12 except:
13 13 from time import sleep
14 14
15 15 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
16 16 from schainpy.model.data.jrodata import Voltage
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
18 18
19 19 try:
20 20 import digital_rf_hdf5
21 21 except:
22 22 pass
23 23
24 24 class USRPReader(ProcessingUnit):
25 25 '''
26 26 classdocs
27 27 '''
28 28
    def __init__(self, **kwargs):
        '''
        Build a USRP reader with an empty Voltage output and the reading
        state reset; the actual configuration happens in setup().
        '''

        ProcessingUnit.__init__(self, **kwargs)

        # Output container refilled on every getData() call.
        self.dataOut = Voltage()
        self.__printInfo = True
        self.__flagDiscontinuousBlock = False
        # Sentinel larger than any buffer so the first getData() forces a
        # block read.
        self.__bufferIndex = 9999999

        # Radar-code parameters; populated from file metadata in setup().
        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
46 46
    def __getCurrentSecond(self):
        # Current read position expressed in epoch seconds.
        return self.__thisUnixSample/self.__sample_rate

    # Read-only accessor for the current read position in seconds.
    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 52
    def __setFileHeader(self):
        '''
        Initialize every metadata parameter of the dataOut object
        (headers only, no data).
        '''
        ippSeconds = 1.0*self.__nSamples/self.__sample_rate

        nProfiles = 1.0/ippSeconds # number of profiles in one second

        self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
                                                                      txA=0,
                                                                      txB=0,
                                                                      nWindows=1,
                                                                      nHeights=self.__nSamples,
                                                                      firstHeight=self.__firstHeigth,
                                                                      deltaHeight=self.__deltaHeigth,
                                                                      codeType=self.__codeType,
                                                                      nCode=self.__nCode, nBaud=self.__nBaud,
                                                                      code = self.__code)

        self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                    nProfiles=nProfiles,
                                                    nChannels=len(self.__channelList),
                                                    adcResolution=14)

        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])

        self.dataOut.nProfiles = nProfiles

        # uniform height axis from first height + spacing
        self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth

        self.dataOut.channelList = self.__channelList

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        self.dataOut.flagNoData = True

        # set to True when the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        self.dataOut.timeZone = self.__timezone/60 # like jroheader: minutes between UTC and localtime

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        self.dataOut.nCohInt = 1

        self.dataOut.flagDecodeData = False # assume the data is already decoded

        self.dataOut.flagDeflipData = False # assume the data has no flip applied

        self.dataOut.flagShiftFFT = False

        self.dataOut.ippSeconds = ippSeconds

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
126 126
127 127 def findDatafiles(self, path, startDate=None, endDate=None):
128 128
129 129 if not os.path.isdir(path):
130 130 return []
131 131
132 132 try:
133 133 digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
134 134 except:
135 135 digitalReadObj = digital_rf_hdf5.read_hdf5(path)
136 136
137 137 channelNameList = digitalReadObj.get_channels()
138 138
139 139 if not channelNameList:
140 140 return []
141 141
142 142 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
143 143
144 144 sample_rate = metadata_dict['sample_rate'][0]
145 145
146 146 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
147 147
148 148 try:
149 149 timezone = this_metadata_file['timezone'].value
150 150 except:
151 151 timezone = 0
152 152
153 153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
154 154
155 155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
157 157
158 158 if not startDate:
159 159 startDate = startDatetime.date()
160 160
161 161 if not endDate:
162 162 endDate = endDatatime.date()
163 163
164 164 dateList = []
165 165
166 166 thisDatetime = startDatetime
167 167
168 168 while(thisDatetime<=endDatatime):
169 169
170 170 thisDate = thisDatetime.date()
171 171
172 172 if thisDate < startDate:
173 173 continue
174 174
175 175 if thisDate > endDate:
176 176 break
177 177
178 178 dateList.append(thisDate)
179 179 thisDatetime += datetime.timedelta(1)
180 180
181 181 return dateList
182 182
    def setup(self, path = None,
                startDate = None,
                endDate = None,
                startTime = datetime.time(0,0,0),
                endTime = datetime.time(23,59,59),
                channelList = None,
                nSamples = None,
                ippKm = 60,
                online = False,
                delay = 60,
                buffer_size = 1024,
                **kwargs):
        '''
        Configure the reader: open the digital_rf dataset, read its
        metadata and compute the time range and block size to read.

        Inputs:
            path        : digital_rf data directory (required)
            startDate   : first date to read (defaults to file bounds)
            endDate     : last date to read (defaults to file bounds)
            startTime   : time of day for startDate
            endTime     : time of day for endDate
            channelList : indices of the channels to read (default: all)
            nSamples    : samples per profile (derived from ippKm if None)
            ippKm       : inter-pulse period in km
            online      : keep reloading metadata for newly arriving data
            delay       : seconds to wait for new data in online mode
            buffer_size : number of profiles read per block

        Raises ValueError when the path is missing, has no channels or
        neither nSamples nor ippKm is given.
        '''

        if not os.path.isdir(path):
            raise ValueError("[Reading] Directory %s does not exist" %path)

        # Older digital_rf versions do not accept load_all_metadata.
        try:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
        except:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)

        channelNameList = self.digitalReadObj.get_channels()

        if not channelNameList:
            raise ValueError("[Reading] Directory %s does not have any files" %path)

        if not channelList:
            channelList = list(range(len(channelNameList)))

        ########## Reading metadata ######################

        metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])

        self.__sample_rate = metadata_dict['sample_rate'][0]
        # height spacing in km derived from the sample rate (0.15 km/us)
        self.__deltaHeigth = 1e6*0.15/self.__sample_rate

        this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])

        # metadata key name differs between digital_rf versions
        self.__frequency = None
        try:
            self.__frequency = this_metadata_file['center_frequencies'].value
        except:
            self.__frequency = this_metadata_file['fc'].value

        if not self.__frequency:
            raise ValueError("Center Frequency is not defined in metadata file")

        try:
            self.__timezone = this_metadata_file['timezone'].value
        except:
            self.__timezone = 0

        self.__firstHeigth = 0

        try:
            codeType = this_metadata_file['codeType'].value
        except:
            codeType = 0

        nCode = 1
        nBaud = 1
        code = numpy.ones((nCode, nBaud), dtype=numpy.int)

        if codeType:
            nCode = this_metadata_file['nCode'].value
            nBaud = this_metadata_file['nBaud'].value
            code = this_metadata_file['code'].value

        if not ippKm:
            try:
                # convert seconds to km
                ippKm = 1e6*0.15*this_metadata_file['ipp'].value
            except:
                ippKm = None

        ####################################################
        startUTCSecond = None
        endUTCSecond = None

        if startDate:
            startDatetime = datetime.datetime.combine(startDate, startTime)
            startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone

        if endDate:
            endDatetime = datetime.datetime.combine(endDate, endTime)
            endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone

        start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])

        # clamp the requested range to the data actually on disk
        if not startUTCSecond:
            startUTCSecond = start_index/self.__sample_rate

        if start_index > startUTCSecond*self.__sample_rate:
            startUTCSecond = start_index/self.__sample_rate

        if not endUTCSecond:
            endUTCSecond = end_index/self.__sample_rate

        if end_index < endUTCSecond*self.__sample_rate:
            endUTCSecond = end_index/self.__sample_rate

        if not nSamples:
            if not ippKm:
                raise ValueError("[Reading] nSamples or ippKm should be defined")

            nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))

        channelBoundList = []
        channelNameListFiltered = []

        for thisIndexChannel in channelList:
            thisChannelName = channelNameList[thisIndexChannel]
            start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
            channelBoundList.append((start_index, end_index))
            channelNameListFiltered.append(thisChannelName)

        self.profileIndex = 0

        self.__delay = delay
        self.__ippKm = ippKm
        self.__codeType = codeType
        self.__nCode = nCode
        self.__nBaud = nBaud
        self.__code = code

        self.__datapath = path
        self.__online = online
        self.__channelList = channelList
        self.__channelNameList = channelNameListFiltered
        self.__channelBoundList = channelBoundList
        self.__nSamples = nSamples
        self.__samples_to_read = int(buffer_size*nSamples)
        self.__nChannels = len(self.__channelList)

        self.__startUTCSecond = startUTCSecond
        self.__endUTCSecond = endUTCSecond

        self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate # time interval per block

        if online:
            # start reading at the (floored) end of the available data
            startUTCSecond = numpy.floor(endUTCSecond)

        # one block behind, __readNextBlock advances before reading
        self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read

        self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)

        self.__setFileHeader()
        self.isConfig = True

        print("[Reading] USRP Data was found from %s to %s " %(
            datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        ))

        print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
                                                           datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
                                                           ))
357 357
    def __reload(self):
        # Refresh the digital_rf metadata while reading online and extend
        # the known [start, end] time range when new samples have arrived.
        # Returns True when the end bound grew, False otherwise, and None
        # when not online.  NOTE(review): mixed return types -- callers
        # should only rely on truthiness.

        if not self.__online:
            return

        print("[Reading] reloading metadata ...")

        # complete_update is only supported by newer digital_rf versions
        try:
            self.digitalReadObj.reload(complete_update=True)
        except:
            self.digitalReadObj.reload()

        # NOTE(review): __channelNameList was filtered in setup() but is
        # indexed here with the original channel number -- confirm both
        # lists line up when channelList is not 0-based contiguous.
        start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])

        if start_index > self.__startUTCSecond*self.__sample_rate:
            self.__startUTCSecond = 1.0*start_index/self.__sample_rate

        if end_index > self.__endUTCSecond*self.__sample_rate:
            self.__endUTCSecond = 1.0*end_index/self.__sample_rate
            print()
            print("[Reading] New timerange found [%s, %s] " %(
                datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
                datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
            ))

            return True

        return False
392 392
    def __readNextBlock(self, seconds=30, volt_scale = 218776):
        '''
        Read the next window of __samples_to_read samples per channel
        into __data_buffer, scaling each sample by volt_scale.

        Returns True when every channel was read completely, False when
        the time range is exhausted or no channel yielded data.  Sets
        __flagDiscontinuousBlock when a gap or short read occurs.
        '''

        # Advance to the next data window.
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
            print("[Reading] There are no more data into selected time-range")

            # In online mode new data may have arrived; refresh the bounds.
            self.__reload()

            if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
                # Step back so the next call retries the same window.
                self.__thisUnixSample -= self.__samples_to_read
                return False

        indexChannel = 0

        dataOk = False

        for thisChannelName in self.__channelNameList:

            try:
                result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                              self.__samples_to_read,
                                                              thisChannelName)

            except IOError as e:
                # Gap in the recorded data: flag it and abandon this block.
                self.__flagDiscontinuousBlock = True
                print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
                break

            if result.shape[0] != self.__samples_to_read:
                # Short read: treat as a discontinuity as well.
                self.__flagDiscontinuousBlock = True
                print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                       result.shape[0],
                                                                                       self.__samples_to_read))
                break

            self.__data_buffer[indexChannel,:] = result*volt_scale

            indexChannel += 1

            dataOk = True

        self.__utctime = self.__thisUnixSample/self.__sample_rate

        if not dataOk:
            return False

        print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                     self.__samples_to_read,
                                                     self.__timeInterval))

        # Reset the consumer cursor so getData() starts at the buffer head.
        self.__bufferIndex = 0

        return True
453 453 def __isBufferEmpty(self):
454 454
455 455 if self.__bufferIndex <= self.__samples_to_read - self.__nSamples:
456 456 return False
457 457
458 458 return True
459 459
    def getData(self, seconds=30, nTries=5):

        '''
        Copy the next profile from the internal buffer into self.dataOut,
        reading a new block from disk (or, when online, waiting up to
        nTries * seconds for one) whenever the buffer is exhausted.

        In addition, the buffer counter is increased by one.

        Return:
            True when a profile was delivered; False when no more data
            is available in the selected time-range.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():

            self.__flagDiscontinuousBlock = False

            while True:
                if self.__readNextBlock():
                    break

                # Past the end of the requested range: give up.
                if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print('[Reading] discontinuous block found ... continue with the next block')
                    continue

                # Offline data cannot grow, so a failed read is final.
                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print('[Reading] waiting %d seconds to read a new block' %seconds)
                sleep(seconds)

        # Slice one profile (all channels) out of the block buffer.
        self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
        self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        # Wrap the per-output profile counter at nProfiles.
        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True
519 519
520 520 def printInfo(self):
521 521 '''
522 522 '''
523 523 if self.__printInfo == False:
524 524 return
525 525
526 526 # self.systemHeaderObj.printInfo()
527 527 # self.radarControllerHeaderObj.printInfo()
528 528
529 529 self.__printInfo = False
530 530
531 531 def printNumberOfBlock(self):
532 532 '''
533 533 '''
534 534
535 535 print(self.profileIndex)
536 536
    def run(self, **kwargs):
        '''
        Pipeline entry point; called once per iteration.  The first call
        configures the reader via setup(**kwargs); every call then
        delivers one profile through getData.
        '''

        if not self.isConfig:
            self.setup(**kwargs)

        self.getData(seconds=self.__delay)

        return
548 548
549
@MPDecorator
class USRPWriter(Operation):
    '''
    Skeleton writer operation for USRP data.

    At the moment it only keeps a reference to the incoming data
    object; no files are actually written.
    '''

    def __init__(self, **kwargs):
        '''
        Build the operation; the data reference is attached on the
        first run() call.
        '''
        Operation.__init__(self, **kwargs)
        self.dataOut = None

    def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
        '''
        Store the input object and mark the operation as configured.

        Input:
            dataIn : input data, which is also the output data

        Note: path, blocksPerFile, set and ext are accepted but not
        used yet.
        '''
        self.dataOut = dataIn
        self.isConfig = True
        return

    def run(self, dataIn, **kwargs):
        '''
        Called once per pipeline iteration; configures the writer on
        the first call and is currently a no-op afterwards.

        Inputs:

            dataIn : object with the data

        '''
        if not self.isConfig:
            self.setup(dataIn, **kwargs)
592 594
593 595 if __name__ == '__main__':
594 596
595 597 readObj = USRPReader()
596 598
597 599 while True:
598 600 readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
599 601 # readObj.printInfo()
600 602 readObj.printNumberOfBlock() No newline at end of file
@@ -1,680 +1,676
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 import zmq
14 import tempfile
15 from io import StringIO
16 # from _sha import blocksize
17 13
18 @MPDecorator
14
19 15 class VoltageReader(JRODataReader, ProcessingUnit):
20 16 """
21 17 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
22 18 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
23 19 perfiles*alturas*canales) son almacenados en la variable "buffer".
24 20
25 21 perfiles * alturas * canales
26 22
27 23 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
28 24 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
29 25 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
30 26 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
31 27
32 28 Example:
33 29
34 30 dpath = "/home/myuser/data"
35 31
36 32 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
37 33
38 34 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
39 35
40 36 readerObj = VoltageReader()
41 37
42 38 readerObj.setup(dpath, startTime, endTime)
43 39
44 40 while(True):
45 41
46 42 #to get one profile
47 43 profile = readerObj.getData()
48 44
49 45 #print the profile
50 46 print profile
51 47
52 48 #If you want to see all datablock
53 49 print readerObj.datablock
54 50
55 51 if readerObj.flagNoMoreFiles:
56 52 break
57 53
58 54 """
59 55
    def __init__(self):
        """
        Initialize the VoltageReader used to read voltage data.

        A Voltage object (self.dataOut) is created internally; it will
        hold one profile of data each time getData is called, taken
        from the data buffer.  When the buffer is empty a new block of
        data is read from file.

        Affected:
            self.dataOut

        Return:
            None
        """

        ProcessingUnit.__init__(self)

        self.ext = ".r"
        self.optchar = "D"
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()
        self.lastUTTime = 0
        # Sentinel: any huge index forces a block read on first getData.
        self.profileIndex = 2**32 - 1
        self.dataOut = Voltage()
        self.selBlocksize = None
        self.selBlocktime = None
93 89 def createObjByDefault(self):
94 90
95 91 dataObj = Voltage()
96 92
97 93 return dataObj
98 94
99 95 def __hasNotDataInBuffer(self):
100 96
101 97 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
102 98 return 1
103 99
104 100 return 0
105 101
106 102 def getBlockDimension(self):
107 103 """
108 104 Obtiene la cantidad de puntos a leer por cada bloque de datos
109 105
110 106 Affected:
111 107 self.blocksize
112 108
113 109 Return:
114 110 None
115 111 """
116 112 pts2read = self.processingHeaderObj.profilesPerBlock * \
117 113 self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
118 114 self.blocksize = pts2read
119 115
    def readBlock(self):
        """
        Read the next block of data from the current position of the
        file pointer (self.fp) and update all block-related parameters
        (metadata + data).  The data read is stored in the buffer and
        the buffer counter is reset to 0.

        Inputs:
            None

        Return:
            1 when the block was read successfully.

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Exceptions:
            If a read block is not a valid block
        """

        # if self.server is not None:
        # self.zBlock = self.receiver.recv()
        # self.zHeader = self.zBlock[:24]
        # self.zDataBlock = self.zBlock[24:]
        # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
        # self.processingHeaderObj.profilesPerBlock = 240
        # self.processingHeaderObj.nHeights = 248
        # self.systemHeaderObj.nChannels
        # else:
        # Remember where this block starts so a short read can be retried.
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)

        try:
            junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                                 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
        except:
            # The read block does not have enough data yet; wait for the
            # file to grow (online case) and retry from the same offset.

            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
                junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                                     self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
            # return 0

        # Dimensions : nChannels, nProfiles, nSamples
        junk = numpy.transpose(junk, (2, 0, 1))
        # Combine the interleaved real/imag fields into complex samples.
        self.datablock = junk['real'] + junk['imag'] * 1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
182 178
    def getFirstHeader(self):
        # Propagate the file headers just read into the output object
        # and derive per-profile metadata (dtype, heights, channels).

        self.getBasicHeader()

        self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        if self.nTxs > 1:
            # Splitting profiles into nTxs transmissions shortens the IPP.
            self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
        # Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj.

        # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
        #
        # if self.radarControllerHeaderObj.code is not None:
        #
        # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
        #
        # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
        #
        # self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        # Height axis in the header's units (firstHeight + k * deltaHeight).
        self.dataOut.heightList = numpy.arange(
            self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight

        self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        # assume the data has not been decoded
        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode

        # assume the data has not been deflipped
        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
    def reshapeData(self):
        # When one radar IPP holds nTxs transmissions, re-shape the raw
        # block so each transmission becomes its own profile: profiles
        # multiply by nTxs while heights divide by nTxs.

        if self.nTxs < 0:
            return

        if self.nTxs == 1:
            # Nothing to re-shape.
            return

        if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
            raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
                1. / self.nTxs, self.processingHeaderObj.profilesPerBlock))

        if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
            raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % (
                self.nTxs, self.processingHeaderObj.nHeights))

        self.datablock = self.datablock.reshape(
            (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, int(self.processingHeaderObj.nHeights / self.nTxs)))

        # Keep the derived metadata consistent with the new shape.
        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
            self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
        self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs

        return
251 247
252 248 def readFirstHeaderFromServer(self):
253 249
254 250 self.getFirstHeader()
255 251
256 252 self.firstHeaderSize = self.basicHeaderObj.size
257 253
258 254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
259 255 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
260 256 if datatype == 0:
261 257 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
262 258 elif datatype == 1:
263 259 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
264 260 elif datatype == 2:
265 261 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
266 262 elif datatype == 3:
267 263 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
268 264 elif datatype == 4:
269 265 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
270 266 elif datatype == 5:
271 267 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
272 268 else:
273 269 raise ValueError('Data type was not defined')
274 270
275 271 self.dtype = datatype_str
276 272 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
277 273 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
278 274 self.firstHeaderSize + self.basicHeaderSize * \
279 275 (self.processingHeaderObj.dataBlocksPerFile - 1)
280 276 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
281 277 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
282 278 self.getBlockDimension()
283 279
284 280 def getFromServer(self):
285 281 self.flagDiscontinuousBlock = 0
286 282 self.profileIndex = 0
287 283 self.flagIsNewBlock = 1
288 284 self.dataOut.flagNoData = False
289 285 self.nTotalBlocks += 1
290 286 self.nReadBlocks += 1
291 287 self.blockPointer = 0
292 288
293 289 block = self.receiver.recv()
294 290
295 291 self.basicHeaderObj.read(block[self.blockPointer:])
296 292 self.blockPointer += self.basicHeaderObj.length
297 293 self.systemHeaderObj.read(block[self.blockPointer:])
298 294 self.blockPointer += self.systemHeaderObj.length
299 295 self.radarControllerHeaderObj.read(block[self.blockPointer:])
300 296 self.blockPointer += self.radarControllerHeaderObj.length
301 297 self.processingHeaderObj.read(block[self.blockPointer:])
302 298 self.blockPointer += self.processingHeaderObj.length
303 299 self.readFirstHeaderFromServer()
304 300
305 301 timestamp = self.basicHeaderObj.get_datatime()
306 302 print('[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp))
307 303 current_pointer_location = self.blockPointer
308 304 junk = numpy.fromstring(
309 305 block[self.blockPointer:], self.dtype, self.blocksize)
310 306
311 307 try:
312 308 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
313 309 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
314 310 except:
315 311 # print "The read block (%3d) has not enough data" %self.nReadBlocks
316 312 if self.waitDataBlock(pointer_location=current_pointer_location):
317 313 junk = numpy.fromstring(
318 314 block[self.blockPointer:], self.dtype, self.blocksize)
319 315 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
320 316 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
321 317 # return 0
322 318
323 319 # Dimensions : nChannels, nProfiles, nSamples
324 320
325 321 junk = numpy.transpose(junk, (2, 0, 1))
326 322 self.datablock = junk['real'] + junk['imag'] * 1j
327 323 self.profileIndex = 0
328 324 if self.selBlocksize == None:
329 325 self.selBlocksize = self.dataOut.nProfiles
330 326 if self.selBlocktime != None:
331 327 if self.dataOut.nCohInt is not None:
332 328 nCohInt = self.dataOut.nCohInt
333 329 else:
334 330 nCohInt = 1
335 331 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
336 332 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
337 333 self.dataOut.data = self.datablock[:,
338 334 self.profileIndex:self.profileIndex + self.selBlocksize, :]
339 335 datasize = self.dataOut.data.shape[1]
340 336 if datasize < self.selBlocksize:
341 337 buffer = numpy.zeros(
342 338 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
343 339 buffer[:, :datasize, :] = self.dataOut.data
344 340 self.dataOut.data = buffer
345 341 self.profileIndex = blockIndex
346 342
347 343 self.dataOut.flagDataAsBlock = True
348 344 self.flagIsNewBlock = 1
349 345 self.dataOut.realtime = self.online
350 346
351 347 return self.dataOut.data
352 348
    def getData(self):
        """
        getData copies one unit of data (one profile) from the read
        buffer into self.dataOut (type "Voltage") together with all its
        associated metadata.  When the read buffer is empty, a new data
        block is fetched via "readNextBlock".

        It also increments the buffer counter "self.profileIndex" by 1.

        Return:

            If the flag self.getByBlock is set, the whole block is copied
            into self.dataOut and self.profileIndex equals the total
            number of profiles read from the file.

            If self.getByBlock == False:

                self.dataOut.data = buffer[:, thisProfile, :]

                shape = [nChannels, nHeis]

            If self.getByBlock == True:

                self.dataOut.data = buffer[:, :, :]

                shape = [nChannels, nProfiles, nHeis]

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return 0
        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0
        if self.__hasNotDataInBuffer():
            if not(self.readNextBlock()):
                return 0

        self.getFirstHeader()

        self.reshapeData()
        if self.datablock is None:
            self.dataOut.flagNoData = True
            return 0

        if not self.getByBlock:

            """
            Return profile by profile

            If nTxs > 1 then one profile is divided by nTxs and number of total
            blocks is increased by nTxs (nProfiles *= nTxs)
            """
            self.dataOut.flagDataAsBlock = False
            self.dataOut.data = self.datablock[:, self.profileIndex, :]
            self.dataOut.profileIndex = self.profileIndex

            self.profileIndex += 1

        else:
            """
            Return a block
            """
            # Default block size is one full block of profiles; a time
            # selection (selBlocktime) overrides it.
            if self.selBlocksize == None:
                self.selBlocksize = self.dataOut.nProfiles
            if self.selBlocktime != None:
                if self.dataOut.nCohInt is not None:
                    nCohInt = self.dataOut.nCohInt
                else:
                    nCohInt = 1
                self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
                    nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))

            self.dataOut.data = self.datablock[:,
                                               self.profileIndex:self.profileIndex + self.selBlocksize, :]
            self.profileIndex += self.selBlocksize
            datasize = self.dataOut.data.shape[1]

            if datasize < self.selBlocksize:
                # Not enough profiles left in this block: keep reading
                # subsequent blocks and stack them until full.
                buffer = numpy.zeros(
                    (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
                buffer[:, :datasize, :] = self.dataOut.data

                while datasize < self.selBlocksize:  # Not enough profiles to fill the block
                    if not(self.readNextBlock()):
                        return 0
                    self.getFirstHeader()
                    self.reshapeData()
                    if self.datablock is None:
                        self.dataOut.flagNoData = True
                        return 0
                    # stack data
                    blockIndex = self.selBlocksize - datasize
                    datablock1 = self.datablock[:, :blockIndex, :]

                    buffer[:, datasize:datasize +
                           datablock1.shape[1], :] = datablock1
                    datasize += datablock1.shape[1]

                self.dataOut.data = buffer
                # Next read resumes right after the profiles consumed
                # from the last stacked block.
                self.profileIndex = blockIndex

            self.dataOut.flagDataAsBlock = True
            self.dataOut.nProfiles = self.dataOut.data.shape[1]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.dataOut.realtime = self.online

        return self.dataOut.data
472 468
473 469
474 470 @MPDecorator
475 471 class VoltageWriter(JRODataWriter, Operation):
476 472 """
477 473 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
478 474 de los datos siempre se realiza por bloques.
479 475 """
480 476
481 477 ext = ".r"
482 478
483 479 optchar = "D"
484 480
485 481 shapeBuffer = None
486 482
    def __init__(self):#, **kwargs):
        """
        Initialize the VoltageWriter used to write processed voltage
        data files.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self)#, **kwargs)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.blockIndex = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()
527 523
528 524 def hasAllDataInBuffer(self):
529 525 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
530 526 return 1
531 527 return 0
532 528
533 529 def setBlockDimension(self):
534 530 """
535 531 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
536 532
537 533 Affected:
538 534 self.shape_spc_Buffer
539 535 self.shape_cspc_Buffer
540 536 self.shape_dc_Buffer
541 537
542 538 Return: None
543 539 """
544 540 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
545 541 self.processingHeaderObj.nHeights,
546 542 self.systemHeaderObj.nChannels)
547 543
548 544 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
549 545 self.processingHeaderObj.profilesPerBlock,
550 546 self.processingHeaderObj.nHeights),
551 547 dtype=numpy.dtype('complex64'))
552 548
    def writeBlock(self):
        """
        Write the buffered data block to the currently open output file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        # In-memory layout is (channels, profiles, heights); on disk it
        # is (profiles, heights, channels).
        junk = numpy.transpose(self.datablock, (1, 2, 0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        # Reset the buffer so the next block starts clean.
        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

        # print "[Writing] Block = %04d" %self.blockIndex
587 583
    def putData(self):
        """
        Append one profile to the block buffer and flush the block to
        file once it is complete.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : when there is no data, or no more files can be written
            1 : when a block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A discontinuity invalidates the partial block: reset the
        # buffer and start a new file.
        if self.dataOut.flagDiscontinuousBlock:
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            # if self.flagIsNewFile:
            self.writeNextBlock()
            # self.setFirstHeader()

        return 1
623 619
624 620 def __getBlockSize(self):
625 621 '''
626 622 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
627 623 '''
628 624
629 625 dtype_width = self.getDtypeWidth()
630 626
631 627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
632 628 self.profilesPerBlock * dtype_width * 2)
633 629
634 630 return blocksize
635 631
    def setFirstHeader(self):
        """
        Build the first-header copies for the output file from the
        current dataOut metadata.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.nWindows = 1
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        # When the source data is of type Voltage
        self.processingHeaderObj.nIncohInt = 1
        # When the source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud

        if self.processingHeaderObj.nWindows != 0:
            # Height sampling window: first height plus uniform spacing.
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
                self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

        self.processingHeaderObj.processFlags = self.getProcessFlags()

        self.setBasicHeader()
680 676 No newline at end of file
@@ -1,402 +1,402
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import time
11 11 from time import gmtime
12 12
13 13 from numpy import transpose
14 14
15 15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
16 16 from schainpy.model.data.jrodata import Parameters
17 17
18 @MPDecorator
18
class BLTRParametersProc(ProcessingUnit):
    '''
    Processing unit for BLTR parameters data (winds)

    Inputs:
        self.dataOut.nmodes - Number of operation modes
        self.dataOut.nchannels - Number of channels
        self.dataOut.nranges - Number of ranges

        self.dataOut.data_SNR - SNR array
        self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
        self.dataOut.height - Height array (km)
        self.dataOut.time - Time array (seconds)

        self.dataOut.fileIndex - Index of the file currently read
        self.dataOut.lat - Latitude coordinate of BLTR location

        self.dataOut.doy - Experiment doy (number of the day in the current year)
        self.dataOut.month - Experiment month
        self.dataOut.day - Experiment day
        self.dataOut.year - Experiment year
    '''

    def __init__(self):
        '''
        Create the unit with an empty Parameters container as output.
        '''
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()

    def setup(self, mode):
        '''
        Remember which resolution mode this unit operates on.
        '''
        self.dataOut.mode = mode

    def run(self, mode, snr_threshold=None):
        '''
        Inputs:
            mode = High resolution (0) or Low resolution (1) data
            snr_threshold = snr filter value
        '''

        if not self.isConfig:
            self.setup(mode)
            self.isConfig = True

        if self.dataIn.type == 'Parameters':
            self.dataOut.copy(self.dataIn)

        # Keep only the slice belonging to the requested mode
        self.dataOut.data_param = self.dataOut.data[mode]
        self.dataOut.heightList = self.dataOut.height[0]
        self.dataOut.data_SNR = self.dataOut.data_SNR[mode]

        if snr_threshold is not None:
            # Blank wind components wherever the channel-averaged SNR (dB)
            # falls at or below the threshold
            mean_snr_db = 10 * numpy.log10(numpy.average(self.dataOut.data_SNR, axis=0))
            low_snr = mean_snr_db <= snr_threshold
            for component in range(3):
                self.dataOut.data_param[component][low_snr] = numpy.nan
77 77
78 78 # TODO
79 @MPDecorator
79
class OutliersFilter(Operation):
    # Operation that removes outlier wind estimates from
    # dataOut.data_output using smooth/median window criteria; see run().

    def __init__(self):
        '''
        Plain Operation constructor; this filter keeps no state of its own.
        '''
        Operation.__init__(self)
86 86
    def run(self, svalue2, method, factor, filter, npoints=9):
        '''
        Inputs:
        svalue2 - string to choose axis filtering
        method - 0 for SMOOTH or 1 for MEDIAN
        factor - number used to set threshold
        filter - 1 for data filtering using the standard deviation criteria else 0
        npoints - number of points for mask filter

        NOTE(review): this method is broken as written. It references
        `svalue`, `output_array` and `m`, none of which are defined here
        (the `svalue` parameter and the per-mode loop appear to have been
        lost in the controller refactor), so calling it raises NameError.
        Also the `filter` argument is never used.
        '''

        print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor))


        yaxis = self.dataOut.heightList
        xaxis = numpy.array([[self.dataOut.utctime]])

        # NOTE(review): only the last of these three assignments survives;
        # the zonal/meridional slices are fetched and immediately discarded
        # Zonal
        value_temp = self.dataOut.data_output[0]

        # Meridional
        value_temp = self.dataOut.data_output[1]

        # Vertical
        value_temp = numpy.transpose(self.dataOut.data_output[2])

        htemp = yaxis
        std = value_temp
        for h in range(len(htemp)):
            nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
            minvalid = npoints

            # only filter when there are more finite samples than the window size
            if nvalues_valid > minvalid:

                if method == 0:
                    # SMOOTH: residual against a boxcar-smoothed profile
                    w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)


                if method == 1:
                    # MEDIAN: residual against a running-median profile
                    w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)

                # sample standard deviation of the finite residuals
                dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)

                threshold = dw*factor
                value_temp[numpy.where(w > threshold),h] = numpy.nan
                value_temp[numpy.where(w < -1*threshold),h] = numpy.nan


        # At the end
        if svalue2 == 'inHeight':
            value_temp = numpy.transpose(value_temp)
            output_array[:,m] = value_temp

        if svalue == 'zonal':
            self.dataOut.data_output[0] = output_array

        elif svalue == 'meridional':
            self.dataOut.data_output[1] = output_array

        elif svalue == 'vertical':
            self.dataOut.data_output[2] = output_array

        return self.dataOut.data_output
153 153
154 154
155 155 def Median(self,input,width):
156 156 '''
157 157 Inputs:
158 158 input - Velocity array
159 159 width - Number of points for mask filter
160 160
161 161 '''
162 162
163 163 if numpy.mod(width,2) == 1:
164 164 pc = int((width - 1) / 2)
165 165 cont = 0
166 166 output = []
167 167
168 168 for i in range(len(input)):
169 169 if i >= pc and i < len(input) - pc:
170 170 new2 = input[i-pc:i+pc+1]
171 171 temp = numpy.where(numpy.isfinite(new2))
172 172 new = new2[temp]
173 173 value = numpy.median(new)
174 174 output.append(value)
175 175
176 176 output = numpy.array(output)
177 177 output = numpy.hstack((input[0:pc],output))
178 178 output = numpy.hstack((output,input[-pc:len(input)]))
179 179
180 180 return output
181 181
182 182 def Smooth(self,input,width,edge_truncate = None):
183 183 '''
184 184 Inputs:
185 185 input - Velocity array
186 186 width - Number of points for mask filter
187 187 edge_truncate - 1 for truncate the convolution product else
188 188
189 189 '''
190 190
191 191 if numpy.mod(width,2) == 0:
192 192 real_width = width + 1
193 193 nzeros = width / 2
194 194 else:
195 195 real_width = width
196 196 nzeros = (width - 1) / 2
197 197
198 198 half_width = int(real_width)/2
199 199 length = len(input)
200 200
201 201 gate = numpy.ones(real_width,dtype='float')
202 202 norm_of_gate = numpy.sum(gate)
203 203
204 204 nan_process = 0
205 205 nan_id = numpy.where(numpy.isnan(input))
206 206 if len(nan_id[0]) > 0:
207 207 nan_process = 1
208 208 pb = numpy.zeros(len(input))
209 209 pb[nan_id] = 1.
210 210 input[nan_id] = 0.
211 211
212 212 if edge_truncate == True:
213 213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
214 214 elif edge_truncate == False or edge_truncate == None:
215 215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
216 216 output = numpy.hstack((input[0:half_width],output))
217 217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
218 218
219 219 if nan_process:
220 220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
221 221 pb = numpy.hstack((numpy.zeros(half_width),pb))
222 222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
223 223 output[numpy.where(pb > 0.9999)] = numpy.nan
224 224 input[nan_id] = numpy.nan
225 225 return output
226 226
    def Average(self,aver=0,nhaver=1):
        '''
        Consensus-average the filtered winds in time (and optionally height).

        Inputs:
        aver - Index into the averaging-period table [1,2,3,4,6,8,12,24] hours
        nhaver - Indicates the decimation factor in heights

        Return:
            (startDTList, heights, zonal, meridional, vertical) lists, one
            entry per time bin with enough valid samples.

        NOTE(review): under Python 3 `ntime` and `num_hei` below are floats
        (true division), so `range(ntime)` / `numpy.arange(num_hei)` and the
        numpy.zeros shapes will fail; they need integer division. Also `hcm`
        is only bound when the latitude matches one of the three sites, and
        the method reads self.f_timesec / self.f_zon / self.f_mer / self.f_ver
        which are not set anywhere in this class - confirm against callers.
        '''
        nhpoints = 48

        lat_piura = -5.17
        lat_huancayo = -12.04
        lat_porcuya = -5.8

        # Site-specific mode-boundary height (hcm) and sample count
        if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12

        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12


        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
            hcm = 5.#2

        # Minimum fraction of valid samples required per average
        pdata = 0.2
        taver = [1,2,3,4,6,8,12,24]
        t0 = 0
        tf = 24
        ntime =(tf-t0)/taver[aver]
        ti = numpy.arange(ntime)
        tf = numpy.arange(ntime) + taver[aver]


        old_height = self.dataOut.heightList

        if nhaver > 1:
            num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
            deltha = 0.05*nhaver
            minhvalid = pdata*nhaver
            # NOTE(review): only the last mode's new_height survives this loop
            for im in range(self.dataOut.nmodes):
                new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.


        data_fHeigths_List = []
        data_fZonal_List = []
        data_fMeridional_List = []
        data_fVertical_List = []
        startDTList = []


        for i in range(ntime):
            height = old_height

            # Time-bin limits in local time (UTC-5)
            start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
            stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)


            limit_sec1 = time.mktime(start.timetuple())
            limit_sec2 = time.mktime(stop.timetuple())

            # Indices of samples falling inside [limit_sec1, limit_sec2)
            t1 = numpy.where(self.f_timesec >= limit_sec1)
            t2 = numpy.where(self.f_timesec < limit_sec2)
            time_select = []
            for val_sec in t1[0]:
                if val_sec in t2[0]:
                    time_select.append(val_sec)


            time_select = numpy.array(time_select,dtype = 'int')
            minvalid = numpy.ceil(pdata*nhpoints)

            zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if nhaver > 1:
                new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan

            # Skip the bin entirely when too few samples fell inside it
            if len(time_select) > minvalid:
                time_average = self.f_timesec[time_select]

                for im in range(self.dataOut.nmodes):

                    # NaN-aware mean per range gate, per component
                    for ih in range(self.dataOut.nranges):
                        if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
                            zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
                            mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
                            ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))

                    # Optional height decimation: average nhaver gates together
                    if nhaver > 1:
                        for ih in range(num_hei):
                            hvalid = numpy.arange(nhaver) + nhaver*ih

                            if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
                                new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
                                new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
                                new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
                if nhaver > 1:
                    zon_aver = new_zon_aver
                    mer_aver = new_mer_aver
                    ver_aver = new_ver_aver
                    height = new_height


                tstart = time_average[0]
                tend = time_average[-1]
                startTime = time.gmtime(tstart)

                year = startTime.tm_year
                month = startTime.tm_mon
                day = startTime.tm_mday
                hour = startTime.tm_hour
                minute = startTime.tm_min
                second = startTime.tm_sec

                startDTList.append(datetime.datetime(year,month,day,hour,minute,second))


                # Merge the modes into single 1-D arrays, selecting the
                # low-mode gates below hcm and the high-mode gates above it
                o_height = numpy.array([])
                o_zon_aver = numpy.array([])
                o_mer_aver = numpy.array([])
                o_ver_aver = numpy.array([])
                if self.dataOut.nmodes > 1:
                    for im in range(self.dataOut.nmodes):

                        if im == 0:
                            h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                        else:
                            h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))


                        ht = h_select[0]

                        o_height = numpy.hstack((o_height,height[im,ht]))
                        o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                        o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                        o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


                else:
                    # single mode: `im` here is the leftover loop variable
                    # from the mode loop above (0 when nmodes == 1)
                    h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                    ht = h_select[0]
                    o_height = numpy.hstack((o_height,height[im,ht]))
                    o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                    o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                    o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


        return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
402 402
@@ -1,143 +1,143
1 1 '''
2 2 @author: Daniel Suarez
3 3 '''
4 4 import numpy
5 5 from .jroproc_base import ProcessingUnit, Operation
6 6 from schainpy.model.data.jroamisr import AMISR
7 7
class AMISRProc(ProcessingUnit):
    """Processing unit that forwards AMISR data from input to output."""

    def __init__(self, **kwargs):
        ProcessingUnit.__init__(self, **kwargs)
        self.dataOut = AMISR()
        self.objectDict = {}

    def run(self):
        # Only AMISR-typed input is propagated
        if self.dataIn.type != 'AMISR':
            return
        self.dataOut.copy(self.dataIn)
17 17
18 18
19 class PrintInfo(Operation):
class PrintInfoAMISR(Operation):
    """Prints a one-time summary of the AMISR acquisition metadata."""

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.__isPrinted = False

    def run(self, dataOut):

        # Print only once per run
        if self.__isPrinted:
            return

        print('Number of Records by File: %d'%dataOut.nRecords)
        print('Number of Pulses: %d'%dataOut.nProfiles)
        print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
        print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
        print('Ipp Seconds: %f'%dataOut.ippSeconds)
        print('Number of Beams: %d'%dataOut.nBeams)
        print('BeamCodes:')
        for k, v in list(dataOut.beamCodeDict.items()):
            print('Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]))
        self.__isPrinted = True

        return
40 40
41 41
class BeamSelector(Operation):
    """Keeps only the profiles belonging to one beam, tagging the output
    with that beam's azimuth/zenith/gain."""

    profileIndex = None
    nProfiles = None

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.profileIndex = 0
        self.__isConfig = False

    def incIndex(self):
        # Advance the profile counter, wrapping at nProfiles
        nxt = self.profileIndex + 1
        self.profileIndex = 0 if nxt >= self.nProfiles else nxt

    def isProfileInRange(self, minIndex, maxIndex):
        return minIndex <= self.profileIndex <= maxIndex

    def isProfileInList(self, profileList):
        return self.profileIndex in profileList

    def run(self, dataOut, beam=None):

        # Invalid until the current profile matches the requested beam
        dataOut.flagNoData = True

        if not self.__isConfig:
            self.nProfiles = dataOut.nProfiles
            self.profileIndex = dataOut.profileIndex
            self.__isConfig = True

        if beam is None:
            raise ValueError("BeamSelector needs beam value")

        if self.isProfileInList(dataOut.beamRangeDict[beam]):
            info = dataOut.beamCodeDict[beam]
            dataOut.azimuth = info[1]
            dataOut.zenith = info[2]
            dataOut.gain = info[3]
            dataOut.flagNoData = False

        self.incIndex()
        return 1
99 99
class ProfileToChannels(Operation):
    """Accumulates one profile per beam into a buffer and emits the full
    buffer as a multi-channel block once every beam has contributed."""

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.buffer = None
        self.__isConfig = False
        self.__counter_chan = 0

    def isProfileInList(self, profileList):
        return self.profileIndex in profileList

    def run(self, dataOut):

        dataOut.flagNoData = True

        if not self.__isConfig:
            # One channel per beam; publish the per-beam metadata once
            nchannels = len(list(dataOut.beamRangeDict.keys()))
            self.buffer = numpy.zeros((nchannels, dataOut.nHeights), dtype='complex128')
            codes, azimuths, zeniths = [], [], []
            for ch in range(nchannels):
                codes.append(dataOut.beamCodeDict[ch][0])
                azimuths.append(dataOut.beamCodeDict[ch][1])
                zeniths.append(dataOut.beamCodeDict[ch][2])
            dataOut.beam.codeList = codes
            dataOut.beam.azimuthList = azimuths
            dataOut.beam.zenithList = zeniths
            self.__isConfig = True

        # Store the current profile in the row of whichever beam owns it
        nchan = self.buffer.shape[0]
        for ch in range(nchan):
            if dataOut.profileIndex in dataOut.beamRangeDict[ch]:
                self.buffer[ch, :] = dataOut.data
                break

        self.__counter_chan += 1

        # When every channel has been visited, flush the buffer downstream
        if self.__counter_chan >= nchan:
            self.__counter_chan = 0
            dataOut.data = self.buffer.copy()
            dataOut.channelList = list(range(nchan))
            self.__isConfig = False
            dataOut.flagNoData = False
143 143 No newline at end of file
@@ -1,429 +1,207
1 1 '''
2 Updated for multiprocessing
3 Author : Sergio Cortez
4 Jan 2018
5 Abstract:
6 Base class for processing units and operations. A decorator provides multiprocessing features and interconnect the processes created.
7 The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated.
8 The decorator handle also the methods inside the processing unit to be called from the main script (not as operations) (OPERATION -> type ='self').
9
10 Based on:
11 $Author: murco $
12 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
2 Base clases to create Processing units and operations, the MPDecorator
3 must be used in plotting and writing operations to allow to run as an
4 external process.
13 5 '''
14 6
15 import os
16 import sys
17 7 import inspect
18 8 import zmq
19 9 import time
20 10 import pickle
21 11 import traceback
22 12 try:
23 13 from queue import Queue
24 14 except:
25 15 from Queue import Queue
26 16 from threading import Thread
27 from multiprocessing import Process
28
17 from multiprocessing import Process, Queue
29 18 from schainpy.utils import log
30 19
31 20
class ProcessingUnit(object):
    '''
    Base class to create Signal Chain Units

    Subclasses implement run() to fill self.dataOut from self.dataIn; the
    operations registered with addOperation() are dispatched on every call().
    '''

    proc_type = 'processing'

    def __init__(self):

        self.dataIn = None        # upstream unit's output (set via setInput)
        self.dataOut = None       # data object produced by this unit
        self.isConfig = False     # one-time setup flag used by subclasses
        self.operations = []      # list of (operation, type, kwargs) tuples

    def setInput(self, unit):
        # Wire this unit's input to the output of an upstream unit

        self.dataIn = unit.dataOut

    def getAllowedArgs(self):
        # Argument names accepted by run(); __attrs__ overrides introspection
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            return inspect.getargspec(self.run).args

    def addOperation(self, conf, operation):
        '''
        Register an operation; conf.type decides how call() dispatches it
        ('other' runs inline, 'external' is fed through the op's queue).
        '''

        self.operations.append((operation, conf.type, conf.getKwargs()))

    def getOperationObj(self, objId):
        # NOTE(review): dead code - self.operations is now a list, so
        # .keys() raises AttributeError; leftover from the dict-based API.

        if objId not in list(self.operations.keys()):
            return None

        return self.operations[objId]

    def call(self, **kwargs):
        '''
        Execute one processing step and dispatch the registered operations.

        Returns dataOut.isReady() on success, or the string 'Error' when
        this step failed.
        '''

        try:
            if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
                # Nothing new upstream; just report readiness
                return self.dataIn.isReady()
            elif self.dataIn is None or not self.dataIn.error:
                self.run(**kwargs)
            elif self.dataIn.error:
                # Propagate the upstream failure downstream
                self.dataOut.error = self.dataIn.error
                self.dataOut.flagNoData = True
        except:
            err = traceback.format_exc()
            # SchainWarning/SchainError are logged in short form; anything
            # else gets the full traceback
            if 'SchainWarning' in err:
                log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
            elif 'SchainError' in err:
                log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
            else:
                log.error(err, self.name)
            self.dataOut.error = True

        for op, optype, opkwargs in self.operations:
            if optype == 'other' and not self.dataOut.flagNoData:
                self.dataOut = op.run(self.dataOut, **opkwargs)
            elif optype == 'external' and not self.dataOut.flagNoData:
                op.queue.put(self.dataOut)
            elif optype == 'external' and self.dataOut.error:
                # External ops must still see the error so they can shut down
                op.queue.put(self.dataOut)

        return 'Error' if self.dataOut.error else self.dataOut.isReady()

    def setup(self):

        raise NotImplementedError

    def run(self):

        raise NotImplementedError

    def close(self):

        return
116 101
117 102
class Operation(object):

    '''
    Base class for operations attached to a ProcessingUnit.

    Subclasses override run(dataIn, **kwargs); operations that need data
    from previous calls keep their accumulation state on the instance.
    '''

    proc_type = 'operation'

    def __init__(self):

        self.id = None
        self.isConfig = False

        # Default display name; decorated plot classes may override it
        if not hasattr(self, 'name'):
            self.name = self.__class__.__name__

    def getAllowedArgs(self):
        # Argument names accepted by run(); __attrs__ overrides introspection
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            return inspect.getargspec(self.run).args

    def setup(self):

        self.isConfig = True

        raise NotImplementedError

    def run(self, dataIn, **kwargs):
        """
        Perform this operation on dataIn.data and update the attributes
        of the dataIn object.

        Input:

            dataIn : JROData-type object

        Return:

            None

        Affected:
            __buffer : data reception buffer.

        """
        if not self.isConfig:
            self.setup(**kwargs)

        raise NotImplementedError

    def close(self):

        return
182 155
183 class InputQueue(Thread):
184
185 '''
186 Class to hold input data for Proccessing Units and external Operations,
187 '''
188
189 def __init__(self, project_id, inputId, lock=None):
190
191 Thread.__init__(self)
192 self.queue = Queue()
193 self.project_id = project_id
194 self.inputId = inputId
195 self.lock = lock
196 self.islocked = False
197 self.size = 0
198
199 def run(self):
200
201 c = zmq.Context()
202 self.receiver = c.socket(zmq.SUB)
203 self.receiver.connect(
204 'ipc:///tmp/schain/{}_pub'.format(self.project_id))
205 self.receiver.setsockopt(zmq.SUBSCRIBE, self.inputId.encode())
206
207 while True:
208 obj = self.receiver.recv_multipart()[1]
209 self.size += sys.getsizeof(obj)
210 self.queue.put(obj)
211
212 def get(self):
213
214 if not self.islocked and self.size/1000000 > 512:
215 self.lock.n.value += 1
216 self.islocked = True
217 self.lock.clear()
218 elif self.islocked and self.size/1000000 <= 512:
219 self.islocked = False
220 self.lock.n.value -= 1
221 if self.lock.n.value == 0:
222 self.lock.set()
223
224 obj = self.queue.get()
225 self.size -= sys.getsizeof(obj)
226 return pickle.loads(obj)
227
228 156
def MPDecorator(BaseClass):
    """
    Multiprocessing class decorator

    This function adds multiprocessing features to a BaseClass. The
    decorated class runs as a separate process that consumes data objects
    from its queue until an object flagged with `error` arrives.
    """

    class MPClass(BaseClass, Process):

        def __init__(self, *args, **kwargs):
            super(MPClass, self).__init__()
            Process.__init__(self)

            self.args = args
            self.kwargs = kwargs
            self.t = time.time()
            self.op_type = 'external'
            self.name = BaseClass.__name__
            self.__doc__ = BaseClass.__doc__

            # Plot classes are renamed after their CODE, e.g. 'SPCPlot'
            if 'plot' in self.name.lower() and not self.name.endswith('_'):
                self.name = '{}{}'.format(self.CODE.upper(), 'Plot')

            self.start_time = time.time()
            # NOTE(review): assumes the controller always passes at least
            # four positional args with the error queue at index 3 - confirm.
            self.err_queue = args[3]
            # maxsize=1 blocks the producer until this operation is ready,
            # providing backpressure instead of unbounded buffering
            self.queue = Queue(maxsize=1)
            self.myrun = BaseClass.run

        def run(self):
            # Consume data objects until one arrives flagged with .error

            while True:

                dataOut = self.queue.get()

                if not dataOut.error:
                    try:
                        BaseClass.run(self, dataOut, **self.kwargs)
                    except:
                        # Log and keep consuming; a failing plot/writer
                        # should not kill the whole chain
                        err = traceback.format_exc()
                        log.error(err.split('\n')[-2], self.name)
                else:
                    break

            self.close()

        def close(self):

            BaseClass.close(self)
            log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)

    return MPClass
@@ -1,178 +1,178
1 1 import numpy
2 2
3 3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import Correlation, hildebrand_sekhon
4 from schainpy.model.data.jrodata import Correlation
5 5
class CorrelationProc(ProcessingUnit):
    """
    Compute correlation functions from Voltage data.

    For every channel pair in ``pairsList`` the auto/cross correlation is
    computed either along time (lags in profile units, ``mode='time'``) or
    along height (lags in range-gate units, ``mode='height'``) and stored
    in a ``Correlation`` data object (``self.dataOut``).
    """

    pairsList = None

    data_cf = None

    def __init__(self, **kwargs):

        ProcessingUnit.__init__(self, **kwargs)

        self.objectDict = {}
        self.buffer = None
        self.firstdatatime = None
        self.profIndex = 0
        self.dataOut = Correlation()

    def __updateObjFromVoltage(self):
        # Propagate acquisition metadata from the input Voltage object to
        # the output Correlation object.
        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.dstFlag = self.dataIn.dstFlag
        self.dataOut.errorCount = self.dataIn.errorCount
        self.dataOut.useLocalTime = self.dataIn.useLocalTime

        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        self.dataOut.nBaud = self.dataIn.nBaud
        self.dataOut.nCode = self.dataIn.nCode
        self.dataOut.code = self.dataIn.code
        self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
        self.dataOut.utctime = self.firstdatatime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData  # data assumed already decoded
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData  # data assumed not flipped
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.nProfiles = self.dataIn.nProfiles
        # NOTE(review): utctime is assigned twice (firstdatatime above);
        # the input's utctime wins, exactly as in the original code.
        self.dataOut.utctime = self.dataIn.utctime

    def removeDC(self, jspectra):
        """
        Subtract the DC level (mean over profiles) from every channel.

        ``jspectra`` (nChannels, nProfiles, nHeights) is modified in place
        and also returned for convenience.
        """
        nChannel = jspectra.shape[0]

        for i in range(nChannel):
            jspectra_DC = numpy.mean(jspectra[i, :, :], axis=0)
            jspectra[i, :, :] = jspectra[i, :, :] - jspectra_DC

        return jspectra

    def removeNoise(self, mode=2):
        """
        Subtract the noise level from the zero-lag correlation and update
        ``noise`` and ``SNR`` on the output object.

        Inputs:
            mode : noise estimation mode passed to ``dataOut.getNoise``.
        """
        indR = numpy.where(self.dataOut.lagR == 0)[0][0]
        indT = numpy.where(self.dataOut.lagT == 0)[0][0]

        jspectra = self.dataOut.data_corr[:, :, indR, :]

        freq_dc = indT

        NPot = self.dataOut.getNoise(mode)
        jspectra[:, freq_dc, :] = jspectra[:, freq_dc, :] - NPot
        SPot = jspectra[:, freq_dc, :]
        pairsAutoCorr = self.dataOut.getPairsAutoCorr()
        self.dataOut.noise = NPot
        self.dataOut.SNR = (SPot / NPot)[pairsAutoCorr]
        self.dataOut.data_corr[:, :, indR, :] = jspectra

        return 1

    def run(self, lags=None, mode='time', pairsList=None, fullBuffer=False,
            nAvg=1, removeDC=False, splitCF=False):
        """
        Compute the correlation functions for the current data block.

        Inputs:
            lags : sequence of integer lags to evaluate (default: all lags)
            mode : 'time' (lags in profiles) or 'height' (lags in gates)
            pairsList : list of (ch0, ch1) channel pairs; (i, i) gives an ACF
            fullBuffer : keep the full (pair, lag, profile, height) buffer
                         instead of summing over profiles
            nAvg : profiles averaged together when fullBuffer is used
            removeDC : remove the DC component from the input data first
            splitCF : unused, kept for interface compatibility
        """

        self.dataOut.flagNoData = True

        if self.dataIn.type == "Correlation":
            self.dataOut.copy(self.dataIn)
            return

        if self.dataIn.type == "Voltage":

            nProfiles = self.dataIn.nProfiles
            nHeights = self.dataIn.nHeights
            data_pre = self.dataIn.data

            # --------------- Remove DC ------------
            if removeDC:
                data_pre = self.removeDC(data_pre)

            self.__updateObjFromVoltage()

            # ----------------------------------------------------------
            # Create the temporal buffers
            if fullBuffer:
                tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights),
                                  dtype='complex') * numpy.nan
            elif mode == 'time':
                if lags is None:  # bug fix: 'is None' ('==' misbehaves on arrays)
                    lags = numpy.arange(-nProfiles + 1, nProfiles)
                tmp = numpy.zeros((len(pairsList), len(lags), nHeights), dtype='complex')
            elif mode == 'height':
                if lags is None:
                    lags = numpy.arange(-nHeights + 1, nHeights)
                # bug fix: the shape must be one tuple argument; the original
                # passed it as two positional args plus a duplicate dtype,
                # raising TypeError whenever mode == 'height'
                tmp = numpy.zeros((len(pairsList), len(lags), nProfiles), dtype='complex')

            # Accumulate lagged products per pair
            for l in range(len(pairsList)):

                ch0 = pairsList[l][0]
                ch1 = pairsList[l][1]

                for i in range(len(lags)):
                    idx = lags[i]

                    if idx >= 0:
                        if mode == 'time':
                            ccf0 = data_pre[ch0, :nProfiles - idx, :] * \
                                numpy.conj(data_pre[ch1, idx:, :])  # time lags
                        else:
                            # bug fix: slice ':nHeights-idx' (the original used a
                            # bare index, producing mismatched operand shapes)
                            ccf0 = data_pre[ch0, :, :nHeights - idx] * \
                                numpy.conj(data_pre[ch1, :, idx:])  # height lags
                    else:
                        if mode == 'time':
                            ccf0 = data_pre[ch0, -idx:, :] * \
                                numpy.conj(data_pre[ch1, :nProfiles + idx, :])
                        else:
                            ccf0 = data_pre[ch0, :, -idx:] * \
                                numpy.conj(data_pre[ch1, :, :nHeights + idx])

                    if fullBuffer:
                        tmp[l, i, :ccf0.shape[0], :] = ccf0
                    elif mode == 'time':
                        tmp[l, i, :] = numpy.sum(ccf0, axis=0)  # sum over profiles
                    else:
                        # NOTE(review): in height mode the reduction must run
                        # over the height axis so the result matches tmp's
                        # profile axis (this path crashed before the zeros fix)
                        tmp[l, i, :] = numpy.sum(ccf0, axis=1)

            # -----------------------------------------------------------------
            if fullBuffer:
                # bug fix: integer division so reshape receives an int axis size
                tmp = numpy.sum(
                    numpy.reshape(tmp, (tmp.shape[0], tmp.shape[1],
                                        tmp.shape[2] // nAvg, nAvg, tmp.shape[3])),
                    axis=3)
                self.dataOut.nAvg = nAvg

            self.dataOut.data_cf = tmp
            self.dataOut.mode = mode
            self.dataOut.nLags = len(lags)
            self.dataOut.pairsList = pairsList
            self.dataOut.nPairs = len(pairsList)

            # Normalization factors: lag step in seconds (time mode) or in
            # kilometers (height mode)
            if mode == 'time':
                delta = self.dataIn.ippSeconds * self.dataIn.nCohInt
            else:
                delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
            self.dataOut.lagRange = numpy.array(lags) * delta
            self.dataOut.flagNoData = False
            return
@@ -1,350 +1,350
1 1 import numpy
2 2
3 3 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
4 4 from schainpy.model.data.jrodata import SpectraHeis
5 5 from schainpy.utils import log
6 6
7 7
8 @MPDecorator
8
class SpectraHeisProc(ProcessingUnit):
    """
    Generate SpectraHeis data from Voltage or Fits inputs by taking the
    FFT along the height/sample axis of each profile.
    """

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = SpectraHeis()

    def __updateObjFromVoltage(self):
        # Copy acquisition metadata from the Voltage input to the
        # SpectraHeis output.
        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.dstFlag = self.dataIn.dstFlag
        self.dataOut.errorCount = self.dataIn.errorCount
        self.dataOut.useLocalTime = self.dataIn.useLocalTime

        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        self.dataOut.nBaud = self.dataIn.nBaud
        self.dataOut.nCode = self.dataIn.nCode
        self.dataOut.code = self.dataIn.code
        self.dataOut.ippFactor = 1
        self.dataOut.noise_estimation = None
        # One FFT point per input height sample
        self.dataOut.nFFTPoints = self.dataIn.nHeights
        self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
        self.dataOut.utctime = self.dataIn.utctime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData  # data assumed already decoded
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData  # data assumed not flipped
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = 1
        self.dataOut.windowOfFilter = self.dataIn.windowOfFilter

    def __updateObjFromFits(self):
        # Copy metadata and spectra straight from a Fits input.
        self.dataOut.utctime = self.dataIn.utctime

        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.data_spc = self.dataIn.data
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = self.dataIn.nIncohInt
        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.useLocalTime = True

    def __getFft(self):
        # Power spectrum: |FFT|^2 / nFFTPoints, with the FFT taken along
        # the height axis and shifted so zero frequency is centered.
        fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
        fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
        spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / (self.dataOut.nFFTPoints)
        self.dataOut.data_spc = spc

    def run(self):
        """
        Fill ``self.dataOut`` from ``self.dataIn``.

        Accepts Fits, SpectraHeis (pass-through copy) or Voltage inputs;
        raises ValueError for any other input type.
        """

        self.dataOut.flagNoData = True

        if self.dataIn.type == "Fits":
            self.__updateObjFromFits()
            self.dataOut.flagNoData = False
            return

        if self.dataIn.type == "SpectraHeis":
            self.dataOut.copy(self.dataIn)
            return

        if self.dataIn.type == "Voltage":
            self.__updateObjFromVoltage()
            self.__getFft()
            self.dataOut.flagNoData = False
            return

        raise ValueError("The type object %s is not valid" % (self.dataIn.type))

    def selectChannels(self, channelList):
        """Select channels by channel id (delegates to selectChannelsByIndex)."""

        channelIndexList = [self.dataOut.channelList.index(channel)
                            for channel in channelList]

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Keep only the channels whose indexes are in channelIndexList,
        e.g. [2, 3, 7].

        Affected:
            self.dataOut.data_spc
            self.dataOut.channelList

        Raises:
            ValueError: if an index is not in dataOut.channelIndexList.

        Return:
            1 on success (kept for backward compatibility)
        """

        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                # bug fix: report the offending list inside the exception
                # instead of printing it to stdout (debug leftover)
                raise ValueError("The value %d in channelIndexList %s is not valid"
                                 % (channelIndex, channelIndexList))

        data_spc = self.dataOut.data_spc[channelIndexList, :]

        self.dataOut.data_spc = data_spc
        self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]

        return 1
150 150
151 151
class IncohInt4SpectraHeis(Operation):
    """
    Incoherent integration of SpectraHeis data, either by a fixed number
    of profiles (``n``) or by a time interval (``timeInterval``), with or
    without an overlapping (sliding) buffer.
    """

    isConfig = False

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None

    def __init__(self):

        Operation.__init__(self)

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration. If "n" is given this one
                           is ignored.
            overlapping : use a sliding (overlapping) window of n profiles

        Raises:
            ValueError: if neither n nor timeInterval is specified.
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval
            self.n = 9999  # effectively unbounded; time decides readiness
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):
        """
        Add a profile to the buffer and increase __profIndex by one.
        """

        if not self.__withOverapping:
            # Plain accumulation: just add the new profile to the sum
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping mode: keep the last n profiles stacked
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        if self.__buffer is None:
            # First profile seeds the window
            self.__buffer = data
            self.__profIndex += 1
            return

        if self.__profIndex < self.n:
            # Still filling the window
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Window full: drop the oldest profile, append the newest
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n - 1] = data
        self.__profIndex = self.n
        return

    def pushData(self):
        """
        Return the sum of the buffered profiles and how many were summed.

        Affected:

            self.__buffer
            self.__profIndex
        """

        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Overlapping: sum the stacked window but keep it for reuse
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        # Integration driven by a fixed number of profiles.
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:
            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        # Integration driven by elapsed time since the period started.
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """
        Feed one profile; return (avgdata, avgdatatime) when the
        integration period is complete, (None, None) otherwise.
        """

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        # bug fix: measure the elapsed time BEFORE updating
        # __lastdatatime; the original updated it first, so deltatime was
        # always zero and the overlapping __initime never advanced.
        deltatime = datatime - self.__lastdatatime
        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, overlapping=False, **kwargs):
        """
        Operation entry point: integrate dataOut.data_spc incoherently and
        mark the output as ready when an integration period completes.
        """

        if not self.isConfig:
            self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping)
            self.isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data_spc = avgdata
            dataOut.nIncohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False

        return dataOut
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now