##// END OF EJS Templates
Rewrite controller; remove MPDecorator from units (keep it for plots and writers); use queues for inter-process communication instead of zmq; 'self' operations are no longer supported
Juan C. Espinoza -
r1287:af11e4aac00c
parent child
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,241 +1,238
1 import click
1 import click
2 import schainpy
3 import subprocess
2 import subprocess
4 import os
3 import os
5 import sys
4 import sys
6 import glob
5 import glob
7 from multiprocessing import cpu_count
6 import schainpy
8 from schainpy.controller import Project
7 from schainpy.controller import Project
9 from schainpy.model import Operation, ProcessingUnit
8 from schainpy.model import Operation, ProcessingUnit
10 from schainpy.utils import log
9 from schainpy.utils import log
11 from importlib import import_module
10 from importlib import import_module
12 from pydoc import locate
11 from pydoc import locate
13 from fuzzywuzzy import process
12 from fuzzywuzzy import process
14 from schainpy.cli import templates
13 from schainpy.cli import templates
15 import inspect
14 import inspect
16 try:
15 try:
17 from queue import Queue
16 from queue import Queue
18 except:
17 except:
19 from Queue import Queue
18 from Queue import Queue
20
19
21
20
22 def getProcs():
21 def getProcs():
23 modules = dir(schainpy.model)
22 modules = dir(schainpy.model)
24 procs = check_module(modules, 'processing')
23 procs = check_module(modules, 'processing')
25 try:
24 try:
26 procs.remove('ProcessingUnit')
25 procs.remove('ProcessingUnit')
27 except Exception as e:
26 except Exception as e:
28 pass
27 pass
29 return procs
28 return procs
30
29
31 def getOperations():
30 def getOperations():
32 module = dir(schainpy.model)
31 module = dir(schainpy.model)
33 noProcs = [x for x in module if not x.endswith('Proc')]
32 noProcs = [x for x in module if not x.endswith('Proc')]
34 operations = check_module(noProcs, 'operation')
33 operations = check_module(noProcs, 'operation')
35 try:
34 try:
36 operations.remove('Operation')
35 operations.remove('Operation')
37 operations.remove('Figure')
36 operations.remove('Figure')
38 operations.remove('Plot')
37 operations.remove('Plot')
39 except Exception as e:
38 except Exception as e:
40 pass
39 pass
41 return operations
40 return operations
42
41
43 def getArgs(op):
42 def getArgs(op):
44 module = locate('schainpy.model.{}'.format(op))
43 module = locate('schainpy.model.{}'.format(op))
45 try:
44 try:
46 obj = module(1,2,3,Queue(),5,6)
45 obj = module(1, 2, 3, Queue())
47 except:
46 except:
48 obj = module()
47 obj = module()
49
48
50 if hasattr(obj, '__attrs__'):
49 if hasattr(obj, '__attrs__'):
51 args = obj.__attrs__
50 args = obj.__attrs__
52 else:
51 else:
53 if hasattr(obj, 'myrun'):
52 if hasattr(obj, 'myrun'):
54 args = inspect.getfullargspec(obj.myrun).args
53 args = inspect.getfullargspec(obj.myrun).args
55 else:
54 else:
56 args = inspect.getfullargspec(obj.run).args
55 args = inspect.getfullargspec(obj.run).args
57
56
58 try:
57 try:
59 args.remove('self')
58 args.remove('self')
60 except Exception as e:
59 except Exception as e:
61 pass
60 pass
62 try:
61 try:
63 args.remove('dataOut')
62 args.remove('dataOut')
64 except Exception as e:
63 except Exception as e:
65 pass
64 pass
66 return args
65 return args
67
66
68 def getDoc(obj):
67 def getDoc(obj):
69 module = locate('schainpy.model.{}'.format(obj))
68 module = locate('schainpy.model.{}'.format(obj))
70 try:
69 try:
71 obj = module(1,2,3,Queue(),5,6)
70 obj = module(1, 2, 3, Queue())
72 except:
71 except:
73 obj = module()
72 obj = module()
74 return obj.__doc__
73 return obj.__doc__
75
74
76 def getAll():
75 def getAll():
77 modules = getOperations()
76 modules = getOperations()
78 modules.extend(getProcs())
77 modules.extend(getProcs())
79 return modules
78 return modules
80
79
81
80
82 def print_version(ctx, param, value):
81 def print_version(ctx, param, value):
83 if not value or ctx.resilient_parsing:
82 if not value or ctx.resilient_parsing:
84 return
83 return
85 click.echo(schainpy.__version__)
84 click.echo(schainpy.__version__)
86 ctx.exit()
85 ctx.exit()
87
86
88
87
89 PREFIX = 'experiment'
88 PREFIX = 'experiment'
90
89
91 @click.command()
90 @click.command()
92 @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str)
91 @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str)
93 @click.argument('command', default='run', required=True)
92 @click.argument('command', default='run', required=True)
94 @click.argument('nextcommand', default=None, required=False, type=str)
93 @click.argument('nextcommand', default=None, required=False, type=str)
95 def main(command, nextcommand, version):
94 def main(command, nextcommand, version):
96 """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY V3.0\n
95 """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY V3.0\n
97 Available commands.\n
96 Available commands:\n
98 xml: runs a schain XML generated file\n
97 xml: runs a schain XML generated file\n
99 run: runs any python script starting 'experiment_'\n
98 run: runs any python script'\n
100 generate: generates a template schain script\n
99 generate: generates a template schain script\n
101 list: return a list of available procs and operations\n
100 list: return a list of available procs and operations\n
102 search: return avilable operations, procs or arguments of the given
101 search: return avilable operations, procs or arguments of the given
103 operation/proc\n"""
102 operation/proc\n"""
104 if command == 'xml':
103 if command == 'xml':
105 runFromXML(nextcommand)
104 runFromXML(nextcommand)
106 elif command == 'generate':
105 elif command == 'generate':
107 generate()
106 generate()
108 elif command == 'test':
107 elif command == 'test':
109 test()
108 test()
110 elif command == 'run':
109 elif command == 'run':
111 runschain(nextcommand)
110 runschain(nextcommand)
112 elif command == 'search':
111 elif command == 'search':
113 search(nextcommand)
112 search(nextcommand)
114 elif command == 'list':
113 elif command == 'list':
115 cmdlist(nextcommand)
114 cmdlist(nextcommand)
116 else:
115 else:
117 log.error('Command {} is not defined'.format(command))
116 log.error('Command {} is not defined'.format(command))
118
117
119
118
120 def check_module(possible, instance):
119 def check_module(possible, instance):
121 def check(x):
120 def check(x):
122 try:
121 try:
123 instancia = locate('schainpy.model.{}'.format(x))
122 instancia = locate('schainpy.model.{}'.format(x))
124 ret = instancia.proc_type == instance
123 ret = instancia.proc_type == instance
125 return ret
124 return ret
126 except Exception as e:
125 except Exception as e:
127 return False
126 return False
128 clean = clean_modules(possible)
127 clean = clean_modules(possible)
129 return [x for x in clean if check(x)]
128 return [x for x in clean if check(x)]
130
129
131
130
132 def clean_modules(module):
131 def clean_modules(module):
133 noEndsUnder = [x for x in module if not x.endswith('__')]
132 noEndsUnder = [x for x in module if not x.endswith('__')]
134 noStartUnder = [x for x in noEndsUnder if not x.startswith('__')]
133 noStartUnder = [x for x in noEndsUnder if not x.startswith('__')]
135 noFullUpper = [x for x in noStartUnder if not x.isupper()]
134 noFullUpper = [x for x in noStartUnder if not x.isupper()]
136 return noFullUpper
135 return noFullUpper
137
136
138 def cmdlist(nextcommand):
137 def cmdlist(nextcommand):
139 if nextcommand is None:
138 if nextcommand is None:
140 log.error('Missing argument, available arguments: procs, operations', '')
139 log.error('Missing argument, available arguments: procs, operations', '')
141 elif nextcommand == 'procs':
140 elif nextcommand == 'procs':
142 procs = getProcs()
141 procs = getProcs()
143 log.success(
142 log.success(
144 'Current ProcessingUnits are:\n {}'.format('\n '.join(procs)), '')
143 'Current ProcessingUnits are:\n {}'.format('\n '.join(procs)), '')
145 elif nextcommand == 'operations':
144 elif nextcommand == 'operations':
146 operations = getOperations()
145 operations = getOperations()
147 log.success('Current Operations are:\n {}'.format(
146 log.success('Current Operations are:\n {}'.format(
148 '\n '.join(operations)), '')
147 '\n '.join(operations)), '')
149 else:
148 else:
150 log.error('Wrong argument', '')
149 log.error('Wrong argument', '')
151
150
152 def search(nextcommand):
151 def search(nextcommand):
153 if nextcommand is None:
152 if nextcommand is None:
154 log.error('There is no Operation/ProcessingUnit to search', '')
153 log.error('There is no Operation/ProcessingUnit to search', '')
155 else:
154 else:
156 try:
155 try:
157 args = getArgs(nextcommand)
156 args = getArgs(nextcommand)
158 doc = getDoc(nextcommand)
157 doc = getDoc(nextcommand)
159 if len(args) == 0:
160 log.success('\n{} has no arguments'.format(nextcommand), '')
161 else:
162 log.success('{}\n{}\n\narguments:\n {}'.format(
158 log.success('{}\n{}\n\narguments:\n {}'.format(
163 nextcommand, doc, ', '.join(args)), '')
159 nextcommand, doc, ', '.join(args)), ''
160 )
164 except Exception as e:
161 except Exception as e:
165 log.error('Module `{}` does not exists'.format(nextcommand), '')
162 log.error('Module `{}` does not exists'.format(nextcommand), '')
166 allModules = getAll()
163 allModules = getAll()
167 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
164 similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
168 log.success('Possible modules are: {}'.format(', '.join(similar)), '')
165 log.success('Possible modules are: {}'.format(', '.join(similar)), '')
169
166
170 def runschain(nextcommand):
167 def runschain(nextcommand):
171 if nextcommand is None:
168 if nextcommand is None:
172 currentfiles = glob.glob('./{}_*.py'.format(PREFIX))
169 currentfiles = glob.glob('./{}_*.py'.format(PREFIX))
173 numberfiles = len(currentfiles)
170 numberfiles = len(currentfiles)
174 if numberfiles > 1:
171 if numberfiles > 1:
175 log.error('There is more than one file to run')
172 log.error('There is more than one file to run')
176 elif numberfiles == 1:
173 elif numberfiles == 1:
177 subprocess.call(['python ' + currentfiles[0]], shell=True)
174 subprocess.call(['python ' + currentfiles[0]], shell=True)
178 else:
175 else:
179 log.error('There is no file to run')
176 log.error('There is no file to run')
180 else:
177 else:
181 try:
178 try:
182 subprocess.call(['python ' + nextcommand], shell=True)
179 subprocess.call(['python ' + nextcommand], shell=True)
183 except Exception as e:
180 except Exception as e:
184 log.error("I cannot run the file. Does it exists?")
181 log.error("I cannot run the file. Does it exists?")
185
182
186
183
187 def basicInputs():
184 def basicInputs():
188 inputs = {}
185 inputs = {}
189 inputs['name'] = click.prompt(
186 inputs['name'] = click.prompt(
190 'Name of the project', default="project", type=str)
187 'Name of the project', default="project", type=str)
191 inputs['desc'] = click.prompt(
188 inputs['desc'] = click.prompt(
192 'Enter a description', default="A schain project", type=str)
189 'Enter a description', default="A schain project", type=str)
193 inputs['multiprocess'] = click.prompt(
190 inputs['multiprocess'] = click.prompt(
194 '''Select data type:
191 '''Select data type:
195
192
196 - Voltage (*.r): [1]
193 - Voltage (*.r): [1]
197 - Spectra (*.pdata): [2]
194 - Spectra (*.pdata): [2]
198 - Voltage and Spectra (*.r): [3]
195 - Voltage and Spectra (*.r): [3]
199
196
200 -->''', type=int)
197 -->''', type=int)
201 inputs['path'] = click.prompt('Data path', default=os.getcwd(
198 inputs['path'] = click.prompt('Data path', default=os.getcwd(
202 ), type=click.Path(exists=True, resolve_path=True))
199 ), type=click.Path(exists=True, resolve_path=True))
203 inputs['startDate'] = click.prompt(
200 inputs['startDate'] = click.prompt(
204 'Start date', default='1970/01/01', type=str)
201 'Start date', default='1970/01/01', type=str)
205 inputs['endDate'] = click.prompt(
202 inputs['endDate'] = click.prompt(
206 'End date', default='2018/12/31', type=str)
203 'End date', default='2018/12/31', type=str)
207 inputs['startHour'] = click.prompt(
204 inputs['startHour'] = click.prompt(
208 'Start hour', default='00:00:00', type=str)
205 'Start hour', default='00:00:00', type=str)
209 inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str)
206 inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str)
210 inputs['figpath'] = inputs['path'] + '/figs'
207 inputs['figpath'] = inputs['path'] + '/figs'
211 return inputs
208 return inputs
212
209
213
210
214 def generate():
211 def generate():
215 inputs = basicInputs()
212 inputs = basicInputs()
216
213
217 if inputs['multiprocess'] == 1:
214 if inputs['multiprocess'] == 1:
218 current = templates.voltage.format(**inputs)
215 current = templates.voltage.format(**inputs)
219 elif inputs['multiprocess'] == 2:
216 elif inputs['multiprocess'] == 2:
220 current = templates.spectra.format(**inputs)
217 current = templates.spectra.format(**inputs)
221 elif inputs['multiprocess'] == 3:
218 elif inputs['multiprocess'] == 3:
222 current = templates.voltagespectra.format(**inputs)
219 current = templates.voltagespectra.format(**inputs)
223 scriptname = '{}_{}.py'.format(PREFIX, inputs['name'])
220 scriptname = '{}_{}.py'.format(PREFIX, inputs['name'])
224 script = open(scriptname, 'w')
221 script = open(scriptname, 'w')
225 try:
222 try:
226 script.write(current)
223 script.write(current)
227 log.success('Script {} generated'.format(scriptname))
224 log.success('Script {} generated'.format(scriptname))
228 except Exception as e:
225 except Exception as e:
229 log.error('I cannot create the file. Do you have writing permissions?')
226 log.error('I cannot create the file. Do you have writing permissions?')
230
227
231
228
232 def test():
229 def test():
233 log.warning('testing')
230 log.warning('testing')
234
231
235
232
236 def runFromXML(filename):
233 def runFromXML(filename):
237 controller = Project()
234 controller = Project()
238 if not controller.readXml(filename):
235 if not controller.readXml(filename):
239 return
236 return
240 controller.start()
237 controller.start()
241 return
238 return
This diff has been collapsed because it changes many lines (1158 lines changed). Show them | Hide them
@@ -1,1290 +1,648
1 '''
1 '''
2 Updated on January , 2018, for multiprocessing purposes
2 Main routines to create a Signal Chain project
3 Author: Sergio Cortez
4 Created on September , 2012
5 '''
3 '''
6 from platform import python_version
4
5 import re
7 import sys
6 import sys
8 import ast
7 import ast
9 import datetime
8 import datetime
10 import traceback
9 import traceback
11 import math
12 import time
10 import time
13 import zmq
11 from multiprocessing import Process, Queue
14 from multiprocessing import Process, Queue, Event, Value, cpu_count
15 from threading import Thread
12 from threading import Thread
16 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
13 from xml.etree.ElementTree import ElementTree, Element, SubElement
17 from xml.dom import minidom
18
19
14
20 from schainpy.admin import Alarm, SchainWarning
15 from schainpy.admin import Alarm, SchainWarning
21 from schainpy.model import *
16 from schainpy.model import *
22 from schainpy.utils import log
17 from schainpy.utils import log
23
18
24
19
25 DTYPES = {
20 class ConfBase():
26 'Voltage': '.r',
27 'Spectra': '.pdata'
28 }
29
30
31 def MPProject(project, n=cpu_count()):
32 '''
33 Project wrapper to run schain in n processes
34 '''
35
36 rconf = project.getReadUnitObj()
37 op = rconf.getOperationObj('run')
38 dt1 = op.getParameterValue('startDate')
39 dt2 = op.getParameterValue('endDate')
40 tm1 = op.getParameterValue('startTime')
41 tm2 = op.getParameterValue('endTime')
42 days = (dt2 - dt1).days
43
44 for day in range(days + 1):
45 skip = 0
46 cursor = 0
47 processes = []
48 dt = dt1 + datetime.timedelta(day)
49 dt_str = dt.strftime('%Y/%m/%d')
50 reader = JRODataReader()
51 paths, files = reader.searchFilesOffLine(path=rconf.path,
52 startDate=dt,
53 endDate=dt,
54 startTime=tm1,
55 endTime=tm2,
56 ext=DTYPES[rconf.datatype])
57 nFiles = len(files)
58 if nFiles == 0:
59 continue
60 skip = int(math.ceil(nFiles / n))
61 while nFiles > cursor * skip:
62 rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor,
63 skip=skip)
64 p = project.clone()
65 p.start()
66 processes.append(p)
67 cursor += 1
68
69 def beforeExit(exctype, value, trace):
70 for process in processes:
71 process.terminate()
72 process.join()
73 print(traceback.print_tb(trace))
74
75 sys.excepthook = beforeExit
76
77 for process in processes:
78 process.join()
79 process.terminate()
80
81 time.sleep(3)
82
83 def wait(context):
84
85 time.sleep(1)
86 c = zmq.Context()
87 receiver = c.socket(zmq.SUB)
88 receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id))
89 receiver.setsockopt(zmq.SUBSCRIBE, self.id.encode())
90 msg = receiver.recv_multipart()[1]
91 context.terminate()
92
93 class ParameterConf():
94
95 id = None
96 name = None
97 value = None
98 format = None
99
100 __formated_value = None
101
102 ELEMENTNAME = 'Parameter'
103
104 def __init__(self):
105
106 self.format = 'str'
107
108 def getElementName(self):
109
110 return self.ELEMENTNAME
111
112 def getValue(self):
113
114 value = self.value
115 format = self.format
116
117 if self.__formated_value != None:
118
119 return self.__formated_value
120
121 if format == 'obj':
122 return value
123
124 if format == 'str':
125 self.__formated_value = str(value)
126 return self.__formated_value
127
128 if value == '':
129 raise ValueError('%s: This parameter value is empty' % self.name)
130
131 if format == 'list':
132 strList = [s.strip() for s in value.split(',')]
133 self.__formated_value = strList
134
135 return self.__formated_value
136
137 if format == 'intlist':
138 '''
139 Example:
140 value = (0,1,2)
141 '''
142
143 new_value = ast.literal_eval(value)
144
145 if type(new_value) not in (tuple, list):
146 new_value = [int(new_value)]
147
148 self.__formated_value = new_value
149
150 return self.__formated_value
151
152 if format == 'floatlist':
153 '''
154 Example:
155 value = (0.5, 1.4, 2.7)
156 '''
157
158 new_value = ast.literal_eval(value)
159
160 if type(new_value) not in (tuple, list):
161 new_value = [float(new_value)]
162
163 self.__formated_value = new_value
164
165 return self.__formated_value
166
167 if format == 'date':
168 strList = value.split('/')
169 intList = [int(x) for x in strList]
170 date = datetime.date(intList[0], intList[1], intList[2])
171
172 self.__formated_value = date
173
174 return self.__formated_value
175
176 if format == 'time':
177 strList = value.split(':')
178 intList = [int(x) for x in strList]
179 time = datetime.time(intList[0], intList[1], intList[2])
180
181 self.__formated_value = time
182
183 return self.__formated_value
184
185 if format == 'pairslist':
186 '''
187 Example:
188 value = (0,1),(1,2)
189 '''
190
191 new_value = ast.literal_eval(value)
192
193 if type(new_value) not in (tuple, list):
194 raise ValueError('%s has to be a tuple or list of pairs' % value)
195
196 if type(new_value[0]) not in (tuple, list):
197 if len(new_value) != 2:
198 raise ValueError('%s has to be a tuple or list of pairs' % value)
199 new_value = [new_value]
200
201 for thisPair in new_value:
202 if len(thisPair) != 2:
203 raise ValueError('%s has to be a tuple or list of pairs' % value)
204
205 self.__formated_value = new_value
206
207 return self.__formated_value
208
209 if format == 'multilist':
210 '''
211 Example:
212 value = (0,1,2),(3,4,5)
213 '''
214 multiList = ast.literal_eval(value)
215
216 if type(multiList[0]) == int:
217 multiList = ast.literal_eval('(' + value + ')')
218
219 self.__formated_value = multiList
220
221 return self.__formated_value
222
223 if format == 'bool':
224 value = int(value)
225
226 if format == 'int':
227 value = float(value)
228
229 format_func = eval(format)
230
231 self.__formated_value = format_func(value)
232
233 return self.__formated_value
234
235 def updateId(self, new_id):
236
237 self.id = str(new_id)
238
239 def setup(self, id, name, value, format='str'):
240 self.id = str(id)
241 self.name = name
242 if format == 'obj':
243 self.value = value
244 else:
245 self.value = str(value)
246 self.format = str.lower(format)
247
248 self.getValue()
249
250 return 1
251
252 def update(self, name, value, format='str'):
253
254 self.name = name
255 self.value = str(value)
256 self.format = format
257
258 def makeXml(self, opElement):
259 if self.name not in ('queue',):
260 parmElement = SubElement(opElement, self.ELEMENTNAME)
261 parmElement.set('id', str(self.id))
262 parmElement.set('name', self.name)
263 parmElement.set('value', self.value)
264 parmElement.set('format', self.format)
265
266 def readXml(self, parmElement):
267
268 self.id = parmElement.get('id')
269 self.name = parmElement.get('name')
270 self.value = parmElement.get('value')
271 self.format = str.lower(parmElement.get('format'))
272
273 # Compatible with old signal chain version
274 if self.format == 'int' and self.name == 'idfigure':
275 self.name = 'id'
276
277 def printattr(self):
278
279 print('Parameter[%s]: name = %s, value = %s, format = %s, project_id = %s' % (self.id, self.name, self.value, self.format, self.project_id))
280
281 class OperationConf():
282
283 ELEMENTNAME = 'Operation'
284
21
285 def __init__(self):
22 def __init__(self):
286
23
287 self.id = '0'
24 self.id = '0'
288 self.name = None
25 self.name = None
289 self.priority = None
26 self.priority = None
290 self.topic = None
27 self.parameters = {}
291
28 self.object = None
292 def __getNewId(self):
29 self.operations = []
293
294 return int(self.id) * 10 + len(self.parmConfObjList) + 1
295
30
296 def getId(self):
31 def getId(self):
32
297 return self.id
33 return self.id
298
34
35 def getNewId(self):
36
37 return int(self.id) * 10 + len(self.operations) + 1
38
299 def updateId(self, new_id):
39 def updateId(self, new_id):
300
40
301 self.id = str(new_id)
41 self.id = str(new_id)
302
42
303 n = 1
43 n = 1
304 for parmObj in self.parmConfObjList:
44 for conf in self.operations:
305
45 conf_id = str(int(new_id) * 10 + n)
306 idParm = str(int(new_id) * 10 + n)
46 conf.updateId(conf_id)
307 parmObj.updateId(idParm)
308
309 n += 1
47 n += 1
310
48
311 def getElementName(self):
49 def getKwargs(self):
312
313 return self.ELEMENTNAME
314
315 def getParameterObjList(self):
316
50
317 return self.parmConfObjList
51 params = {}
318
52
319 def getParameterObj(self, parameterName):
53 for key, value in self.parameters.items():
54 if value not in (None, '', ' '):
55 params[key] = value
320
56
321 for parmConfObj in self.parmConfObjList:
57 return params
322
58
323 if parmConfObj.name != parameterName:
59 def update(self, **kwargs):
324 continue
325
60
326 return parmConfObj
61 for key, value in kwargs.items():
62 self.addParameter(name=key, value=value)
327
63
328 return None
64 def addParameter(self, name, value, format=None):
65 '''
66 '''
329
67
330 def getParameterObjfromValue(self, parameterValue):
68 if isinstance(value, str) and re.search(r'(\d+/\d+/\d+)', value):
69 self.parameters[name] = datetime.date(*[int(x) for x in value.split('/')])
70 elif isinstance(value, str) and re.search(r'(\d+:\d+:\d+)', value):
71 self.parameters[name] = datetime.time(*[int(x) for x in value.split(':')])
72 else:
73 try:
74 self.parameters[name] = ast.literal_eval(value)
75 except:
76 if isinstance(value, str) and ',' in value:
77 self.parameters[name] = value.split(',')
78 else:
79 self.parameters[name] = value
331
80
332 for parmConfObj in self.parmConfObjList:
81 def getParameters(self):
333
82
334 if parmConfObj.getValue() != parameterValue:
83 params = {}
335 continue
84 for key, value in self.parameters.items():
85 s = type(value).__name__
86 if s == 'date':
87 params[key] = value.strftime('%Y/%m/%d')
88 elif s == 'time':
89 params[key] = value.strftime('%H:%M:%S')
90 else:
91 params[key] = str(value)
336
92
337 return parmConfObj.getValue()
93 return params
338
94
339 return None
95 def makeXml(self, element):
340
96
341 def getParameterValue(self, parameterName):
97 xml = SubElement(element, self.ELEMENTNAME)
98 for label in self.xml_labels:
99 xml.set(label, str(getattr(self, label)))
342
100
343 parameterObj = self.getParameterObj(parameterName)
101 for key, value in self.getParameters().items():
102 xml_param = SubElement(xml, 'Parameter')
103 xml_param.set('name', key)
104 xml_param.set('value', value)
344
105
345 # if not parameterObj:
106 for conf in self.operations:
346 # return None
107 conf.makeXml(xml)
347
108
348 value = parameterObj.getValue()
109 def __str__(self):
349
110
350 return value
111 if self.ELEMENTNAME == 'Operation':
112 s = ' {}[id={}]\n'.format(self.name, self.id)
113 else:
114 s = '{}[id={}, inputId={}]\n'.format(self.name, self.id, self.inputId)
351
115
352 def getKwargs(self):
116 for key, value in self.parameters.items():
117 if self.ELEMENTNAME == 'Operation':
118 s += ' {}: {}\n'.format(key, value)
119 else:
120 s += ' {}: {}\n'.format(key, value)
353
121
354 kwargs = {}
122 for conf in self.operations:
123 s += str(conf)
355
124
356 for parmConfObj in self.parmConfObjList:
125 return s
357 if self.name == 'run' and parmConfObj.name == 'datatype':
358 continue
359
126
360 kwargs[parmConfObj.name] = parmConfObj.getValue()
127 class OperationConf(ConfBase):
361
128
362 return kwargs
129 ELEMENTNAME = 'Operation'
130 xml_labels = ['id', 'name']
363
131
364 def setup(self, id, name, priority, type, project_id, err_queue, lock):
132 def setup(self, id, name, priority, project_id, err_queue):
365
133
366 self.id = str(id)
134 self.id = str(id)
367 self.project_id = project_id
135 self.project_id = project_id
368 self.name = name
136 self.name = name
369 self.type = type
137 self.type = 'other'
370 self.priority = priority
371 self.err_queue = err_queue
138 self.err_queue = err_queue
372 self.lock = lock
373 self.parmConfObjList = []
374
375 def removeParameters(self):
376
377 for obj in self.parmConfObjList:
378 del obj
379
380 self.parmConfObjList = []
381
139
382 def addParameter(self, name, value, format='str'):
140 def readXml(self, element, project_id, err_queue):
383
141
384 if value is None:
142 self.id = element.get('id')
385 return None
143 self.name = element.get('name')
386 id = self.__getNewId()
144 self.type = 'other'
387
388 parmConfObj = ParameterConf()
389 if not parmConfObj.setup(id, name, value, format):
390 return None
391
392 self.parmConfObjList.append(parmConfObj)
393
394 return parmConfObj
395
396 def changeParameter(self, name, value, format='str'):
397
398 parmConfObj = self.getParameterObj(name)
399 parmConfObj.update(name, value, format)
400
401 return parmConfObj
402
403 def makeXml(self, procUnitElement):
404
405 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
406 opElement.set('id', str(self.id))
407 opElement.set('name', self.name)
408 opElement.set('type', self.type)
409 opElement.set('priority', str(self.priority))
410
411 for parmConfObj in self.parmConfObjList:
412 parmConfObj.makeXml(opElement)
413
414 def readXml(self, opElement, project_id):
415
416 self.id = opElement.get('id')
417 self.name = opElement.get('name')
418 self.type = opElement.get('type')
419 self.priority = opElement.get('priority')
420 self.project_id = str(project_id)
145 self.project_id = str(project_id)
146 self.err_queue = err_queue
421
147
422 # Compatible with old signal chain version
148 for elm in element.iter('Parameter'):
423 # Use of 'run' method instead 'init'
149 self.addParameter(elm.get('name'), elm.get('value'))
424 if self.type == 'self' and self.name == 'init':
425 self.name = 'run'
426
427 self.parmConfObjList = []
428
429 parmElementList = opElement.iter(ParameterConf().getElementName())
430
431 for parmElement in parmElementList:
432 parmConfObj = ParameterConf()
433 parmConfObj.readXml(parmElement)
434
435 # Compatible with old signal chain version
436 # If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER
437 if self.type != 'self' and self.name == 'Plot':
438 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
439 self.name = parmConfObj.value
440 continue
441
442 self.parmConfObjList.append(parmConfObj)
443
444 def printattr(self):
445
446 print('%s[%s]: name = %s, type = %s, priority = %s, project_id = %s' % (self.ELEMENTNAME,
447 self.id,
448 self.name,
449 self.type,
450 self.priority,
451 self.project_id))
452
453 for parmConfObj in self.parmConfObjList:
454 parmConfObj.printattr()
455
150
456 def createObject(self):
151 def createObject(self):
457
152
458 className = eval(self.name)
153 className = eval(self.name)
459
154
460 if self.type == 'other':
155 if 'Plot' in self.name or 'Writer' in self.name:
461 opObj = className()
462 elif self.type == 'external':
463 kwargs = self.getKwargs()
156 kwargs = self.getKwargs()
464 opObj = className(self.id, self.id, self.project_id, self.err_queue, self.lock, 'Operation', **kwargs)
157 opObj = className(self.id, self.id, self.project_id, self.err_queue, **kwargs)
465 opObj.start()
158 opObj.start()
466 self.opObj = opObj
159 self.type = 'external'
160 else:
161 opObj = className()
467
162
163 self.object = opObj
468 return opObj
164 return opObj
469
165
470 class ProcUnitConf():
166 class ProcUnitConf(ConfBase):
471
167
472 ELEMENTNAME = 'ProcUnit'
168 ELEMENTNAME = 'ProcUnit'
169 xml_labels = ['id', 'inputId', 'name']
473
170
474 def __init__(self):
171 def setup(self, project_id, id, name, datatype, inputId, err_queue):
475
476 self.id = None
477 self.datatype = None
478 self.name = None
479 self.inputId = None
480 self.opConfObjList = []
481 self.procUnitObj = None
482 self.opObjDict = {}
483
484 def __getPriority(self):
485
486 return len(self.opConfObjList) + 1
487
488 def __getNewId(self):
489
490 return int(self.id) * 10 + len(self.opConfObjList) + 1
491
492 def getElementName(self):
493
494 return self.ELEMENTNAME
495
496 def getId(self):
497
498 return self.id
499
500 def updateId(self, new_id):
501 '''
502 new_id = int(parentId) * 10 + (int(self.id) % 10)
503 new_inputId = int(parentId) * 10 + (int(self.inputId) % 10)
504
505 # If this proc unit has not inputs
506 #if self.inputId == '0':
507 #new_inputId = 0
508
509 n = 1
510 for opConfObj in self.opConfObjList:
511
512 idOp = str(int(new_id) * 10 + n)
513 opConfObj.updateId(idOp)
514
515 n += 1
516
517 self.parentId = str(parentId)
518 self.id = str(new_id)
519 #self.inputId = str(new_inputId)
520 '''
521 n = 1
522
523 def getInputId(self):
524
525 return self.inputId
526
527 def getOperationObjList(self):
528
529 return self.opConfObjList
530
531 def getOperationObj(self, name=None):
532
533 for opConfObj in self.opConfObjList:
534
535 if opConfObj.name != name:
536 continue
537
538 return opConfObj
539
540 return None
541
542 def getOpObjfromParamValue(self, value=None):
543
544 for opConfObj in self.opConfObjList:
545 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
546 continue
547 return opConfObj
548 return None
549
550 def getProcUnitObj(self):
551
552 return self.procUnitObj
553
554 def setup(self, project_id, id, name, datatype, inputId, err_queue, lock):
555 '''
172 '''
556 id sera el topico a publicar
557 inputId sera el topico a subscribirse
558 '''
173 '''
559
174
560 # Compatible with old signal chain version
561 if datatype == None and name == None:
175 if datatype == None and name == None:
562 raise ValueError('datatype or name should be defined')
176 raise ValueError('datatype or name should be defined')
563
177
564 #Definir una condicion para inputId cuando sea 0
565
566 if name == None:
178 if name == None:
567 if 'Proc' in datatype:
179 if 'Proc' in datatype:
568 name = datatype
180 name = datatype
569 else:
181 else:
570 name = '%sProc' % (datatype)
182 name = '%sProc' % (datatype)
571
183
572 if datatype == None:
184 if datatype == None:
573 datatype = name.replace('Proc', '')
185 datatype = name.replace('Proc', '')
574
186
575 self.id = str(id)
187 self.id = str(id)
576 self.project_id = project_id
188 self.project_id = project_id
577 self.name = name
189 self.name = name
578 self.datatype = datatype
190 self.datatype = datatype
579 self.inputId = inputId
191 self.inputId = inputId
580 self.err_queue = err_queue
192 self.err_queue = err_queue
581 self.lock = lock
193 self.operations = []
582 self.opConfObjList = []
194 self.parameters = {}
583
584 self.addOperation(name='run', optype='self')
585
586 def removeOperations(self):
587
588 for obj in self.opConfObjList:
589 del obj
590
195
591 self.opConfObjList = []
196 def removeOperation(self, id):
592 self.addOperation(name='run')
593
197
594 def addParameter(self, **kwargs):
198 i = [1 if x.id==id else 0 for x in self.operations]
595 '''
199 self.operations.pop(i.index(1))
596 Add parameters to 'run' operation
597 '''
598 opObj = self.opConfObjList[0]
599
200
600 opObj.addParameter(**kwargs)
201 def getOperation(self, id):
601
202
602 return opObj
203 for conf in self.operations:
204 if conf.id == id:
205 return conf
603
206
604 def addOperation(self, name, optype='self'):
207 def addOperation(self, name, optype='self'):
605 '''
208 '''
606 Actualizacion - > proceso comunicacion
607 En el caso de optype='self', elminar. DEfinir comuncacion IPC -> Topic
608 definir el tipoc de socket o comunicacion ipc++
609
610 '''
209 '''
611
210
612 id = self.__getNewId()
211 id = self.getNewId()
613 priority = self.__getPriority() # Sin mucho sentido, pero puede usarse
212 conf = OperationConf()
614 opConfObj = OperationConf()
213 conf.setup(id, name=name, priority='0', project_id=self.project_id, err_queue=self.err_queue)
615 opConfObj.setup(id, name=name, priority=priority, type=optype, project_id=self.project_id, err_queue=self.err_queue, lock=self.lock)
214 self.operations.append(conf)
616 self.opConfObjList.append(opConfObj)
617
618 return opConfObj
619
215
620 def makeXml(self, projectElement):
216 return conf
621
217
622 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
218 def readXml(self, element, project_id, err_queue):
623 procUnitElement.set('id', str(self.id))
624 procUnitElement.set('name', self.name)
625 procUnitElement.set('datatype', self.datatype)
626 procUnitElement.set('inputId', str(self.inputId))
627
219
628 for opConfObj in self.opConfObjList:
220 self.id = element.get('id')
629 opConfObj.makeXml(procUnitElement)
221 self.name = element.get('name')
630
222 self.inputId = None if element.get('inputId') == 'None' else element.get('inputId')
631 def readXml(self, upElement, project_id):
223 self.datatype = element.get('datatype', self.name.replace(self.ELEMENTNAME.replace('Unit', ''), ''))
632
633 self.id = upElement.get('id')
634 self.name = upElement.get('name')
635 self.datatype = upElement.get('datatype')
636 self.inputId = upElement.get('inputId')
637 self.project_id = str(project_id)
224 self.project_id = str(project_id)
225 self.err_queue = err_queue
226 self.operations = []
227 self.parameters = {}
638
228
639 if self.ELEMENTNAME == 'ReadUnit':
229 for elm in element:
640 self.datatype = self.datatype.replace('Reader', '')
230 if elm.tag == 'Parameter':
641
231 self.addParameter(elm.get('name'), elm.get('value'))
642 if self.ELEMENTNAME == 'ProcUnit':
232 elif elm.tag == 'Operation':
643 self.datatype = self.datatype.replace('Proc', '')
233 conf = OperationConf()
644
234 conf.readXml(elm, project_id, err_queue)
645 if self.inputId == 'None':
235 self.operations.append(conf)
646 self.inputId = '0'
647
648 self.opConfObjList = []
649
650 opElementList = upElement.iter(OperationConf().getElementName())
651
652 for opElement in opElementList:
653 opConfObj = OperationConf()
654 opConfObj.readXml(opElement, project_id)
655 self.opConfObjList.append(opConfObj)
656
657 def printattr(self):
658
659 print('%s[%s]: name = %s, datatype = %s, inputId = %s, project_id = %s' % (self.ELEMENTNAME,
660 self.id,
661 self.name,
662 self.datatype,
663 self.inputId,
664 self.project_id))
665
666 for opConfObj in self.opConfObjList:
667 opConfObj.printattr()
668
669 def getKwargs(self):
670
671 opObj = self.opConfObjList[0]
672 kwargs = opObj.getKwargs()
673
674 return kwargs
675
236
676 def createObjects(self):
237 def createObjects(self):
677 '''
238 '''
678 Instancia de unidades de procesamiento.
239 Instancia de unidades de procesamiento.
679 '''
240 '''
680
241
681 className = eval(self.name)
242 className = eval(self.name)
682 kwargs = self.getKwargs()
243 kwargs = self.getKwargs()
683 procUnitObj = className(self.id, self.inputId, self.project_id, self.err_queue, self.lock, 'ProcUnit', **kwargs)
244 procUnitObj = className()
245 procUnitObj.name = self.name
684 log.success('creating process...', self.name)
246 log.success('creating process...', self.name)
685
247
686 for opConfObj in self.opConfObjList:
248 for conf in self.operations:
687
249
688 if opConfObj.type == 'self' and opConfObj.name == 'run':
250 opObj = conf.createObject()
689 continue
690 elif opConfObj.type == 'self':
691 opObj = getattr(procUnitObj, opConfObj.name)
692 else:
693 opObj = opConfObj.createObject()
694
251
695 log.success('adding operation: {}, type:{}'.format(
252 log.success('adding operation: {}, type:{}'.format(
696 opConfObj.name,
253 conf.name,
697 opConfObj.type), self.name)
254 conf.type), self.name)
698
699 procUnitObj.addOperation(opConfObj, opObj)
700
701 procUnitObj.start()
702 self.procUnitObj = procUnitObj
703
255
704 def close(self):
256 procUnitObj.addOperation(conf, opObj)
705
257
706 for opConfObj in self.opConfObjList:
258 self.object = procUnitObj
707 if opConfObj.type == 'self':
708 continue
709
710 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
711 opObj.close()
712
259
713 self.procUnitObj.close()
260 def run(self):
261 '''
262 '''
714
263
715 return
264 return self.object.call(**self.getKwargs())
716
265
717
266
718 class ReadUnitConf(ProcUnitConf):
267 class ReadUnitConf(ProcUnitConf):
719
268
720 ELEMENTNAME = 'ReadUnit'
269 ELEMENTNAME = 'ReadUnit'
721
270
722 def __init__(self):
271 def __init__(self):
723
272
724 self.id = None
273 self.id = None
725 self.datatype = None
274 self.datatype = None
726 self.name = None
275 self.name = None
727 self.inputId = None
276 self.inputId = None
728 self.opConfObjList = []
277 self.operations = []
729 self.lock = Event()
278 self.parameters = {}
730 self.lock.set()
731 self.lock.n = Value('d', 0)
732
733 def getElementName(self):
734
735 return self.ELEMENTNAME
736
279
737 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
280 def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='',
738 startTime='', endTime='', server=None, **kwargs):
281 startTime='', endTime='', server=None, **kwargs):
739
282
740
741 '''
742 *****el id del proceso sera el Topico
743
744 Adicion de {topic}, si no esta presente -> error
745 kwargs deben ser trasmitidos en la instanciacion
746
747 '''
748
749 # Compatible with old signal chain version
750 if datatype == None and name == None:
283 if datatype == None and name == None:
751 raise ValueError('datatype or name should be defined')
284 raise ValueError('datatype or name should be defined')
752 if name == None:
285 if name == None:
753 if 'Reader' in datatype:
286 if 'Reader' in datatype:
754 name = datatype
287 name = datatype
755 datatype = name.replace('Reader','')
288 datatype = name.replace('Reader','')
756 else:
289 else:
757 name = '{}Reader'.format(datatype)
290 name = '{}Reader'.format(datatype)
758 if datatype == None:
291 if datatype == None:
759 if 'Reader' in name:
292 if 'Reader' in name:
760 datatype = name.replace('Reader','')
293 datatype = name.replace('Reader','')
761 else:
294 else:
762 datatype = name
295 datatype = name
763 name = '{}Reader'.format(name)
296 name = '{}Reader'.format(name)
764
297
765 self.id = id
298 self.id = id
766 self.project_id = project_id
299 self.project_id = project_id
767 self.name = name
300 self.name = name
768 self.datatype = datatype
301 self.datatype = datatype
769 if path != '':
770 self.path = os.path.abspath(path)
771 self.startDate = startDate
772 self.endDate = endDate
773 self.startTime = startTime
774 self.endTime = endTime
775 self.server = server
776 self.err_queue = err_queue
302 self.err_queue = err_queue
777 self.addRunOperation(**kwargs)
778
779 def update(self, **kwargs):
780
781 if 'datatype' in kwargs:
782 datatype = kwargs.pop('datatype')
783 if 'Reader' in datatype:
784 self.name = datatype
785 else:
786 self.name = '%sReader' % (datatype)
787 self.datatype = self.name.replace('Reader', '')
788
789 attrs = ('path', 'startDate', 'endDate',
790 'startTime', 'endTime')
791
792 for attr in attrs:
793 if attr in kwargs:
794 setattr(self, attr, kwargs.pop(attr))
795
796 self.updateRunOperation(**kwargs)
797
303
798 def removeOperations(self):
304 self.addParameter(name='path', value=path)
305 self.addParameter(name='startDate', value=startDate)
306 self.addParameter(name='endDate', value=endDate)
307 self.addParameter(name='startTime', value=startTime)
308 self.addParameter(name='endTime', value=endTime)
799
309
800 for obj in self.opConfObjList:
310 for key, value in kwargs.items():
801 del obj
311 self.addParameter(name=key, value=value)
802
803 self.opConfObjList = []
804
805 def addRunOperation(self, **kwargs):
806
807 opObj = self.addOperation(name='run', optype='self')
808
809 if self.server is None:
810 opObj.addParameter(
811 name='datatype', value=self.datatype, format='str')
812 opObj.addParameter(name='path', value=self.path, format='str')
813 opObj.addParameter(
814 name='startDate', value=self.startDate, format='date')
815 opObj.addParameter(
816 name='endDate', value=self.endDate, format='date')
817 opObj.addParameter(
818 name='startTime', value=self.startTime, format='time')
819 opObj.addParameter(
820 name='endTime', value=self.endTime, format='time')
821
822 for key, value in list(kwargs.items()):
823 opObj.addParameter(name=key, value=value,
824 format=type(value).__name__)
825 else:
826 opObj.addParameter(name='server', value=self.server, format='str')
827
828 return opObj
829
830 def updateRunOperation(self, **kwargs):
831
832 opObj = self.getOperationObj(name='run')
833 opObj.removeParameters()
834
835 opObj.addParameter(name='datatype', value=self.datatype, format='str')
836 opObj.addParameter(name='path', value=self.path, format='str')
837 opObj.addParameter(
838 name='startDate', value=self.startDate, format='date')
839 opObj.addParameter(name='endDate', value=self.endDate, format='date')
840 opObj.addParameter(
841 name='startTime', value=self.startTime, format='time')
842 opObj.addParameter(name='endTime', value=self.endTime, format='time')
843
844 for key, value in list(kwargs.items()):
845 opObj.addParameter(name=key, value=value,
846 format=type(value).__name__)
847
848 return opObj
849
850 def readXml(self, upElement, project_id):
851
852 self.id = upElement.get('id')
853 self.name = upElement.get('name')
854 self.datatype = upElement.get('datatype')
855 self.project_id = str(project_id) #yong
856
857 if self.ELEMENTNAME == 'ReadUnit':
858 self.datatype = self.datatype.replace('Reader', '')
859
860 self.opConfObjList = []
861
862 opElementList = upElement.iter(OperationConf().getElementName())
863
864 for opElement in opElementList:
865 opConfObj = OperationConf()
866 opConfObj.readXml(opElement, project_id)
867 self.opConfObjList.append(opConfObj)
868
869 if opConfObj.name == 'run':
870 self.path = opConfObj.getParameterValue('path')
871 self.startDate = opConfObj.getParameterValue('startDate')
872 self.endDate = opConfObj.getParameterValue('endDate')
873 self.startTime = opConfObj.getParameterValue('startTime')
874 self.endTime = opConfObj.getParameterValue('endTime')
875
312
876
313
877 class Project(Process):
314 class Project(Process):
878
315
879 ELEMENTNAME = 'Project'
316 ELEMENTNAME = 'Project'
880
317
881 def __init__(self):
318 def __init__(self):
882
319
883 Process.__init__(self)
320 Process.__init__(self)
884 self.id = None
321 self.id = None
885 self.filename = None
322 self.filename = None
886 self.description = None
323 self.description = None
887 self.email = None
324 self.email = None
888 self.alarm = None
325 self.alarm = []
889 self.procUnitConfObjDict = {}
326 self.configurations = {}
890 self.err_queue = Queue()
327 # self.err_queue = Queue()
328 self.err_queue = None
329 self.started = False
891
330
892 def __getNewId(self):
331 def getNewId(self):
893
332
894 idList = list(self.procUnitConfObjDict.keys())
333 idList = list(self.configurations.keys())
895 id = int(self.id) * 10
334 id = int(self.id) * 10
896
335
897 while True:
336 while True:
898 id += 1
337 id += 1
899
338
900 if str(id) in idList:
339 if str(id) in idList:
901 continue
340 continue
902
341
903 break
342 break
904
343
905 return str(id)
344 return str(id)
906
345
907 def getElementName(self):
908
909 return self.ELEMENTNAME
910
911 def getId(self):
912
913 return self.id
914
915 def updateId(self, new_id):
346 def updateId(self, new_id):
916
347
917 self.id = str(new_id)
348 self.id = str(new_id)
918
349
919 keyList = list(self.procUnitConfObjDict.keys())
350 keyList = list(self.configurations.keys())
920 keyList.sort()
351 keyList.sort()
921
352
922 n = 1
353 n = 1
923 newProcUnitConfObjDict = {}
354 new_confs = {}
924
355
925 for procKey in keyList:
356 for procKey in keyList:
926
357
927 procUnitConfObj = self.procUnitConfObjDict[procKey]
358 conf = self.configurations[procKey]
928 idProcUnit = str(int(self.id) * 10 + n)
359 idProcUnit = str(int(self.id) * 10 + n)
929 procUnitConfObj.updateId(idProcUnit)
360 conf.updateId(idProcUnit)
930 newProcUnitConfObjDict[idProcUnit] = procUnitConfObj
361 new_confs[idProcUnit] = conf
931 n += 1
362 n += 1
932
363
933 self.procUnitConfObjDict = newProcUnitConfObjDict
364 self.configurations = new_confs
934
365
935 def setup(self, id=1, name='', description='', email=None, alarm=[]):
366 def setup(self, id=1, name='', description='', email=None, alarm=[]):
936
367
937 print(' ')
938 print('*' * 60)
939 print('* Starting SIGNAL CHAIN PROCESSING (Multiprocessing) v%s *' % schainpy.__version__)
940 print('*' * 60)
941 print("* Python " + python_version() + " *")
942 print('*' * 19)
943 print(' ')
944 self.id = str(id)
368 self.id = str(id)
945 self.description = description
369 self.description = description
946 self.email = email
370 self.email = email
947 self.alarm = alarm
371 self.alarm = alarm
948 if name:
372 if name:
949 self.name = '{} ({})'.format(Process.__name__, name)
373 self.name = '{} ({})'.format(Process.__name__, name)
950
374
951 def update(self, **kwargs):
375 def update(self, **kwargs):
952
376
953 for key, value in list(kwargs.items()):
377 for key, value in kwargs.items():
954 setattr(self, key, value)
378 setattr(self, key, value)
955
379
956 def clone(self):
380 def clone(self):
957
381
958 p = Project()
382 p = Project()
959 p.procUnitConfObjDict = self.procUnitConfObjDict
383 p.id = self.id
384 p.name = self.name
385 p.description = self.description
386 p.configurations = self.configurations.copy()
387
960 return p
388 return p
961
389
962 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
390 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
963
391
964 '''
392 '''
965 Actualizacion:
966 Se agrego un nuevo argumento: topic -relativo a la forma de comunicar los procesos simultaneos
967
968 * El id del proceso sera el topico al que se deben subscribir los procUnits para recibir la informacion(data)
969
970 '''
393 '''
971
394
972 if id is None:
395 if id is None:
973 idReadUnit = self.__getNewId()
396 idReadUnit = self.getNewId()
974 else:
397 else:
975 idReadUnit = str(id)
398 idReadUnit = str(id)
976
399
977 readUnitConfObj = ReadUnitConf()
400 conf = ReadUnitConf()
978 readUnitConfObj.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
401 conf.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs)
979 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
402 self.configurations[conf.id] = conf
980
403
981 return readUnitConfObj
404 return conf
982
405
983 def addProcUnit(self, inputId='0', datatype=None, name=None):
406 def addProcUnit(self, id=None, inputId='0', datatype=None, name=None):
984
407
985 '''
408 '''
986 Actualizacion:
987 Se agrego dos nuevos argumentos: topic_read (lee data de otro procUnit) y topic_write(escribe o envia data a otro procUnit)
988 Deberia reemplazar a "inputId"
989
990 ** A fin de mantener el inputID, este sera la representaacion del topicoal que deben subscribirse. El ID propio de la intancia
991 (proceso) sera el topico de la publicacion, todo sera asignado de manera dinamica.
992
993 '''
409 '''
994
410
995 idProcUnit = self.__getNewId()
411 if id is None:
996 procUnitConfObj = ProcUnitConf()
412 idProcUnit = self.getNewId()
997 input_proc = self.procUnitConfObjDict[inputId]
413 else:
998 procUnitConfObj.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue, input_proc.lock)
414 idProcUnit = id
999 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1000
1001 return procUnitConfObj
1002
1003 def removeProcUnit(self, id):
1004
415
1005 if id in list(self.procUnitConfObjDict.keys()):
416 conf = ProcUnitConf()
1006 self.procUnitConfObjDict.pop(id)
417 conf.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue)
418 self.configurations[conf.id] = conf
1007
419
1008 def getReadUnitId(self):
420 return conf
1009
421
1010 readUnitConfObj = self.getReadUnitObj()
422 def removeProcUnit(self, id):
1011
423
1012 return readUnitConfObj.id
424 if id in self.configurations:
425 self.configurations.pop(id)
1013
426
1014 def getReadUnitObj(self):
427 def getReadUnit(self):
1015
428
1016 for obj in list(self.procUnitConfObjDict.values()):
429 for obj in list(self.configurations.values()):
1017 if obj.getElementName() == 'ReadUnit':
430 if obj.ELEMENTNAME == 'ReadUnit':
1018 return obj
431 return obj
1019
432
1020 return None
433 return None
1021
434
1022 def getProcUnitObj(self, id=None, name=None):
435 def getProcUnit(self, id):
1023
1024 if id != None:
1025 return self.procUnitConfObjDict[id]
1026
436
1027 if name != None:
437 return self.configurations[id]
1028 return self.getProcUnitObjByName(name)
1029
438
1030 return None
439 def getUnits(self):
1031
440
1032 def getProcUnitObjByName(self, name):
441 keys = list(self.configurations)
1033
442 keys.sort()
1034 for obj in list(self.procUnitConfObjDict.values()):
1035 if obj.name == name:
1036 return obj
1037
443
1038 return None
444 for key in keys:
445 yield self.configurations[key]
1039
446
1040 def procUnitItems(self):
447 def updateUnit(self, id, **kwargs):
1041
448
1042 return list(self.procUnitConfObjDict.items())
449 conf = self.configurations[id].update(**kwargs)
1043
450
1044 def makeXml(self):
451 def makeXml(self):
1045
452
1046 projectElement = Element('Project')
453 xml = Element('Project')
1047 projectElement.set('id', str(self.id))
454 xml.set('id', str(self.id))
1048 projectElement.set('name', self.name)
455 xml.set('name', self.name)
1049 projectElement.set('description', self.description)
456 xml.set('description', self.description)
1050
457
1051 for procUnitConfObj in list(self.procUnitConfObjDict.values()):
458 for conf in self.configurations.values():
1052 procUnitConfObj.makeXml(projectElement)
459 conf.makeXml(xml)
1053
460
1054 self.projectElement = projectElement
461 self.xml = xml
1055
462
1056 def writeXml(self, filename=None):
463 def writeXml(self, filename=None):
1057
464
1058 if filename == None:
465 if filename == None:
1059 if self.filename:
466 if self.filename:
1060 filename = self.filename
467 filename = self.filename
1061 else:
468 else:
1062 filename = 'schain.xml'
469 filename = 'schain.xml'
1063
470
1064 if not filename:
471 if not filename:
1065 print('filename has not been defined. Use setFilename(filename) for do it.')
472 print('filename has not been defined. Use setFilename(filename) for do it.')
1066 return 0
473 return 0
1067
474
1068 abs_file = os.path.abspath(filename)
475 abs_file = os.path.abspath(filename)
1069
476
1070 if not os.access(os.path.dirname(abs_file), os.W_OK):
477 if not os.access(os.path.dirname(abs_file), os.W_OK):
1071 print('No write permission on %s' % os.path.dirname(abs_file))
478 print('No write permission on %s' % os.path.dirname(abs_file))
1072 return 0
479 return 0
1073
480
1074 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
481 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1075 print('File %s already exists and it could not be overwriten' % abs_file)
482 print('File %s already exists and it could not be overwriten' % abs_file)
1076 return 0
483 return 0
1077
484
1078 self.makeXml()
485 self.makeXml()
1079
486
1080 ElementTree(self.projectElement).write(abs_file, method='xml')
487 ElementTree(self.xml).write(abs_file, method='xml')
1081
488
1082 self.filename = abs_file
489 self.filename = abs_file
1083
490
1084 return 1
491 return 1
1085
492
1086 def readXml(self, filename=None):
493 def readXml(self, filename):
1087
1088 if not filename:
1089 print('filename is not defined')
1090 return 0
1091
494
1092 abs_file = os.path.abspath(filename)
495 abs_file = os.path.abspath(filename)
1093
496
1094 if not os.path.isfile(abs_file):
497 self.configurations = {}
1095 print('%s file does not exist' % abs_file)
1096 return 0
1097
1098 self.projectElement = None
1099 self.procUnitConfObjDict = {}
1100
498
1101 try:
499 try:
1102 self.projectElement = ElementTree().parse(abs_file)
500 self.xml = ElementTree().parse(abs_file)
1103 except:
501 except:
1104 print('Error reading %s, verify file format' % filename)
502 log.error('Error reading %s, verify file format' % filename)
1105 return 0
503 return 0
1106
504
1107 self.project = self.projectElement.tag
505 self.id = self.xml.get('id')
1108
506 self.name = self.xml.get('name')
1109 self.id = self.projectElement.get('id')
507 self.description = self.xml.get('description')
1110 self.name = self.projectElement.get('name')
508
1111 self.description = self.projectElement.get('description')
509 for element in self.xml:
1112
510 if element.tag == 'ReadUnit':
1113 readUnitElementList = self.projectElement.iter(
511 conf = ReadUnitConf()
1114 ReadUnitConf().getElementName())
512 conf.readXml(element, self.id, self.err_queue)
1115
513 self.configurations[conf.id] = conf
1116 for readUnitElement in readUnitElementList:
514 elif element.tag == 'ProcUnit':
1117 readUnitConfObj = ReadUnitConf()
515 conf = ProcUnitConf()
1118 readUnitConfObj.readXml(readUnitElement, self.id)
516 input_proc = self.configurations[element.get('inputId')]
1119 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
517 conf.readXml(element, self.id, self.err_queue)
1120
518 self.configurations[conf.id] = conf
1121 procUnitElementList = self.projectElement.iter(
1122 ProcUnitConf().getElementName())
1123
1124 for procUnitElement in procUnitElementList:
1125 procUnitConfObj = ProcUnitConf()
1126 procUnitConfObj.readXml(procUnitElement, self.id)
1127 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1128
519
1129 self.filename = abs_file
520 self.filename = abs_file
1130
521
1131 return 1
522 return 1
1132
523
1133 def __str__(self):
524 def __str__(self):
1134
525
1135 print('Project: name = %s, description = %s, id = %s' % (
526 text = '\nProject[id=%s, name=%s, description=%s]\n\n' % (
527 self.id,
1136 self.name,
528 self.name,
1137 self.description,
529 self.description,
1138 self.id))
530 )
1139
531
1140 for procUnitConfObj in self.procUnitConfObjDict.values():
532 for conf in self.configurations.values():
1141 print(procUnitConfObj)
533 text += '{}'.format(conf)
1142
534
1143 def createObjects(self):
535 return text
1144
536
537 def createObjects(self):
1145
538
1146 keys = list(self.procUnitConfObjDict.keys())
539 keys = list(self.configurations.keys())
1147 keys.sort()
540 keys.sort()
1148 for key in keys:
541 for key in keys:
1149 self.procUnitConfObjDict[key].createObjects()
542 conf = self.configurations[key]
543 conf.createObjects()
544 if conf.inputId is not None:
545 conf.object.setInput(self.configurations[conf.inputId].object)
1150
546
1151 def monitor(self):
547 def monitor(self):
1152
548
1153 t = Thread(target=self.__monitor, args=(self.err_queue, self.ctx))
549 t = Thread(target=self._monitor, args=(self.err_queue, self.ctx))
1154 t.start()
550 t.start()
1155
551
1156 def __monitor(self, queue, ctx):
552 def _monitor(self, queue, ctx):
1157
553
1158 import socket
554 import socket
1159
555
1160 procs = 0
556 procs = 0
1161 err_msg = ''
557 err_msg = ''
1162
558
1163 while True:
559 while True:
1164 msg = queue.get()
560 msg = queue.get()
1165 if '#_start_#' in msg:
561 if '#_start_#' in msg:
1166 procs += 1
562 procs += 1
1167 elif '#_end_#' in msg:
563 elif '#_end_#' in msg:
1168 procs -=1
564 procs -=1
1169 else:
565 else:
1170 err_msg = msg
566 err_msg = msg
1171
567
1172 if procs == 0 or 'Traceback' in err_msg:
568 if procs == 0 or 'Traceback' in err_msg:
1173 break
569 break
1174 time.sleep(0.1)
570 time.sleep(0.1)
1175
571
1176 if '|' in err_msg:
572 if '|' in err_msg:
1177 name, err = err_msg.split('|')
573 name, err = err_msg.split('|')
1178 if 'SchainWarning' in err:
574 if 'SchainWarning' in err:
1179 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
575 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), name)
1180 elif 'SchainError' in err:
576 elif 'SchainError' in err:
1181 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
577 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name)
1182 else:
578 else:
1183 log.error(err, name)
579 log.error(err, name)
1184 else:
580 else:
1185 name, err = self.name, err_msg
581 name, err = self.name, err_msg
1186
582
1187 time.sleep(2)
583 time.sleep(1)
1188
1189 for conf in self.procUnitConfObjDict.values():
1190 for confop in conf.opConfObjList:
1191 if confop.type == 'external':
1192 confop.opObj.terminate()
1193 conf.procUnitObj.terminate()
1194
584
1195 ctx.term()
585 ctx.term()
1196
586
1197 message = ''.join(err)
587 message = ''.join(err)
1198
588
1199 if err_msg:
589 if err_msg:
1200 subject = 'SChain v%s: Error running %s\n' % (
590 subject = 'SChain v%s: Error running %s\n' % (
1201 schainpy.__version__, self.name)
591 schainpy.__version__, self.name)
1202
592
1203 subtitle = 'Hostname: %s\n' % socket.gethostbyname(
593 subtitle = 'Hostname: %s\n' % socket.gethostbyname(
1204 socket.gethostname())
594 socket.gethostname())
1205 subtitle += 'Working directory: %s\n' % os.path.abspath('./')
595 subtitle += 'Working directory: %s\n' % os.path.abspath('./')
1206 subtitle += 'Configuration file: %s\n' % self.filename
596 subtitle += 'Configuration file: %s\n' % self.filename
1207 subtitle += 'Time: %s\n' % str(datetime.datetime.now())
597 subtitle += 'Time: %s\n' % str(datetime.datetime.now())
1208
598
1209 readUnitConfObj = self.getReadUnitObj()
599 readUnitConfObj = self.getReadUnit()
1210 if readUnitConfObj:
600 if readUnitConfObj:
1211 subtitle += '\nInput parameters:\n'
601 subtitle += '\nInput parameters:\n'
1212 subtitle += '[Data path = %s]\n' % readUnitConfObj.path
602 subtitle += '[Data path = %s]\n' % readUnitConfObj.parameters['path']
1213 subtitle += '[Data type = %s]\n' % readUnitConfObj.datatype
603 subtitle += '[Start date = %s]\n' % readUnitConfObj.parameters['startDate']
1214 subtitle += '[Start date = %s]\n' % readUnitConfObj.startDate
604 subtitle += '[End date = %s]\n' % readUnitConfObj.parameters['endDate']
1215 subtitle += '[End date = %s]\n' % readUnitConfObj.endDate
605 subtitle += '[Start time = %s]\n' % readUnitConfObj.parameters['startTime']
1216 subtitle += '[Start time = %s]\n' % readUnitConfObj.startTime
606 subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
1217 subtitle += '[End time = %s]\n' % readUnitConfObj.endTime
1218
607
1219 a = Alarm(
608 a = Alarm(
1220 modes=self.alarm,
609 modes=self.alarm,
1221 email=self.email,
610 email=self.email,
1222 message=message,
611 message=message,
1223 subject=subject,
612 subject=subject,
1224 subtitle=subtitle,
613 subtitle=subtitle,
1225 filename=self.filename
614 filename=self.filename
1226 )
615 )
1227
616
1228 a.start()
617 a.start()
1229
618
1230 def isPaused(self):
1231 return 0
1232
1233 def isStopped(self):
1234 return 0
1235
1236 def runController(self):
1237 '''
1238 returns 0 when this process has been stopped, 1 otherwise
1239 '''
1240
1241 if self.isPaused():
1242 print('Process suspended')
1243
1244 while True:
1245 time.sleep(0.1)
1246
1247 if not self.isPaused():
1248 break
1249
1250 if self.isStopped():
1251 break
1252
1253 print('Process reinitialized')
1254
1255 if self.isStopped():
1256 print('Process stopped')
1257 return 0
1258
1259 return 1
1260
1261 def setFilename(self, filename):
619 def setFilename(self, filename):
1262
620
1263 self.filename = filename
621 self.filename = filename
1264
622
1265 def setProxy(self):
623 def runProcs(self):
1266
624
1267 if not os.path.exists('/tmp/schain'):
625 err = False
1268 os.mkdir('/tmp/schain')
626 n = len(self.configurations)
1269
627
1270 self.ctx = zmq.Context()
628 while not err:
1271 xpub = self.ctx.socket(zmq.XPUB)
629 for conf in self.getUnits():
1272 xpub.bind('ipc:///tmp/schain/{}_pub'.format(self.id))
630 ok = conf.run()
1273 xsub = self.ctx.socket(zmq.XSUB)
631 if ok is 'Error':
1274 xsub.bind('ipc:///tmp/schain/{}_sub'.format(self.id))
632 n -= 1
1275 self.monitor()
633 continue
1276 try:
634 elif not ok:
1277 zmq.proxy(xpub, xsub)
635 break
1278 except zmq.ContextTerminated:
636 if n == 0:
1279 xpub.close()
637 err = True
1280 xsub.close()
1281
638
1282 def run(self):
639 def run(self):
1283
640
1284 log.success('Starting {}: {}'.format(self.name, self.id), tag='')
641 log.success('\nStarting Project {} [id={}]'.format(self.name, self.id), tag='')
642 self.started = True
1285 self.start_time = time.time()
643 self.start_time = time.time()
1286 self.createObjects()
644 self.createObjects()
1287 self.setProxy()
645 self.runProcs()
1288 log.success('{} Done (Time: {}s)'.format(
646 log.success('{} Done (Time: {:4.2f}s)'.format(
1289 self.name,
647 self.name,
1290 time.time()-self.start_time), '')
648 time.time()-self.start_time), '')
@@ -1,407 +1,407
1 '''
1 '''
2 Created on Nov 9, 2016
2 Created on Nov 9, 2016
3
3
4 @author: roj- LouVD
4 @author: roj- LouVD
5 '''
5 '''
6
6
7
7
8 import os
8 import os
9 import sys
9 import sys
10 import time
10 import time
11 import glob
11 import glob
12 import datetime
12 import datetime
13
13
14 import numpy
14 import numpy
15
15
16 import schainpy.admin
16 import schainpy.admin
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator
18 from schainpy.model.data.jrodata import Parameters
18 from schainpy.model.data.jrodata import Parameters
19 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
20 from schainpy.utils import log
20 from schainpy.utils import log
21
21
22 FILE_HEADER_STRUCTURE = numpy.dtype([
22 FILE_HEADER_STRUCTURE = numpy.dtype([
23 ('FMN', '<u4'),
23 ('FMN', '<u4'),
24 ('nrec', '<u4'),
24 ('nrec', '<u4'),
25 ('fr_offset', '<u4'),
25 ('fr_offset', '<u4'),
26 ('id', '<u4'),
26 ('id', '<u4'),
27 ('site', 'u1', (32,))
27 ('site', 'u1', (32,))
28 ])
28 ])
29
29
30 REC_HEADER_STRUCTURE = numpy.dtype([
30 REC_HEADER_STRUCTURE = numpy.dtype([
31 ('rmn', '<u4'),
31 ('rmn', '<u4'),
32 ('rcounter', '<u4'),
32 ('rcounter', '<u4'),
33 ('nr_offset', '<u4'),
33 ('nr_offset', '<u4'),
34 ('tr_offset', '<u4'),
34 ('tr_offset', '<u4'),
35 ('time', '<u4'),
35 ('time', '<u4'),
36 ('time_msec', '<u4'),
36 ('time_msec', '<u4'),
37 ('tag', 'u1', (32,)),
37 ('tag', 'u1', (32,)),
38 ('comments', 'u1', (32,)),
38 ('comments', 'u1', (32,)),
39 ('lat', '<f4'),
39 ('lat', '<f4'),
40 ('lon', '<f4'),
40 ('lon', '<f4'),
41 ('gps_status', '<u4'),
41 ('gps_status', '<u4'),
42 ('freq', '<u4'),
42 ('freq', '<u4'),
43 ('freq0', '<u4'),
43 ('freq0', '<u4'),
44 ('nchan', '<u4'),
44 ('nchan', '<u4'),
45 ('delta_r', '<u4'),
45 ('delta_r', '<u4'),
46 ('nranges', '<u4'),
46 ('nranges', '<u4'),
47 ('r0', '<u4'),
47 ('r0', '<u4'),
48 ('prf', '<u4'),
48 ('prf', '<u4'),
49 ('ncoh', '<u4'),
49 ('ncoh', '<u4'),
50 ('npoints', '<u4'),
50 ('npoints', '<u4'),
51 ('polarization', '<i4'),
51 ('polarization', '<i4'),
52 ('rx_filter', '<u4'),
52 ('rx_filter', '<u4'),
53 ('nmodes', '<u4'),
53 ('nmodes', '<u4'),
54 ('dmode_index', '<u4'),
54 ('dmode_index', '<u4'),
55 ('dmode_rngcorr', '<u4'),
55 ('dmode_rngcorr', '<u4'),
56 ('nrxs', '<u4'),
56 ('nrxs', '<u4'),
57 ('acf_length', '<u4'),
57 ('acf_length', '<u4'),
58 ('acf_lags', '<u4'),
58 ('acf_lags', '<u4'),
59 ('sea_to_atmos', '<f4'),
59 ('sea_to_atmos', '<f4'),
60 ('sea_notch', '<u4'),
60 ('sea_notch', '<u4'),
61 ('lh_sea', '<u4'),
61 ('lh_sea', '<u4'),
62 ('hh_sea', '<u4'),
62 ('hh_sea', '<u4'),
63 ('nbins_sea', '<u4'),
63 ('nbins_sea', '<u4'),
64 ('min_snr', '<f4'),
64 ('min_snr', '<f4'),
65 ('min_cc', '<f4'),
65 ('min_cc', '<f4'),
66 ('max_time_diff', '<f4')
66 ('max_time_diff', '<f4')
67 ])
67 ])
68
68
69 DATA_STRUCTURE = numpy.dtype([
69 DATA_STRUCTURE = numpy.dtype([
70 ('range', '<u4'),
70 ('range', '<u4'),
71 ('status', '<u4'),
71 ('status', '<u4'),
72 ('zonal', '<f4'),
72 ('zonal', '<f4'),
73 ('meridional', '<f4'),
73 ('meridional', '<f4'),
74 ('vertical', '<f4'),
74 ('vertical', '<f4'),
75 ('zonal_a', '<f4'),
75 ('zonal_a', '<f4'),
76 ('meridional_a', '<f4'),
76 ('meridional_a', '<f4'),
77 ('corrected_fading', '<f4'), # seconds
77 ('corrected_fading', '<f4'), # seconds
78 ('uncorrected_fading', '<f4'), # seconds
78 ('uncorrected_fading', '<f4'), # seconds
79 ('time_diff', '<f4'),
79 ('time_diff', '<f4'),
80 ('major_axis', '<f4'),
80 ('major_axis', '<f4'),
81 ('axial_ratio', '<f4'),
81 ('axial_ratio', '<f4'),
82 ('orientation', '<f4'),
82 ('orientation', '<f4'),
83 ('sea_power', '<u4'),
83 ('sea_power', '<u4'),
84 ('sea_algorithm', '<u4')
84 ('sea_algorithm', '<u4')
85 ])
85 ])
86
86
87 @MPDecorator
87
88 class BLTRParamReader(JRODataReader, ProcessingUnit):
88 class BLTRParamReader(JRODataReader, ProcessingUnit):
89 '''
89 '''
90 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR
90 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR
91 from *.sswma files
91 from *.sswma files
92 '''
92 '''
93
93
94 ext = '.sswma'
94 ext = '.sswma'
95
95
96 def __init__(self):
96 def __init__(self):
97
97
98 ProcessingUnit.__init__(self)
98 ProcessingUnit.__init__(self)
99
99
100 self.dataOut = Parameters()
100 self.dataOut = Parameters()
101 self.counter_records = 0
101 self.counter_records = 0
102 self.flagNoMoreFiles = 0
102 self.flagNoMoreFiles = 0
103 self.isConfig = False
103 self.isConfig = False
104 self.filename = None
104 self.filename = None
105
105
106 def setup(self,
106 def setup(self,
107 path=None,
107 path=None,
108 startDate=None,
108 startDate=None,
109 endDate=None,
109 endDate=None,
110 ext=None,
110 ext=None,
111 startTime=datetime.time(0, 0, 0),
111 startTime=datetime.time(0, 0, 0),
112 endTime=datetime.time(23, 59, 59),
112 endTime=datetime.time(23, 59, 59),
113 timezone=0,
113 timezone=0,
114 status_value=0,
114 status_value=0,
115 **kwargs):
115 **kwargs):
116 self.path = path
116 self.path = path
117 self.startDate = startDate
117 self.startDate = startDate
118 self.endDate = endDate
118 self.endDate = endDate
119 self.startTime = startTime
119 self.startTime = startTime
120 self.endTime = endTime
120 self.endTime = endTime
121 self.status_value = status_value
121 self.status_value = status_value
122 self.datatime = datetime.datetime(1900,1,1)
122 self.datatime = datetime.datetime(1900,1,1)
123 self.delay = kwargs.get('delay', 10)
123 self.delay = kwargs.get('delay', 10)
124 self.online = kwargs.get('online', False)
124 self.online = kwargs.get('online', False)
125 self.nTries = kwargs.get('nTries', 3)
125 self.nTries = kwargs.get('nTries', 3)
126
126
127 if self.path is None:
127 if self.path is None:
128 raise ValueError("The path is not valid")
128 raise ValueError("The path is not valid")
129
129
130 if ext is None:
130 if ext is None:
131 ext = self.ext
131 ext = self.ext
132
132
133 self.fileList = self.search_files(self.path, startDate, endDate, ext)
133 self.fileList = self.search_files(self.path, startDate, endDate, ext)
134 self.timezone = timezone
134 self.timezone = timezone
135 self.fileIndex = 0
135 self.fileIndex = 0
136
136
137 if not self.fileList:
137 if not self.fileList:
138 raise Warning("There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % (
138 raise Warning("There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % (
139 path))
139 path))
140
140
141 self.setNextFile()
141 self.setNextFile()
142
142
143 def search_last_file(self):
143 def search_last_file(self):
144 '''
144 '''
145 Get last file and add it to the list
145 Get last file and add it to the list
146 '''
146 '''
147
147
148 for n in range(self.nTries+1):
148 for n in range(self.nTries+1):
149 if n>0:
149 if n>0:
150 log.warning(
150 log.warning(
151 "Waiting %0.2f seconds for the next file, try %03d ..." % (self.delay, n+1),
151 "Waiting %0.2f seconds for the next file, try %03d ..." % (self.delay, n+1),
152 self.name
152 self.name
153 )
153 )
154 time.sleep(self.delay)
154 time.sleep(self.delay)
155 file_list = os.listdir(self.path)
155 file_list = os.listdir(self.path)
156 file_list.sort()
156 file_list.sort()
157 if file_list:
157 if file_list:
158 if self.filename:
158 if self.filename:
159 if file_list[-1] not in self.filename:
159 if file_list[-1] not in self.filename:
160 return file_list[-1]
160 return file_list[-1]
161 else:
161 else:
162 continue
162 continue
163 return file_list[-1]
163 return file_list[-1]
164 return 0
164 return 0
165
165
166 def search_files(self, path, startDate, endDate, ext):
166 def search_files(self, path, startDate, endDate, ext):
167 '''
167 '''
168 Searching for BLTR rawdata file in path
168 Searching for BLTR rawdata file in path
169 Creating a list of file to proces included in [startDate,endDate]
169 Creating a list of file to proces included in [startDate,endDate]
170
170
171 Input:
171 Input:
172 path - Path to find BLTR rawdata files
172 path - Path to find BLTR rawdata files
173 startDate - Select file from this date
173 startDate - Select file from this date
174 enDate - Select file until this date
174 enDate - Select file until this date
175 ext - Extension of the file to read
175 ext - Extension of the file to read
176 '''
176 '''
177
177
178 log.success('Searching files in {} '.format(path), 'BLTRParamReader')
178 log.success('Searching files in {} '.format(path), 'BLTRParamReader')
179 foldercounter = 0
179 foldercounter = 0
180 fileList0 = glob.glob1(path, "*%s" % ext)
180 fileList0 = glob.glob1(path, "*%s" % ext)
181 fileList0.sort()
181 fileList0.sort()
182
182
183 for thisFile in fileList0:
183 for thisFile in fileList0:
184 year = thisFile[-14:-10]
184 year = thisFile[-14:-10]
185 if not isNumber(year):
185 if not isNumber(year):
186 continue
186 continue
187
187
188 month = thisFile[-10:-8]
188 month = thisFile[-10:-8]
189 if not isNumber(month):
189 if not isNumber(month):
190 continue
190 continue
191
191
192 day = thisFile[-8:-6]
192 day = thisFile[-8:-6]
193 if not isNumber(day):
193 if not isNumber(day):
194 continue
194 continue
195
195
196 year, month, day = int(year), int(month), int(day)
196 year, month, day = int(year), int(month), int(day)
197 dateFile = datetime.date(year, month, day)
197 dateFile = datetime.date(year, month, day)
198
198
199 if (startDate > dateFile) or (endDate < dateFile):
199 if (startDate > dateFile) or (endDate < dateFile):
200 continue
200 continue
201
201
202 yield thisFile
202 yield thisFile
203
203
204 return
204 return
205
205
206 def setNextFile(self):
206 def setNextFile(self):
207
207
208 if self.online:
208 if self.online:
209 filename = self.search_last_file()
209 filename = self.search_last_file()
210 if not filename:
210 if not filename:
211 self.flagNoMoreFiles = 1
211 self.flagNoMoreFiles = 1
212 return 0
212 return 0
213 else:
213 else:
214 try:
214 try:
215 filename = next(self.fileList)
215 filename = next(self.fileList)
216 except StopIteration:
216 except StopIteration:
217 self.flagNoMoreFiles = 1
217 self.flagNoMoreFiles = 1
218 return 0
218 return 0
219
219
220 log.success('Opening {}'.format(filename), 'BLTRParamReader')
220 log.success('Opening {}'.format(filename), 'BLTRParamReader')
221
221
222 dirname, name = os.path.split(filename)
222 dirname, name = os.path.split(filename)
223 # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
223 # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
224 self.siteFile = filename.split('.')[0]
224 self.siteFile = filename.split('.')[0]
225 if self.filename is not None:
225 if self.filename is not None:
226 self.fp.close()
226 self.fp.close()
227 self.filename = os.path.join(self.path, filename)
227 self.filename = os.path.join(self.path, filename)
228 self.fp = open(self.filename, 'rb')
228 self.fp = open(self.filename, 'rb')
229 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
229 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
230 self.nrecords = self.header_file['nrec'][0]
230 self.nrecords = self.header_file['nrec'][0]
231 self.sizeOfFile = os.path.getsize(self.filename)
231 self.sizeOfFile = os.path.getsize(self.filename)
232 self.counter_records = 0
232 self.counter_records = 0
233 self.flagIsNewFile = 0
233 self.flagIsNewFile = 0
234 self.fileIndex += 1
234 self.fileIndex += 1
235
235
236 return 1
236 return 1
237
237
238 def readNextBlock(self):
238 def readNextBlock(self):
239
239
240 while True:
240 while True:
241 if not self.online and self.counter_records == self.nrecords:
241 if not self.online and self.counter_records == self.nrecords:
242 self.flagIsNewFile = 1
242 self.flagIsNewFile = 1
243 if not self.setNextFile():
243 if not self.setNextFile():
244 return 0
244 return 0
245
245
246 try:
246 try:
247 pointer = self.fp.tell()
247 pointer = self.fp.tell()
248 self.readBlock()
248 self.readBlock()
249 except:
249 except:
250 if self.online and self.waitDataBlock(pointer, 38512) == 1:
250 if self.online and self.waitDataBlock(pointer, 38512) == 1:
251 continue
251 continue
252 else:
252 else:
253 if not self.setNextFile():
253 if not self.setNextFile():
254 return 0
254 return 0
255
255
256 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
256 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
257 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
257 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
258 log.warning(
258 log.warning(
259 'Reading Record No. {}/{} -> {} [Skipping]'.format(
259 'Reading Record No. {}/{} -> {} [Skipping]'.format(
260 self.counter_records,
260 self.counter_records,
261 self.nrecords,
261 self.nrecords,
262 self.datatime.ctime()),
262 self.datatime.ctime()),
263 'BLTRParamReader')
263 'BLTRParamReader')
264 continue
264 continue
265 break
265 break
266
266
267 log.log('Reading Record No. {} -> {}'.format(
267 log.log('Reading Record No. {} -> {}'.format(
268 self.counter_records,
268 self.counter_records,
269 # self.nrecords,
269 # self.nrecords,
270 self.datatime.ctime()), 'BLTRParamReader')
270 self.datatime.ctime()), 'BLTRParamReader')
271
271
272 return 1
272 return 1
273
273
274 def readBlock(self):
274 def readBlock(self):
275
275
276 pointer = self.fp.tell()
276 pointer = self.fp.tell()
277 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
277 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
278 self.nchannels = int(header_rec['nchan'][0] / 2)
278 self.nchannels = int(header_rec['nchan'][0] / 2)
279 self.kchan = header_rec['nrxs'][0]
279 self.kchan = header_rec['nrxs'][0]
280 self.nmodes = header_rec['nmodes'][0]
280 self.nmodes = header_rec['nmodes'][0]
281 self.nranges = header_rec['nranges'][0]
281 self.nranges = header_rec['nranges'][0]
282 self.fp.seek(pointer)
282 self.fp.seek(pointer)
283 self.height = numpy.empty((self.nmodes, self.nranges))
283 self.height = numpy.empty((self.nmodes, self.nranges))
284 self.snr = numpy.empty((self.nmodes, int(self.nchannels), self.nranges))
284 self.snr = numpy.empty((self.nmodes, int(self.nchannels), self.nranges))
285 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
285 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
286 self.flagDiscontinuousBlock = 0
286 self.flagDiscontinuousBlock = 0
287
287
288 for mode in range(self.nmodes):
288 for mode in range(self.nmodes):
289 self.readHeader()
289 self.readHeader()
290 data = self.readData()
290 data = self.readData()
291 self.height[mode] = (data[0] - self.correction) / 1000.
291 self.height[mode] = (data[0] - self.correction) / 1000.
292 self.buffer[mode] = data[1]
292 self.buffer[mode] = data[1]
293 self.snr[mode] = data[2]
293 self.snr[mode] = data[2]
294
294
295 self.counter_records = self.counter_records + self.nmodes
295 self.counter_records = self.counter_records + self.nmodes
296
296
297 return
297 return
298
298
299 def readHeader(self):
299 def readHeader(self):
300 '''
300 '''
301 RecordHeader of BLTR rawdata file
301 RecordHeader of BLTR rawdata file
302 '''
302 '''
303
303
304 header_structure = numpy.dtype(
304 header_structure = numpy.dtype(
305 REC_HEADER_STRUCTURE.descr + [
305 REC_HEADER_STRUCTURE.descr + [
306 ('antenna_coord', 'f4', (2, int(self.nchannels))),
306 ('antenna_coord', 'f4', (2, int(self.nchannels))),
307 ('rx_gains', 'u4', (int(self.nchannels),)),
307 ('rx_gains', 'u4', (int(self.nchannels),)),
308 ('rx_analysis', 'u4', (int(self.nchannels),))
308 ('rx_analysis', 'u4', (int(self.nchannels),))
309 ]
309 ]
310 )
310 )
311
311
312 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
312 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
313 self.lat = self.header_rec['lat'][0]
313 self.lat = self.header_rec['lat'][0]
314 self.lon = self.header_rec['lon'][0]
314 self.lon = self.header_rec['lon'][0]
315 self.delta = self.header_rec['delta_r'][0]
315 self.delta = self.header_rec['delta_r'][0]
316 self.correction = self.header_rec['dmode_rngcorr'][0]
316 self.correction = self.header_rec['dmode_rngcorr'][0]
317 self.imode = self.header_rec['dmode_index'][0]
317 self.imode = self.header_rec['dmode_index'][0]
318 self.antenna = self.header_rec['antenna_coord']
318 self.antenna = self.header_rec['antenna_coord']
319 self.rx_gains = self.header_rec['rx_gains']
319 self.rx_gains = self.header_rec['rx_gains']
320 self.time = self.header_rec['time'][0]
320 self.time = self.header_rec['time'][0]
321 dt = datetime.datetime.utcfromtimestamp(self.time)
321 dt = datetime.datetime.utcfromtimestamp(self.time)
322 if dt.date()>self.datatime.date():
322 if dt.date()>self.datatime.date():
323 self.flagDiscontinuousBlock = 1
323 self.flagDiscontinuousBlock = 1
324 self.datatime = dt
324 self.datatime = dt
325
325
326 def readData(self):
326 def readData(self):
327 '''
327 '''
328 Reading and filtering data block record of BLTR rawdata file,
328 Reading and filtering data block record of BLTR rawdata file,
329 filtering is according to status_value.
329 filtering is according to status_value.
330
330
331 Input:
331 Input:
332 status_value - Array data is set to NAN for values that are not
332 status_value - Array data is set to NAN for values that are not
333 equal to status_value
333 equal to status_value
334
334
335 '''
335 '''
336 self.nchannels = int(self.nchannels)
336 self.nchannels = int(self.nchannels)
337
337
338 data_structure = numpy.dtype(
338 data_structure = numpy.dtype(
339 DATA_STRUCTURE.descr + [
339 DATA_STRUCTURE.descr + [
340 ('rx_saturation', 'u4', (self.nchannels,)),
340 ('rx_saturation', 'u4', (self.nchannels,)),
341 ('chan_offset', 'u4', (2 * self.nchannels,)),
341 ('chan_offset', 'u4', (2 * self.nchannels,)),
342 ('rx_amp', 'u4', (self.nchannels,)),
342 ('rx_amp', 'u4', (self.nchannels,)),
343 ('rx_snr', 'f4', (self.nchannels,)),
343 ('rx_snr', 'f4', (self.nchannels,)),
344 ('cross_snr', 'f4', (self.kchan,)),
344 ('cross_snr', 'f4', (self.kchan,)),
345 ('sea_power_relative', 'f4', (self.kchan,))]
345 ('sea_power_relative', 'f4', (self.kchan,))]
346 )
346 )
347
347
348 data = numpy.fromfile(self.fp, data_structure, self.nranges)
348 data = numpy.fromfile(self.fp, data_structure, self.nranges)
349
349
350 height = data['range']
350 height = data['range']
351 winds = numpy.array(
351 winds = numpy.array(
352 (data['zonal'], data['meridional'], data['vertical']))
352 (data['zonal'], data['meridional'], data['vertical']))
353 snr = data['rx_snr'].T
353 snr = data['rx_snr'].T
354
354
355 winds[numpy.where(winds == -9999.)] = numpy.nan
355 winds[numpy.where(winds == -9999.)] = numpy.nan
356 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
356 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
357 snr[numpy.where(snr == -9999.)] = numpy.nan
357 snr[numpy.where(snr == -9999.)] = numpy.nan
358 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
358 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
359 snr = numpy.power(10, snr / 10)
359 snr = numpy.power(10, snr / 10)
360
360
361 return height, winds, snr
361 return height, winds, snr
362
362
363 def set_output(self):
363 def set_output(self):
364 '''
364 '''
365 Storing data from databuffer to dataOut object
365 Storing data from databuffer to dataOut object
366 '''
366 '''
367
367
368 self.dataOut.data_SNR = self.snr
368 self.dataOut.data_SNR = self.snr
369 self.dataOut.height = self.height
369 self.dataOut.height = self.height
370 self.dataOut.data = self.buffer
370 self.dataOut.data = self.buffer
371 self.dataOut.utctimeInit = self.time
371 self.dataOut.utctimeInit = self.time
372 self.dataOut.utctime = self.dataOut.utctimeInit
372 self.dataOut.utctime = self.dataOut.utctimeInit
373 self.dataOut.useLocalTime = False
373 self.dataOut.useLocalTime = False
374 self.dataOut.paramInterval = 157
374 self.dataOut.paramInterval = 157
375 self.dataOut.timezone = self.timezone
375 self.dataOut.timezone = self.timezone
376 self.dataOut.site = self.siteFile
376 self.dataOut.site = self.siteFile
377 self.dataOut.nrecords = self.nrecords / self.nmodes
377 self.dataOut.nrecords = self.nrecords / self.nmodes
378 self.dataOut.sizeOfFile = self.sizeOfFile
378 self.dataOut.sizeOfFile = self.sizeOfFile
379 self.dataOut.lat = self.lat
379 self.dataOut.lat = self.lat
380 self.dataOut.lon = self.lon
380 self.dataOut.lon = self.lon
381 self.dataOut.channelList = list(range(self.nchannels))
381 self.dataOut.channelList = list(range(self.nchannels))
382 self.dataOut.kchan = self.kchan
382 self.dataOut.kchan = self.kchan
383 self.dataOut.delta = self.delta
383 self.dataOut.delta = self.delta
384 self.dataOut.correction = self.correction
384 self.dataOut.correction = self.correction
385 self.dataOut.nmodes = self.nmodes
385 self.dataOut.nmodes = self.nmodes
386 self.dataOut.imode = self.imode
386 self.dataOut.imode = self.imode
387 self.dataOut.antenna = self.antenna
387 self.dataOut.antenna = self.antenna
388 self.dataOut.rx_gains = self.rx_gains
388 self.dataOut.rx_gains = self.rx_gains
389 self.dataOut.flagNoData = False
389 self.dataOut.flagNoData = False
390 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
390 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
391
391
392 def getData(self):
392 def getData(self):
393 '''
393 '''
394 Storing data from databuffer to dataOut object
394 Storing data from databuffer to dataOut object
395 '''
395 '''
396 if self.flagNoMoreFiles:
396 if self.flagNoMoreFiles:
397 self.dataOut.flagNoData = True
397 self.dataOut.flagNoData = True
398 raise schainpy.admin.SchainError('No More files to read')
398 raise schainpy.admin.SchainError('No More files to read')
399
399
400 if not self.readNextBlock():
400 if not self.readNextBlock():
401 self.dataOut.flagNoData = True
401 self.dataOut.flagNoData = True
402 raise schainpy.admin.SchainError('Time for wait new file reach!!!')
402 raise schainpy.admin.SchainError('Time for wait new file reach!!!')
403
403
404 self.set_output()
404 self.set_output()
405
405
406 return 1
406 return 1
407 No newline at end of file
407
@@ -1,462 +1,462
1 import os
1 import os
2 import sys
2 import sys
3 import glob
3 import glob
4 import fnmatch
4 import fnmatch
5 import datetime
5 import datetime
6 import time
6 import time
7 import re
7 import re
8 import h5py
8 import h5py
9 import numpy
9 import numpy
10
10
11 import pylab as plb
11 import pylab as plb
12 from scipy.optimize import curve_fit
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar, exp
13 from scipy import asarray as ar, exp
14
14
15 SPEED_OF_LIGHT = 299792458
15 SPEED_OF_LIGHT = 299792458
16 SPEED_OF_LIGHT = 3e8
16 SPEED_OF_LIGHT = 3e8
17
17
18 from .utils import folder_in_range
18 from .utils import folder_in_range
19
19
20 import schainpy.admin
20 import schainpy.admin
21 from schainpy.model.data.jrodata import Spectra
21 from schainpy.model.data.jrodata import Spectra
22 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
22 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
23 from schainpy.utils import log
23 from schainpy.utils import log
24 from schainpy.model.io.jroIO_base import JRODataReader
24 from schainpy.model.io.jroIO_base import JRODataReader
25
25
26 def pol2cart(rho, phi):
26 def pol2cart(rho, phi):
27 x = rho * numpy.cos(phi)
27 x = rho * numpy.cos(phi)
28 y = rho * numpy.sin(phi)
28 y = rho * numpy.sin(phi)
29 return(x, y)
29 return(x, y)
30
30
31 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
31 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
32 ('FileMgcNumber', '<u4'), # 0x23020100
32 ('FileMgcNumber', '<u4'), # 0x23020100
33 ('nFDTdataRecors', '<u4'),
33 ('nFDTdataRecors', '<u4'),
34 ('OffsetStartHeader', '<u4'),
34 ('OffsetStartHeader', '<u4'),
35 ('RadarUnitId', '<u4'),
35 ('RadarUnitId', '<u4'),
36 ('SiteName', 'S32'), # Null terminated
36 ('SiteName', 'S32'), # Null terminated
37 ])
37 ])
38
38
39
39
40 class FileHeaderBLTR():
40 class FileHeaderBLTR():
41
41
42 def __init__(self, fo):
42 def __init__(self, fo):
43
43
44 self.fo = fo
44 self.fo = fo
45 self.size = 48
45 self.size = 48
46 self.read()
46 self.read()
47
47
48 def read(self):
48 def read(self):
49
49
50 header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
50 header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
51 self.FileMgcNumber = hex(header['FileMgcNumber'][0])
51 self.FileMgcNumber = hex(header['FileMgcNumber'][0])
52 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
52 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
53 self.RadarUnitId = int(header['RadarUnitId'][0])
53 self.RadarUnitId = int(header['RadarUnitId'][0])
54 self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
54 self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
55 self.SiteName = header['SiteName'][0]
55 self.SiteName = header['SiteName'][0]
56
56
57 def write(self, fp):
57 def write(self, fp):
58
58
59 headerTuple = (self.FileMgcNumber,
59 headerTuple = (self.FileMgcNumber,
60 self.nFDTdataRecors,
60 self.nFDTdataRecors,
61 self.RadarUnitId,
61 self.RadarUnitId,
62 self.SiteName,
62 self.SiteName,
63 self.size)
63 self.size)
64
64
65 header = numpy.array(headerTuple, FILE_STRUCTURE)
65 header = numpy.array(headerTuple, FILE_STRUCTURE)
66 header.tofile(fp)
66 header.tofile(fp)
67 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
67 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
68
68
69 fid : file or str
69 fid : file or str
70 An open file object, or a string containing a filename.
70 An open file object, or a string containing a filename.
71
71
72 sep : str
72 sep : str
73 Separator between array items for text output. If "" (empty), a binary file is written,
73 Separator between array items for text output. If "" (empty), a binary file is written,
74 equivalent to file.write(a.tobytes()).
74 equivalent to file.write(a.tobytes()).
75
75
76 format : str
76 format : str
77 Format string for text file output. Each entry in the array is formatted to text by
77 Format string for text file output. Each entry in the array is formatted to text by
78 first converting it to the closest Python type, and then using "format" % item.
78 first converting it to the closest Python type, and then using "format" % item.
79
79
80 '''
80 '''
81
81
82 return 1
82 return 1
83
83
84
84
85 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
85 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
86 ('RecMgcNumber', '<u4'), # 0x23030001
86 ('RecMgcNumber', '<u4'), # 0x23030001
87 ('RecCounter', '<u4'), # Record counter(0,1, ...)
87 ('RecCounter', '<u4'), # Record counter(0,1, ...)
88 # Offset to start of next record form start of this record
88 # Offset to start of next record form start of this record
89 ('Off2StartNxtRec', '<u4'),
89 ('Off2StartNxtRec', '<u4'),
90 # Offset to start of data from start of this record
90 # Offset to start of data from start of this record
91 ('Off2StartData', '<u4'),
91 ('Off2StartData', '<u4'),
92 # Epoch time stamp of start of acquisition (seconds)
92 # Epoch time stamp of start of acquisition (seconds)
93 ('nUtime', '<i4'),
93 ('nUtime', '<i4'),
94 # Millisecond component of time stamp (0,...,999)
94 # Millisecond component of time stamp (0,...,999)
95 ('nMilisec', '<u4'),
95 ('nMilisec', '<u4'),
96 # Experiment tag name (null terminated)
96 # Experiment tag name (null terminated)
97 ('ExpTagName', 'S32'),
97 ('ExpTagName', 'S32'),
98 # Experiment comment (null terminated)
98 # Experiment comment (null terminated)
99 ('ExpComment', 'S32'),
99 ('ExpComment', 'S32'),
100 # Site latitude (from GPS) in degrees (positive implies North)
100 # Site latitude (from GPS) in degrees (positive implies North)
101 ('SiteLatDegrees', '<f4'),
101 ('SiteLatDegrees', '<f4'),
102 # Site longitude (from GPS) in degrees (positive implies East)
102 # Site longitude (from GPS) in degrees (positive implies East)
103 ('SiteLongDegrees', '<f4'),
103 ('SiteLongDegrees', '<f4'),
104 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
104 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
105 ('RTCgpsStatus', '<u4'),
105 ('RTCgpsStatus', '<u4'),
106 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
106 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
107 ('ReceiveFrec', '<u4'), # Receive frequency
107 ('ReceiveFrec', '<u4'), # Receive frequency
108 # First local oscillator frequency (Hz)
108 # First local oscillator frequency (Hz)
109 ('FirstOsciFrec', '<u4'),
109 ('FirstOsciFrec', '<u4'),
110 # (0="O", 1="E", 2="linear 1", 3="linear2")
110 # (0="O", 1="E", 2="linear 1", 3="linear2")
111 ('Polarisation', '<u4'),
111 ('Polarisation', '<u4'),
112 # Receiver filter settings (0,1,2,3)
112 # Receiver filter settings (0,1,2,3)
113 ('ReceiverFiltSett', '<u4'),
113 ('ReceiverFiltSett', '<u4'),
114 # Number of modes in use (1 or 2)
114 # Number of modes in use (1 or 2)
115 ('nModesInUse', '<u4'),
115 ('nModesInUse', '<u4'),
116 # Dual Mode index number for these data (0 or 1)
116 # Dual Mode index number for these data (0 or 1)
117 ('DualModeIndex', '<u4'),
117 ('DualModeIndex', '<u4'),
118 # Dual Mode range correction for these data (m)
118 # Dual Mode range correction for these data (m)
119 ('DualModeRange', '<u4'),
119 ('DualModeRange', '<u4'),
120 # Number of digital channels acquired (2*N)
120 # Number of digital channels acquired (2*N)
121 ('nDigChannels', '<u4'),
121 ('nDigChannels', '<u4'),
122 # Sampling resolution (meters)
122 # Sampling resolution (meters)
123 ('SampResolution', '<u4'),
123 ('SampResolution', '<u4'),
124 # Number of range gates sampled
124 # Number of range gates sampled
125 ('nHeights', '<u4'),
125 ('nHeights', '<u4'),
126 # Start range of sampling (meters)
126 # Start range of sampling (meters)
127 ('StartRangeSamp', '<u4'),
127 ('StartRangeSamp', '<u4'),
128 ('PRFhz', '<u4'), # PRF (Hz)
128 ('PRFhz', '<u4'), # PRF (Hz)
129 ('nCohInt', '<u4'), # Integrations
129 ('nCohInt', '<u4'), # Integrations
130 # Number of data points transformed
130 # Number of data points transformed
131 ('nProfiles', '<u4'),
131 ('nProfiles', '<u4'),
132 # Number of receive beams stored in file (1 or N)
132 # Number of receive beams stored in file (1 or N)
133 ('nChannels', '<u4'),
133 ('nChannels', '<u4'),
134 ('nIncohInt', '<u4'), # Number of spectral averages
134 ('nIncohInt', '<u4'), # Number of spectral averages
135 # FFT windowing index (0 = no window)
135 # FFT windowing index (0 = no window)
136 ('FFTwindowingInd', '<u4'),
136 ('FFTwindowingInd', '<u4'),
137 # Beam steer angle (azimuth) in degrees (clockwise from true North)
137 # Beam steer angle (azimuth) in degrees (clockwise from true North)
138 ('BeamAngleAzim', '<f4'),
138 ('BeamAngleAzim', '<f4'),
139 # Beam steer angle (zenith) in degrees (0=> vertical)
139 # Beam steer angle (zenith) in degrees (0=> vertical)
140 ('BeamAngleZen', '<f4'),
140 ('BeamAngleZen', '<f4'),
141 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
141 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
142 ('AntennaCoord0', '<f4'),
142 ('AntennaCoord0', '<f4'),
143 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
143 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
144 ('AntennaAngl0', '<f4'),
144 ('AntennaAngl0', '<f4'),
145 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
145 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
146 ('AntennaCoord1', '<f4'),
146 ('AntennaCoord1', '<f4'),
147 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
147 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
148 ('AntennaAngl1', '<f4'),
148 ('AntennaAngl1', '<f4'),
149 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
149 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
150 ('AntennaCoord2', '<f4'),
150 ('AntennaCoord2', '<f4'),
151 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
151 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
152 ('AntennaAngl2', '<f4'),
152 ('AntennaAngl2', '<f4'),
153 # Receiver phase calibration (degrees) - N values
153 # Receiver phase calibration (degrees) - N values
154 ('RecPhaseCalibr0', '<f4'),
154 ('RecPhaseCalibr0', '<f4'),
155 # Receiver phase calibration (degrees) - N values
155 # Receiver phase calibration (degrees) - N values
156 ('RecPhaseCalibr1', '<f4'),
156 ('RecPhaseCalibr1', '<f4'),
157 # Receiver phase calibration (degrees) - N values
157 # Receiver phase calibration (degrees) - N values
158 ('RecPhaseCalibr2', '<f4'),
158 ('RecPhaseCalibr2', '<f4'),
159 # Receiver amplitude calibration (ratio relative to receiver one) - N values
159 # Receiver amplitude calibration (ratio relative to receiver one) - N values
160 ('RecAmpCalibr0', '<f4'),
160 ('RecAmpCalibr0', '<f4'),
161 # Receiver amplitude calibration (ratio relative to receiver one) - N values
161 # Receiver amplitude calibration (ratio relative to receiver one) - N values
162 ('RecAmpCalibr1', '<f4'),
162 ('RecAmpCalibr1', '<f4'),
163 # Receiver amplitude calibration (ratio relative to receiver one) - N values
163 # Receiver amplitude calibration (ratio relative to receiver one) - N values
164 ('RecAmpCalibr2', '<f4'),
164 ('RecAmpCalibr2', '<f4'),
165 # Receiver gains in dB - N values
165 # Receiver gains in dB - N values
166 ('ReceiverGaindB0', '<i4'),
166 ('ReceiverGaindB0', '<i4'),
167 # Receiver gains in dB - N values
167 # Receiver gains in dB - N values
168 ('ReceiverGaindB1', '<i4'),
168 ('ReceiverGaindB1', '<i4'),
169 # Receiver gains in dB - N values
169 # Receiver gains in dB - N values
170 ('ReceiverGaindB2', '<i4'),
170 ('ReceiverGaindB2', '<i4'),
171 ])
171 ])
172
172
173
173
class RecordHeaderBLTR():
    """
    Reader for a single BLTR (.fdt) record header.

    Each record starts at a fixed file-header offset plus a per-record
    stride. ``read(block)`` seeks to the requested block, unpacks the
    fields declared by ``RECORD_STRUCTURE`` and exposes each one as an
    instance attribute, then sanity-checks the record offsets.
    """

    def __init__(self, fo):
        # fo: open file object for the .fdt file; read() seeks within it.
        self.fo = fo
        self.OffsetStartHeader = 48     # bytes of file header before record 0
        self.Off2StartNxtRec = 811248   # default stride between records (bytes)

    def read(self, block):
        """
        Read the record header of block number *block*.

        Returns 1 when the header was read and its offsets are consistent,
        0 otherwise (a warning is written to stderr in that case).
        """
        OffRHeader = self.OffsetStartHeader + block * self.Off2StartNxtRec
        self.fo.seek(OffRHeader, os.SEEK_SET)
        header = numpy.fromfile(self.fo, RECORD_STRUCTURE, 1)
        self.RecMgcNumber = hex(header['RecMgcNumber'][0])  # 0x23030001
        self.RecCounter = int(header['RecCounter'][0])
        self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
        self.Off2StartData = int(header['Off2StartData'][0])
        self.nUtime = header['nUtime'][0]
        self.nMilisec = header['nMilisec'][0]
        self.ExpTagName = ''  # str(header['ExpTagName'][0])
        self.ExpComment = ''  # str(header['ExpComment'][0])
        self.SiteLatDegrees = header['SiteLatDegrees'][0]
        self.SiteLongDegrees = header['SiteLongDegrees'][0]
        self.RTCgpsStatus = header['RTCgpsStatus'][0]
        self.TransmitFrec = header['TransmitFrec'][0]
        self.ReceiveFrec = header['ReceiveFrec'][0]
        self.FirstOsciFrec = header['FirstOsciFrec'][0]
        self.Polarisation = header['Polarisation'][0]
        self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
        self.nModesInUse = header['nModesInUse'][0]
        self.DualModeIndex = header['DualModeIndex'][0]
        self.DualModeRange = header['DualModeRange'][0]
        self.nDigChannels = header['nDigChannels'][0]
        self.SampResolution = header['SampResolution'][0]
        self.nHeights = header['nHeights'][0]
        self.StartRangeSamp = header['StartRangeSamp'][0]
        self.PRFhz = header['PRFhz'][0]
        self.nCohInt = header['nCohInt'][0]
        self.nProfiles = header['nProfiles'][0]
        self.nChannels = header['nChannels'][0]
        self.nIncohInt = header['nIncohInt'][0]
        self.FFTwindowingInd = header['FFTwindowingInd'][0]
        self.BeamAngleAzim = header['BeamAngleAzim'][0]
        self.BeamAngleZen = header['BeamAngleZen'][0]
        self.AntennaCoord0 = header['AntennaCoord0'][0]
        self.AntennaAngl0 = header['AntennaAngl0'][0]
        self.AntennaCoord1 = header['AntennaCoord1'][0]
        self.AntennaAngl1 = header['AntennaAngl1'][0]
        self.AntennaCoord2 = header['AntennaCoord2'][0]
        self.AntennaAngl2 = header['AntennaAngl2'][0]
        self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
        self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
        self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
        self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
        self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
        self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
        self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
        self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
        self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
        # Inter-pulse period (meters): half the round-trip distance per pulse.
        self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
        self.RHsize = 180 + 20 * self.nChannels
        # Data payload: profiles x channels x heights, complex (2 floats of 4 bytes)
        self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
        endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec

        # Offset consistency check. FIX: the original formatted these warnings
        # with an undefined name `fp` (NameError); use the file's path instead.
        if OffRHeader > endFp:
            sys.stderr.write(
                "Warning %s: Size value read from System Header is lower than it has to be\n" % self.fo.name)
            return 0

        if OffRHeader < endFp:
            sys.stderr.write(
                "Warning %s: Size value read from System Header size is greater than it has to be\n" % self.fo.name)
            return 0

        return 1
249
249
250 @MPDecorator
250
class BLTRSpectraReader (ProcessingUnit):
    # Processing unit that reads BLTR wind-profiler spectra (.fdt) files
    # and publishes each record as a Spectra object through self.dataOut.

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.ext = ".fdt"            # file extension handled by this reader
        self.optchar = "P"
        self.fpFile = None
        self.fp = None               # currently open .fdt file object
        self.BlockCounter = 0        # index of the next record to read
        self.fileSizeByHeader = None
        self.filenameList = []       # files selected by search_files()
        self.fileSelector = 0        # index of the next file to open
        self.Off2StartNxtRec = 0
        self.RecCounter = 0
        self.flagNoMoreFiles = 0
        self.data_spc = None         # auto-spectra buffer
        self.data_cspc = None        # cross-spectra buffer
        self.path = None
        self.OffsetStartHeader = 0
        self.Off2StartData = 0
        self.ipp = 0
        self.nFDTdataRecors = 0
        self.blocksize = 0
        self.dataOut = Spectra()
        self.dataOut.flagNoData = False

    def search_files(self):
        '''
        Function that indicates the number of .fdt files that exist in the folder to be read.
        It also creates an organized list with the names of the files to read.
        '''

        files = glob.glob(os.path.join(self.path, '*{}'.format(self.ext)))
        files = sorted(files)
        for f in files:
            # Filenames presumably look like <name>.<YYYYMMDD...>.fdt, so the
            # second dot-separated token is the date — TODO confirm format.
            filename = f.split('/')[-1]
            if folder_in_range(filename.split('.')[1], self.startDate, self.endDate, '%Y%m%d'):
                self.filenameList.append(f)

    def run(self, **kwargs):
        '''
        This method will be the one that will initiate the data entry, will be called constantly.
        You should first verify that your Setup () is set up and then continue to acquire
        the data to be processed with getData ().
        '''
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=None,
              endTime=None,
              walk=True,
              code=None,
              online=False,
              mode=None,
              **kwargs):
        # Store the selection parameters, scan the folder and open the
        # first matching file. `mode` selects which record of each pair is
        # read (see readBlock); it must be convertible to int.

        self.isConfig = True

        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.walk = walk
        self.mode = int(mode)
        self.search_files()
        if self.filenameList:
            self.readFile()

    def getData(self):
        '''
        Before starting this function, you should check that there is still an unread file,
        If there are still blocks to read or if the data block is empty.

        You should call the file "read".

        '''

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            raise schainpy.admin.SchainError('No more files')

        self.readBlock()

    def readFile(self):
        '''
        You must indicate if you are reading in Online or Offline mode and load the
        The parameters for this file reading mode.

        Then you must do 2 actions:

        1. Get the BLTR FileHeader.
        2. Start reading the first block.
        '''

        if self.fileSelector < len(self.filenameList):
            log.success('Opening file: {}'.format(self.filenameList[self.fileSelector]), self.name)
            # NOTE(review): file is opened in text mode; numpy.fromfile on a
            # text-mode handle is fragile on Python 3 — confirm 'rb' isn't needed.
            self.fp = open(self.filenameList[self.fileSelector])
            self.fheader = FileHeaderBLTR(self.fp)
            self.rheader = RecordHeaderBLTR(self.fp)
            self.nFDTdataRecors = self.fheader.nFDTdataRecors
            self.fileSelector += 1
            self.BlockCounter = 0
            return 1
        else:
            # No files left: flag end-of-data for getData().
            self.flagNoMoreFiles = True
            self.dataOut.flagNoData = True
            return 0

    def readBlock(self):
        '''
        It should be checked if the block has data, if it is not passed to the next file.

        Then the following is done:

        1. Read the RecordHeader
        2. Fill the buffer with the current block number.

        '''

        if self.BlockCounter == self.nFDTdataRecors:
            if not self.readFile():
                return

        # mode selects which record of each consecutive pair to decode
        # (records presumably alternate between two beam modes — confirm).
        if self.mode == 1:
            self.rheader.read(self.BlockCounter+1)
        elif self.mode == 0:
            self.rheader.read(self.BlockCounter)

        # Mirror the record-header fields into the reader and dataOut.
        self.RecCounter = self.rheader.RecCounter
        self.OffsetStartHeader = self.rheader.OffsetStartHeader
        self.Off2StartNxtRec = self.rheader.Off2StartNxtRec
        self.Off2StartData = self.rheader.Off2StartData
        self.nProfiles = self.rheader.nProfiles
        self.nChannels = self.rheader.nChannels
        self.nHeights = self.rheader.nHeights
        self.frequency = self.rheader.TransmitFrec
        self.DualModeIndex = self.rheader.DualModeIndex
        # All cross-spectra pairs of the 3 receive channels.
        self.pairsList = [(0, 1), (0, 2), (1, 2)]
        self.dataOut.pairsList = self.pairsList
        self.nRdPairs = len(self.dataOut.pairsList)
        self.dataOut.nRdPairs = self.nRdPairs
        # Height axis in km (header values are in meters).
        self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
        self.dataOut.channelList = range(self.nChannels)
        self.dataOut.nProfiles=self.rheader.nProfiles
        self.dataOut.nIncohInt=self.rheader.nIncohInt
        self.dataOut.nCohInt=self.rheader.nCohInt
        self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
        self.dataOut.PRF=self.rheader.PRFhz
        self.dataOut.nFFTPoints=self.rheader.nProfiles
        self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
        self.dataOut.timeZone = 0
        self.dataOut.useLocalTime = False
        self.dataOut.nmodes = 2
        log.log('Reading block {} - {}'.format(self.BlockCounter, self.dataOut.datatime), self.name)
        OffDATA = self.OffsetStartHeader + self.RecCounter * \
            self.Off2StartNxtRec + self.Off2StartData
        self.fp.seek(OffDATA, os.SEEK_SET)

        # Raw payload: complex64 samples ordered (height, channel, profile);
        # transpose to (channel, profile, height).
        self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
        self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
        self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
        self.data_block = numpy.transpose(self.data_block, (1,2,0))
        copy = self.data_block.copy()
        spc = copy * numpy.conjugate(copy)
        self.data_spc = numpy.absolute(spc)  # absolute value (magnitude)
        self.dataOut.data_spc = self.data_spc

        cspc = self.data_block.copy()
        self.data_cspc = self.data_block.copy()

        # Cross-spectra for each channel pair.
        for i in range(self.nRdPairs):

            chan_index0 = self.dataOut.pairsList[i][0]
            chan_index1 = self.dataOut.pairsList[i][1]

            self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :])

        '''Getting Eij and Nij'''
        # Antenna positions from polar (range, bearing) to cartesian (E, N).
        (AntennaX0, AntennaY0) = pol2cart(
            self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180)
        (AntennaX1, AntennaY1) = pol2cart(
            self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180)
        (AntennaX2, AntennaY2) = pol2cart(
            self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180)

        E01 = AntennaX0 - AntennaX1
        N01 = AntennaY0 - AntennaY1

        E02 = AntennaX0 - AntennaX2
        N02 = AntennaY0 - AntennaY2

        E12 = AntennaX1 - AntennaX2
        N12 = AntennaY1 - AntennaY2

        # East/North baseline components for each antenna pair.
        self.ChanDist = numpy.array(
            [[E01, N01], [E02, N02], [E12, N12]])

        self.dataOut.ChanDist = self.ChanDist

        # Advance by 2: records come in pairs and `mode` picked one of them.
        self.BlockCounter += 2
        self.dataOut.data_spc = self.data_spc
        self.dataOut.data_cspc =self.data_cspc
@@ -1,1575 +1,1580
1 """
1 """
2 Created on Jul 2, 2014
2 Created on Jul 2, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 """
5 """
6 import os
6 import os
7 import sys
7 import sys
8 import glob
8 import glob
9 import time
9 import time
10 import numpy
10 import numpy
11 import fnmatch
11 import fnmatch
12 import inspect
12 import inspect
13 import time
13 import time
14 import datetime
14 import datetime
15 import zmq
15 import zmq
16
16
17 from schainpy.model.proc.jroproc_base import Operation
17 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
18 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
19 from schainpy.utils import log
20 from schainpy.utils import log
20 import schainpy.admin
21 import schainpy.admin
21
22
# When True, timestamps read from data-file headers are interpreted as
# local time rather than UTC (passed to the BasicHeader constructors below).
LOCALTIME = True

# Fixed character width of each strptime-style directive; presumably used to
# turn a datetime format string into a fixed-width filename pattern — confirm
# at the call sites that consume this table.
DT_DIRECTIVES = {
    '%Y': 4,
    '%y': 2,
    '%m': 2,
    '%d': 2,
    '%j': 3,
    '%H': 2,
    '%M': 2,
    '%S': 2,
    '%f': 6
}
34
35
35
36
def isNumber(cad):
    """
    Check whether *cad* can be converted to a float.

    Input:
        cad : value (typically a string) to test for numeric convertibility

    Return:
        True  : cad is numeric (float(cad) succeeds)
        False : cad is not numeric
    """
    try:
        float(cad)
        return True
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; float() only raises TypeError/ValueError on bad input.
    except (TypeError, ValueError):
        return False
54
55
55
56
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca-format data file (.r) holds data inside
    the given epoch range.

    Inputs:
        filename       : full path of the data file
        startUTSeconds : start of the selected range, seconds since 01/01/1970
        endUTSeconds   : end of the selected range, seconds since 01/01/1970

    Return:
        1 if the file's basic-header timestamp falls in [start, end), else 0
        (also 0 when the file cannot be opened or its header is invalid).
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    header_ok = basicHeaderObj.read(fp)
    fp.close()

    if not header_ok:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # In-range means: startUTSeconds <= utc < endUTSeconds
    return 1 if startUTSeconds <= basicHeaderObj.utc < endUTSeconds else 0
96
97
97
98
def isTimeInRange(thisTime, startTime, endTime):
    """
    Return 1 if *thisTime* lies within [startTime, endTime], else 0.

    When endTime < startTime the window is taken to wrap past midnight,
    i.e. endTime belongs to the following day.
    """
    if endTime >= startTime:
        # Plain window: inside means startTime <= thisTime <= endTime.
        return 1 if startTime <= thisTime <= endTime else 0
    # Wrapped window: only the open gap (endTime, startTime) is excluded.
    return 0 if endTime < thisTime < startTime else 1
108
109
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Check whether a Jicamarca-format data file (.r) contains data inside the
    given date/time window.

    Inputs:
        filename  : full path of the data file
        startDate : start of the selected range (datetime.date)
        endDate   : end of the selected range (datetime.date)
        startTime : start of the selected range (datetime.time)
        endTime   : end of the selected range (datetime.time)

    Return:
        The datetime of the file's first block when the file overlaps the
        window, otherwise None (also None on open/parse errors or when the
        file is too short to contain a full data block).
    """
    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    try:
        firstBasicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        lastBasicHeaderObj = BasicHeader(LOCALTIME)

        if not firstBasicHeaderObj.read(fp):
            print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
            return None

        if not systemHeaderObj.read(fp):
            return None

        if not radarControllerHeaderObj.read(fp):
            return None

        if not processingHeaderObj.read(fp):
            return None

        filesize = os.path.getsize(filename)

        offset = processingHeaderObj.blockSize + 24  # header size

        if filesize <= offset:
            print("[Reading] %s: This file has not enough data" % filename)
            return None

        # The last block's basic header sits `offset` bytes before EOF.
        fp.seek(-offset, 2)
        # Result deliberately ignored, matching historical behavior: a bad
        # last header still yields the first-block datetime below.
        lastBasicHeaderObj.read(fp)
    finally:
        # FIX: the original leaked the file handle on every early return
        # above; finally guarantees the close on all paths.
        fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    #              o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #        startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    #<<<<<<<<<<<o                            o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #         endTime                    startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
210
211
211
212
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """Check whether a radar data folder falls inside a date range.

    Parameters
    ----------
    folder : str
        Full path of the directory; its basename must look like
        "?YYYYDDD" (YYYY = year, e.g. 2015; DDD = day of year, e.g. 305).
    startDate, endDate : datetime.date, optional
        Inclusive date range; when either is missing the range check is
        skipped and only the name format is validated.

    Returns
    -------
    int
        1 if the folder name is valid and within the range, 0 otherwise.
    """

    basename = os.path.basename(folder)

    if not isRadarFolder(basename):
        # fixed typo in message: "rigth" -> "right"
        print("The folder %s has not the right format" % folder)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFolder(basename)

        # reject anything outside the inclusive [startDate, endDate] window
        if thisDate < startDate:
            return 0

        if thisDate > endDate:
            return 0

    return 1
251
252
252
253
def isFileInDateRange(filename, startDate=None, endDate=None):
    """Check whether a Jicamarca data file falls inside a date range.

    Parameters
    ----------
    filename : str
        Full path of the data file; its basename must look like
        "?YYYYDDDsss" (YYYY = year, DDD = day of year, sss = set).
    startDate, endDate : datetime.date, optional
        Inclusive date range; when either is missing the range check is
        skipped and only the name format is validated.

    Returns
    -------
    int
        1 if the file name is valid and within the range, 0 otherwise.
    """

    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        # fixed typo in message: "rigth" -> "right"
        print("The filename %s has not the right format" % filename)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFile(basename)

        # reject anything outside the inclusive [startDate, endDate] window
        if thisDate < startDate:
            return 0

        if thisDate > endDate:
            return 0

    return 1
294
295
295
296
def getFileFromSet(path, ext, set):
    """Return the filename in *path* that matches the given set number.

    Scans *path* for files named like "?YYYYDDDSSS<ext>" and returns the
    first one whose year/doy/set match; when no such file exists, the
    (alphabetically) last valid file is returned instead, or None when the
    folder holds no valid files at all.
    """
    candidates = []

    # NOTE(review): year/doy intentionally keep the values parsed from the
    # *last* listed entry -- the match pattern below depends on that.
    for entry in os.listdir(path):
        # Expected layout: [0]=type, [1:5]=year, [5:8]=doy, [8:11]=set, .ext
        try:
            year = int(entry[1:5])
            doy = int(entry[5:8])
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    matches = fnmatch.filter(
        candidates, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if matches:
        return matches[0]

    wanted = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % wanted)
    print('...going to the last file: ')

    if candidates:
        return sorted(candidates, key=str.lower)[-1]

    return None
330
331
331
332
def getlastFileFromPath(path, ext):
    """Return the alphabetically last file in *path* named like
    "PYYYYDDDSSS<ext>", or None when nothing matches.

    Only entries whose year (chars 1-4) and day-of-year (chars 5-7) are
    numeric and whose extension equals *ext* (case-insensitive) are kept.
    """
    matching = []

    for candidate in os.listdir(path):
        # Layout: [0]=type, [1:5]=year, [5:8]=doy, [8:11]=set, then .ext
        yearPart = candidate[1:5]
        if not isNumber(yearPart):
            continue

        doyPart = candidate[5:8]
        if not isNumber(doyPart):
            continue

        # conversions kept to preserve the original failure mode on
        # numeric-but-not-integer slices (e.g. exponents)
        year = int(yearPart)
        doy = int(doyPart)

        if os.path.splitext(candidate)[-1].lower() != ext.lower():
            continue

        matching.append(candidate)

    if matching:
        return sorted(matching, key=str.lower)[-1]

    return None
373
374
374
375
def isRadarFolder(folder):
    """Return 1 when *folder* is named like "?YYYYDDD", else 0."""
    try:
        int(folder[1:5])  # year
        int(folder[5:8])  # day of year
    except:
        return 0
    return 1
383
384
384
385
def isRadarFile(file):
    """Return 1 when *file* is named like "?YYYYDDDSSS...", else 0."""
    try:
        int(file[1:5])    # year
        int(file[5:8])    # day of year
        int(file[8:11])   # set number
    except:
        return 0
    return 1
394
395
395
396
def getDateFromRadarFile(file):
    """Decode a "?YYYYDDDSSS..." name into a datetime.date.

    Returns None when the name does not follow that layout.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number: validated only, not used
    except:
        return None

    # day-of-year arithmetic: Jan 1 plus (doy - 1) days
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406
407
407
408
def getDateFromRadarFolder(folder):
    """Decode a "?YYYYDDD" folder name into a datetime.date.

    Returns None when the name does not follow that layout.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    # day-of-year arithmetic: Jan 1 plus (doy - 1) days
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417
418
def parse_format(s, fmt):
    """Expand each strptime directive in *fmt* with the characters that
    occupy the same position in *s*.

    DT_DIRECTIVES maps a directive (e.g. '%Y') to the number of characters
    it consumes, so fmt '*%Y%j' applied to s 'd2015305' yields '*2015305'.
    """
    # The directive count is taken once up front; each pass consumes the
    # leftmost remaining '%' directive.
    directives = fmt.count('%')
    for _ in range(directives):
        pos = fmt.index('%')
        width = DT_DIRECTIVES[fmt[pos:pos + 2]]
        fmt = fmt.replace(fmt[pos:pos + 2], s[pos:pos + width])
    return fmt
425
426
class Reader(object):
    """Base class for schainpy data readers.

    Holds the common reader state (current file, header sizes, date/time
    window, online/offline flags) and implements the generic machinery for
    locating data folders/files by date and cycling through them.  Concrete
    readers override run(), checkForRealPath(), verifyFile() and
    readFirstHeader().
    """

    c = 3E8                     # speed of light [m/s]
    isConfig = False            # becomes True once the reader is set up
    dtype = None                # dtype of the raw samples
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None             # full path of the file being read
    ext = None                  # expected file extension, e.g. '.r'
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0         # set when the file list is exhausted
    fp = None                   # handle of the currently open file
    firstHeaderSize = 0
    basicHeaderSize = 24        # size in bytes of the per-block basic header
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1              # -1 means no file has been opened yet
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False
    path = None                 # data path(s); may be comma-separated
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False              # True: poll for new/growing files
    delay = 60                  # seconds to wait between online retries
    nTries = 3  # quantity tries
    nFiles = 3  # number of files for searching
    walk = True                 # True: descend into day subfolders
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None              # strptime-like pattern for file names
    folderfmt = None            # strptime-like pattern for folder names
    open_file = open            # hook so subclasses can change how files open
    open_mode = 'rb'

    def run(self):
        """Entry point; must be implemented by each concrete reader."""

        raise NotImplementedError

    def getAllowedArgs(self):
        """Return the argument names accepted by this reader."""
        # Prefer the explicit __attrs__ list; otherwise introspect run()'s
        # signature (inspect.getargspec is deprecated but kept as-is here).
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            return inspect.getargspec(self.run).args

    def set_kwargs(self, **kwargs):
        """Copy every keyword argument onto the instance as an attribute."""

        for key, value in kwargs.items():
            setattr(self, key, value)

    def find_folders(self, path, startDate, endDate, folderfmt, last=False):
        """Yield day folders under *path* whose decoded date is in range.

        When *last* is True only the newest folder (by name) is considered.
        NOTE(review): the yielded value joins the original *path* string,
        so with a comma-separated multi-path this may build a wrong path --
        confirm callers pass a single directory in that case.
        """

        folders = [x for f in path.split(',')
                   for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
        folders.sort()

        if last:
            folders = [folders[-1]]

        for folder in folders:
            try:
                dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
                if dt >= startDate and dt <= endDate:
                    yield os.path.join(path, folder)
                else:
                    log.log('Skiping folder {}'.format(folder), self.name)
            except Exception as e:
                # unparseable folder names are skipped, never fatal
                log.log('Skiping folder {}'.format(folder), self.name)
                continue
        return

    def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
                   expLabel='', last=False):
        """Yield data files with extension *ext* found inside *folders*.

        File dates are decoded with *filefmt* and only files inside
        [startDate, endDate] are yielded.  With *last* True only the newest
        file of the first non-empty folder is yielded (no range check).
        """

        for path in folders:
            files = glob.glob1(path, '*{}'.format(ext))
            files.sort()
            if last:
                if files:
                    fo = files[-1]
                    try:
                        dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                        yield os.path.join(path, expLabel, fo)
                    except Exception as e:
                        pass
                    return
                else:
                    return

            for fo in files:
                try:
                    dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                    if dt >= startDate and dt <= endDate:
                        yield os.path.join(path, expLabel, fo)
                    else:
                        log.log('Skiping file {}'.format(fo), self.name)
                except Exception as e:
                    # unparseable file names are skipped, never fatal
                    log.log('Skiping file {}'.format(fo), self.name)
                    continue

    def searchFilesOffLine(self, path, startDate, endDate,
                           expLabel, ext, walk,
                           filefmt, folderfmt):
        """Search files in offline mode for the given arguments

        Return:
            Generator of files
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel)

    def searchFilesOnLine(self, path, startDate, endDate,
                          expLabel, ext, walk,
                          filefmt, folderfmt):
        """Search for the last file of the last folder

        Arguments:
            path : folder that holds the data files
            expLabel : name of the sub-experiment (subfolder)
            ext : extension of the data files
            walk : when True, descend into day subfolders (doypath)

        Return:
            generator with the full path of last filename
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt, last=True)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel, last=True)

    def setNextFile(self):
        """Open the next file to be read and parse its first header.

        Loops until a file passes verifyFile(); raises a Schain error or
        warning when no (more) files are available.
        """

        while True:
            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):
                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    # distinguish "never found anything" from "list exhausted"
                    if self.fileIndex == -1:
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0

    def setNextFileOnline(self):
        """Check for the next file to be readed in online mode.

        Retries self.nTries times per candidate and self.nFiles candidates,
        then tries the next day, sleeping self.delay seconds between tries.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean

        """
        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                nextFile = False
                continue

            if fullfilename is not None:
                break

            # NOTE(review): mutates self.nTries while searching; restored to
            # 3 only when moving to the next day -- confirm this is intended
            self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)
            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0

    def setNextFileOffline(self):
        """Open the next file to be readed in offline mode"""

        try:
            filename = next(self.filenameList)
            self.fileIndex +=1
        except StopIteration:
            self.flagNoMoreFiles = 1
            return 0

        self.filename = filename
        self.fileSize = os.path.getsize(filename)
        self.fp = self.open_file(filename, self.open_mode)
        self.flagIsNewFile = 1

        return 1

    @staticmethod
    def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
        """Check if the given datetime is in range"""

        if startDate <= dt.date() <= endDate:
            if startTime <= dt.time() <= endTime:
                return True
        return False

    def verifyFile(self, filename):
        """Check for a valid file

        Arguments:
            filename -- full path filename

        Return:
            boolean
        """

        # Base implementation accepts everything; subclasses may override.
        return True

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists"""

        raise NotImplementedError

    def readFirstHeader(self):
        """Parse the file header"""

        pass
718
719
class JRODataReader(Reader):
    """Reader for Jicamarca-format raw data files (.r / .pdata).

    Extends the generic Reader with the JRO on-disk layout: files named
    "?YYYYDDDSSS.ext" inside "?YYYYDDD" day folders.
    """

    utc = 0                      # NOTE(review): presumably the UTC time of the current block -- confirm
    nReadBlocks = 0              # blocks read so far from the current file
    foldercounter = 0            # suffix counter for duplicated day folders
    firstHeaderSize = 0
    basicHeaderSize = 24         # fixed size in bytes of the basic header
    __isFirstTimeOnline = 1
    filefmt = "*%Y%j***"         # parse_format pattern for file names
    folderfmt = "*%Y%j"          # parse_format pattern for day folders
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
731
731 def getDtypeWidth(self):
732 def getDtypeWidth(self):
732
733
733 dtype_index = get_dtype_index(self.dtype)
734 dtype_index = get_dtype_index(self.dtype)
734 dtype_width = get_dtype_width(dtype_index)
735 dtype_width = get_dtype_width(dtype_index)
735
736
736 return dtype_width
737 return dtype_width
737
738
    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists.

        Example :
            the correct file name is .../.../D2009307/P2009307367.ext

        Then the function tries the following combinations
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last letter combination being, in this case, identical to the
        wanted file

        Return:
            str -- fullpath of the file
        """


        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']
        # NOTE(review): any other extension leaves prefixFileList undefined
        # and raises NameError below -- confirm only .r/.pdata reach here

        # sweep over the possible combinations
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the directory name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    # NOTE(review): dead branch -- the local foldercounter is
                    # always 0 here (the class attribute is not used)
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # sweep over both cases of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        # not found: return the last candidate name so callers can report it
        return None, filename
791
792
    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        # the current file already delivered all of its blocks
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        # bytes required: one full data block plus its basic header
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # reopen and reseek so a growing file's new content is visible
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # file reached the size announced by its header: complete,
                # nothing more will be appended
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0
829
830
    def waitDataBlock(self, pointer_location, blocksize=None):
        """
        Wait until the current file holds at least one data block starting at
        ``pointer_location``, retrying up to ``self.nTries`` times and
        sleeping ``self.delay`` seconds between tries.

        Parameters:
            pointer_location : byte offset where the next block should begin.
            blocksize : expected block size in bytes; defaults to the block
                size declared in the processing header.

        Returns 1 when enough data is available, 0 after all tries fail.
        """
        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize  # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            # Reopen the file to refresh its contents, keeping the position.
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0
856
857
    def __setNewBlock(self):
        """
        Position the reader at the next data block, advancing to the next
        file when the current one is exhausted.

        Returns 1 when a block is ready to be read, 0 when no more data is
        available.  Side effects: updates ``lastUTTime`` and sets
        ``flagDiscontinuousBlock`` when the time gap between consecutive
        blocks exceeds ``maxTimeStep``.
        """
        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            # First block of a freshly opened file; its headers were already
            # read by setNextFile().
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            # Realtime mode never waits on the current file: jump straight to
            # the next one and flag the discontinuity.
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            # The current file still holds at least one complete block.
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            # Online mode: a new block appeared after waiting.
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        # A new file was opened: check continuity against the previous block.
        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1
897
898
    def readNextBlock(self):
        """
        Read data blocks until one falls inside the configured date/time
        range; out-of-range blocks are reported and skipped.

        Returns 1 when a block was read, 0 when no more blocks are available.
        """
        while True:
            # NOTE(review): the return value of __setNewBlock() is ignored
            # here; readBlock() is attempted even if positioning failed.
            self.__setNewBlock()

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        return 1
921
922
922 def readFirstHeader(self):
923 def readFirstHeader(self):
923
924
924 self.basicHeaderObj.read(self.fp)
925 self.basicHeaderObj.read(self.fp)
925 self.systemHeaderObj.read(self.fp)
926 self.systemHeaderObj.read(self.fp)
926 self.radarControllerHeaderObj.read(self.fp)
927 self.radarControllerHeaderObj.read(self.fp)
927 self.processingHeaderObj.read(self.fp)
928 self.processingHeaderObj.read(self.fp)
928 self.firstHeaderSize = self.basicHeaderObj.size
929 self.firstHeaderSize = self.basicHeaderObj.size
929
930
930 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
931 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
931 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
932 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
932 if datatype == 0:
933 if datatype == 0:
933 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
934 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
934 elif datatype == 1:
935 elif datatype == 1:
935 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
936 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
936 elif datatype == 2:
937 elif datatype == 2:
937 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
938 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
938 elif datatype == 3:
939 elif datatype == 3:
939 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
940 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
940 elif datatype == 4:
941 elif datatype == 4:
941 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
942 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
942 elif datatype == 5:
943 elif datatype == 5:
943 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
944 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
944 else:
945 else:
945 raise ValueError('Data type was not defined')
946 raise ValueError('Data type was not defined')
946
947
947 self.dtype = datatype_str
948 self.dtype = datatype_str
948 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
949 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
949 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
950 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
950 self.firstHeaderSize + self.basicHeaderSize * \
951 self.firstHeaderSize + self.basicHeaderSize * \
951 (self.processingHeaderObj.dataBlocksPerFile - 1)
952 (self.processingHeaderObj.dataBlocksPerFile - 1)
952 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
953 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
953 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
954 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
954 self.getBlockDimension()
955 self.getBlockDimension()
955
956
    def verifyFile(self, filename, msgFlag=True):
        """
        Check that ``filename`` can be opened and that its four headers parse
        correctly.  In offline mode the first and last basic headers must
        also fall inside the configured date/time range.

        Returns True when the file looks valid, False otherwise.
        """
        msg = None

        try:
            fp = open(filename, 'rb')
        except IOError:

            if msgFlag:
                print("[Reading] File %s can't be opened" % (filename))

            return False

        # NOTE(review): waitDataBlock(0) polls the reader's *current* file
        # (self.filename/self.fp), not ``filename`` — confirm this is
        # intentional before relying on it.
        if self.waitDataBlock(0):
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            if not(basicHeaderObj.read(fp)):
                fp.close()
                return False

            if not(systemHeaderObj.read(fp)):
                fp.close()
                return False

            if not(radarControllerHeaderObj.read(fp)):
                fp.close()
                return False

            if not(processingHeaderObj.read(fp)):
                fp.close()
                return False

            if not self.online:
                dt1 = basicHeaderObj.datatime
                # Seek to the last basic header of the file.
                # NOTE(review): self.fileSize was last set for the reader's
                # current file, which may differ from ``filename`` — verify.
                fp.seek(self.fileSize-processingHeaderObj.blockSize-24)
                if not(basicHeaderObj.read(fp)):
                    fp.close()
                    return False
                dt2 = basicHeaderObj.datatime
                if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                   self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                    return False

        fp.close()

        return True
1005
1006
1006 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1007 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1007
1008
1008 path_empty = True
1009 path_empty = True
1009
1010
1010 dateList = []
1011 dateList = []
1011 pathList = []
1012 pathList = []
1012
1013
1013 multi_path = path.split(',')
1014 multi_path = path.split(',')
1014
1015
1015 if not walk:
1016 if not walk:
1016
1017
1017 for single_path in multi_path:
1018 for single_path in multi_path:
1018
1019
1019 if not os.path.isdir(single_path):
1020 if not os.path.isdir(single_path):
1020 continue
1021 continue
1021
1022
1022 fileList = glob.glob1(single_path, "*" + ext)
1023 fileList = glob.glob1(single_path, "*" + ext)
1023
1024
1024 if not fileList:
1025 if not fileList:
1025 continue
1026 continue
1026
1027
1027 path_empty = False
1028 path_empty = False
1028
1029
1029 fileList.sort()
1030 fileList.sort()
1030
1031
1031 for thisFile in fileList:
1032 for thisFile in fileList:
1032
1033
1033 if not os.path.isfile(os.path.join(single_path, thisFile)):
1034 if not os.path.isfile(os.path.join(single_path, thisFile)):
1034 continue
1035 continue
1035
1036
1036 if not isRadarFile(thisFile):
1037 if not isRadarFile(thisFile):
1037 continue
1038 continue
1038
1039
1039 if not isFileInDateRange(thisFile, startDate, endDate):
1040 if not isFileInDateRange(thisFile, startDate, endDate):
1040 continue
1041 continue
1041
1042
1042 thisDate = getDateFromRadarFile(thisFile)
1043 thisDate = getDateFromRadarFile(thisFile)
1043
1044
1044 if thisDate in dateList or single_path in pathList:
1045 if thisDate in dateList or single_path in pathList:
1045 continue
1046 continue
1046
1047
1047 dateList.append(thisDate)
1048 dateList.append(thisDate)
1048 pathList.append(single_path)
1049 pathList.append(single_path)
1049
1050
1050 else:
1051 else:
1051 for single_path in multi_path:
1052 for single_path in multi_path:
1052
1053
1053 if not os.path.isdir(single_path):
1054 if not os.path.isdir(single_path):
1054 continue
1055 continue
1055
1056
1056 dirList = []
1057 dirList = []
1057
1058
1058 for thisPath in os.listdir(single_path):
1059 for thisPath in os.listdir(single_path):
1059
1060
1060 if not os.path.isdir(os.path.join(single_path, thisPath)):
1061 if not os.path.isdir(os.path.join(single_path, thisPath)):
1061 continue
1062 continue
1062
1063
1063 if not isRadarFolder(thisPath):
1064 if not isRadarFolder(thisPath):
1064 continue
1065 continue
1065
1066
1066 if not isFolderInDateRange(thisPath, startDate, endDate):
1067 if not isFolderInDateRange(thisPath, startDate, endDate):
1067 continue
1068 continue
1068
1069
1069 dirList.append(thisPath)
1070 dirList.append(thisPath)
1070
1071
1071 if not dirList:
1072 if not dirList:
1072 continue
1073 continue
1073
1074
1074 dirList.sort()
1075 dirList.sort()
1075
1076
1076 for thisDir in dirList:
1077 for thisDir in dirList:
1077
1078
1078 datapath = os.path.join(single_path, thisDir, expLabel)
1079 datapath = os.path.join(single_path, thisDir, expLabel)
1079 fileList = glob.glob1(datapath, "*" + ext)
1080 fileList = glob.glob1(datapath, "*" + ext)
1080
1081
1081 if not fileList:
1082 if not fileList:
1082 continue
1083 continue
1083
1084
1084 path_empty = False
1085 path_empty = False
1085
1086
1086 thisDate = getDateFromRadarFolder(thisDir)
1087 thisDate = getDateFromRadarFolder(thisDir)
1087
1088
1088 pathList.append(datapath)
1089 pathList.append(datapath)
1089 dateList.append(thisDate)
1090 dateList.append(thisDate)
1090
1091
1091 dateList.sort()
1092 dateList.sort()
1092
1093
1093 if walk:
1094 if walk:
1094 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1095 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1095 else:
1096 else:
1096 pattern_path = multi_path[0]
1097 pattern_path = multi_path[0]
1097
1098
1098 if path_empty:
1099 if path_empty:
1099 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1100 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1100 else:
1101 else:
1101 if not dateList:
1102 if not dateList:
1102 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1103 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1103
1104
1104 if include_path:
1105 if include_path:
1105 return dateList, pathList
1106 return dateList, pathList
1106
1107
1107 return dateList
1108 return dateList
1108
1109
1109 def setup(self, **kwargs):
1110 def setup(self, **kwargs):
1110
1111
1111 self.set_kwargs(**kwargs)
1112 self.set_kwargs(**kwargs)
1112 if not self.ext.startswith('.'):
1113 if not self.ext.startswith('.'):
1113 self.ext = '.{}'.format(self.ext)
1114 self.ext = '.{}'.format(self.ext)
1114
1115
1115 if self.server is not None:
1116 if self.server is not None:
1116 if 'tcp://' in self.server:
1117 if 'tcp://' in self.server:
1117 address = server
1118 address = server
1118 else:
1119 else:
1119 address = 'ipc:///tmp/%s' % self.server
1120 address = 'ipc:///tmp/%s' % self.server
1120 self.server = address
1121 self.server = address
1121 self.context = zmq.Context()
1122 self.context = zmq.Context()
1122 self.receiver = self.context.socket(zmq.PULL)
1123 self.receiver = self.context.socket(zmq.PULL)
1123 self.receiver.connect(self.server)
1124 self.receiver.connect(self.server)
1124 time.sleep(0.5)
1125 time.sleep(0.5)
1125 print('[Starting] ReceiverData from {}'.format(self.server))
1126 print('[Starting] ReceiverData from {}'.format(self.server))
1126 else:
1127 else:
1127 self.server = None
1128 self.server = None
1128 if self.path == None:
1129 if self.path == None:
1129 raise ValueError("[Reading] The path is not valid")
1130 raise ValueError("[Reading] The path is not valid")
1130
1131
1131 if self.online:
1132 if self.online:
1132 log.log("[Reading] Searching files in online mode...", self.name)
1133 log.log("[Reading] Searching files in online mode...", self.name)
1133
1134
1134 for nTries in range(self.nTries):
1135 for nTries in range(self.nTries):
1135 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1136 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1136 self.endDate, self.expLabel, self.ext, self.walk,
1137 self.endDate, self.expLabel, self.ext, self.walk,
1137 self.filefmt, self.folderfmt)
1138 self.filefmt, self.folderfmt)
1138
1139
1139 try:
1140 try:
1140 fullpath = next(fullpath)
1141 fullpath = next(fullpath)
1141 except:
1142 except:
1142 fullpath = None
1143 fullpath = None
1143
1144
1144 if fullpath:
1145 if fullpath:
1145 break
1146 break
1146
1147
1147 log.warning(
1148 log.warning(
1148 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1149 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1149 self.delay, self.path, nTries + 1),
1150 self.delay, self.path, nTries + 1),
1150 self.name)
1151 self.name)
1151 time.sleep(self.delay)
1152 time.sleep(self.delay)
1152
1153
1153 if not(fullpath):
1154 if not(fullpath):
1154 raise schainpy.admin.SchainError(
1155 raise schainpy.admin.SchainError(
1155 'There isn\'t any valid file in {}'.format(self.path))
1156 'There isn\'t any valid file in {}'.format(self.path))
1156
1157
1157 pathname, filename = os.path.split(fullpath)
1158 pathname, filename = os.path.split(fullpath)
1158 self.year = int(filename[1:5])
1159 self.year = int(filename[1:5])
1159 self.doy = int(filename[5:8])
1160 self.doy = int(filename[5:8])
1160 self.set = int(filename[8:11]) - 1
1161 self.set = int(filename[8:11]) - 1
1161 else:
1162 else:
1162 log.log("Searching files in {}".format(self.path), self.name)
1163 log.log("Searching files in {}".format(self.path), self.name)
1163 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1164 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1164 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1165 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1165
1166
1166 self.setNextFile()
1167 self.setNextFile()
1167
1168
1168 return
1169 return
1169
1170
    def getBasicHeader(self):
        """Copy per-block metadata from the parsed headers into self.dataOut."""

        # Block UTC time + millisecond fraction + offset of the current
        # profile within the block.
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
            1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        # Effective IPP is scaled by the number of transmitters.
        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs

        # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1188
1189
    def getFirstHeader(self):
        """Abstract hook: subclasses populate dataOut from the first header."""

        raise NotImplementedError
1192
1193
    def getData(self):
        """Abstract hook: subclasses produce the next data unit in dataOut."""

        raise NotImplementedError
1196
1197
    def hasNotDataInBuffer(self):
        """Abstract hook: subclasses report whether the read buffer is empty."""

        raise NotImplementedError
1200
1201
    def readBlock(self):
        """Abstract hook: subclasses read one data block from the file."""

        raise NotImplementedError
1204
1205
    def isEndProcess(self):
        """Return the flagNoMoreFiles flag (set when input is exhausted)."""

        return self.flagNoMoreFiles
1208
1209
    def printReadBlocks(self):
        """Print the number of blocks read from the current file."""

        print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1212
1213
    def printTotalBlocks(self):
        """Print the total number of blocks read across all files."""

        print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1216
1217
    def printNumberOfBlock(self):
        """No-op placeholder; the commented-out code below shows the block
        progress message it used to print."""

        # if self.flagIsNewBlock:
        #     print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
        #     self.processingHeaderObj.dataBlocksPerFile,
        #     self.dataOut.datatime.ctime())
1224
    def printInfo(self):
        """Print all four headers, but only on the first call."""

        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        # Disable further printing after the first successful call.
        self.__printInfo = False
1236
    def run(self, **kwargs):
        """
        Entry point, called once per processing cycle.  On the first call the
        reader is configured with ``kwargs`` via setup(); afterwards one data
        unit is obtained either from disk (getData) or from a ZMQ server
        (getFromServer) depending on the ``server`` setting.

        Keyword arguments (forwarded to setup):
            path :
            startDate :
            endDate :
            startTime :
            endTime :
            set :
            expLabel :
            ext :
            online :
            delay :
            walk :
            getblock :
            nTxs :
            realtime :
            blocksize :
            blocktime :
            skip :
            cursor :
            warnings :
            server :
            verbose :
            format :
            oneDDict :
            twoDDict :
            independentParam :
        """

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True
        if self.server is None:
            self.getData()
        else:
            self.getFromServer()
1275
1256
1276
1257
class JRODataWriter(Reader):

    """
    Writes processed data to .r or .pdata files; data is always written in
    whole blocks.  Subclasses must implement the NotImplementedError stubs.
    """

    setFile = None            # current file set number
    profilesPerBlock = None   # profiles per written block
    blocksPerFile = None      # blocks per output file
    nWriteBlocks = 0          # blocks written so far
    fileDate = None           # date of the current output file
1289
1270
    def __init__(self, dataOut=None):
        """Abstract: concrete writers must define their own constructor."""
        raise NotImplementedError
1292
1273
    def hasAllDataInBuffer(self):
        """Abstract hook: subclasses report whether a full block is buffered."""
        raise NotImplementedError
1295
1276
    def setBlockDimension(self):
        """Abstract hook: subclasses define the shape of a written block."""
        raise NotImplementedError
1298
1279
    def writeBlock(self):
        """Abstract hook: subclasses write one data block to the file."""
        raise NotImplementedError
1301
1282
    def putData(self):
        """Abstract hook: subclasses buffer incoming data for writing."""
        raise NotImplementedError
1304
1285
1305 def getDtypeWidth(self):
1286 def getDtypeWidth(self):
1306
1287
1307 dtype_index = get_dtype_index(self.dtype)
1288 dtype_index = get_dtype_index(self.dtype)
1308 dtype_width = get_dtype_width(dtype_index)
1289 dtype_width = get_dtype_width(dtype_index)
1309
1290
1310 return dtype_width
1291 return dtype_width
1311
1292
    def getProcessFlags(self):
        """
        Build the processing-flags bitfield (written to the processing
        header) from the dtype and the state of self.dataOut.
        """
        processFlags = 0

        # Base flag encoding the sample dtype.
        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        # NOTE(review): nesting below reconstructed from the upstream source;
        # the spectra-only flags apply only to "Spectra" data — confirm.
        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

            if self.dataOut.flagShiftFFT:
                processFlags += PROCFLAG.SHIFT_FFT_DATA

        return processFlags
1344
1325
    def setBasicHeader(self):
        """Fill self.basicHeaderObj from dataOut before writing a block."""

        self.basicHeaderObj.size = self.basicHeaderSize  # bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks
        # Split utctime into whole seconds and milliseconds.
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0
        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount
1357
1338
    def setFirstHeader(self):
        """
        Abstract hook: build a copy of the first header.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise NotImplementedError
1374
1355
    def __writeFirstHeader(self):
        """
        Write the file's first header: the basic header followed by the long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # The size announced in the basic header includes the long header.
        sizeLongHeader = self.systemHeaderObj.size + \
            self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)
1396
1377
    def __setNewBlock(self):
        """
        For a brand-new file the first header is written (by setNextFile);
        otherwise only a basic header is written, rolling over to a new file
        when the current one is full.

        Return:
            0 : nothing could be written
            1 : a basic header or a first header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            # setNextFile already wrote the first header.
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not(self.setNextFile()):
            return 0

        return 1
1419
1400
    def writeNextBlock(self):
        """
        Select the next block of data and write it to the current file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not(self.__setNewBlock()):
            return 0

        self.writeBlock()

        print("[Writing] Block No. %d/%d" % (self.blockIndex,
                                             self.processingHeaderObj.dataBlocksPerFile))

        return 1
1437
1418
1438 def setNextFile(self):
1419 def setNextFile(self):
1439 """Determina el siguiente file que sera escrito
1420 """Determina el siguiente file que sera escrito
1440
1421
1441 Affected:
1422 Affected:
1442 self.filename
1423 self.filename
1443 self.subfolder
1424 self.subfolder
1444 self.fp
1425 self.fp
1445 self.setFile
1426 self.setFile
1446 self.flagIsNewFile
1427 self.flagIsNewFile
1447
1428
1448 Return:
1429 Return:
1449 0 : Si el archivo no puede ser escrito
1430 0 : Si el archivo no puede ser escrito
1450 1 : Si el archivo esta listo para ser escrito
1431 1 : Si el archivo esta listo para ser escrito
1451 """
1432 """
1452 ext = self.ext
1433 ext = self.ext
1453 path = self.path
1434 path = self.path
1454
1435
1455 if self.fp != None:
1436 if self.fp != None:
1456 self.fp.close()
1437 self.fp.close()
1457
1438
1458 if not os.path.exists(path):
1439 if not os.path.exists(path):
1459 os.mkdir(path)
1440 os.mkdir(path)
1460
1441
1461 timeTuple = time.localtime(self.dataOut.utctime)
1442 timeTuple = time.localtime(self.dataOut.utctime)
1462 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1443 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1463
1444
1464 fullpath = os.path.join(path, subfolder)
1445 fullpath = os.path.join(path, subfolder)
1465 setFile = self.setFile
1446 setFile = self.setFile
1466
1447
1467 if not(os.path.exists(fullpath)):
1448 if not(os.path.exists(fullpath)):
1468 os.mkdir(fullpath)
1449 os.mkdir(fullpath)
1469 setFile = -1 # inicializo mi contador de seteo
1450 setFile = -1 # inicializo mi contador de seteo
1470 else:
1451 else:
1471 filesList = os.listdir(fullpath)
1452 filesList = os.listdir(fullpath)
1472 if len(filesList) > 0:
1453 if len(filesList) > 0:
1473 filesList = sorted(filesList, key=str.lower)
1454 filesList = sorted(filesList, key=str.lower)
1474 filen = filesList[-1]
1455 filen = filesList[-1]
1475 # el filename debera tener el siguiente formato
1456 # el filename debera tener el siguiente formato
1476 # 0 1234 567 89A BCDE (hex)
1457 # 0 1234 567 89A BCDE (hex)
1477 # x YYYY DDD SSS .ext
1458 # x YYYY DDD SSS .ext
1478 if isNumber(filen[8:11]):
1459 if isNumber(filen[8:11]):
1479 # inicializo mi contador de seteo al seteo del ultimo file
1460 # inicializo mi contador de seteo al seteo del ultimo file
1480 setFile = int(filen[8:11])
1461 setFile = int(filen[8:11])
1481 else:
1462 else:
1482 setFile = -1
1463 setFile = -1
1483 else:
1464 else:
1484 setFile = -1 # inicializo mi contador de seteo
1465 setFile = -1 # inicializo mi contador de seteo
1485
1466
1486 setFile += 1
1467 setFile += 1
1487
1468
1488 # If this is a new day it resets some values
1469 # If this is a new day it resets some values
1489 if self.dataOut.datatime.date() > self.fileDate:
1470 if self.dataOut.datatime.date() > self.fileDate:
1490 setFile = 0
1471 setFile = 0
1491 self.nTotalBlocks = 0
1472 self.nTotalBlocks = 0
1492
1473
1493 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1474 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1494 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1475 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1495
1476
1496 filename = os.path.join(path, subfolder, filen)
1477 filename = os.path.join(path, subfolder, filen)
1497
1478
1498 fp = open(filename, 'wb')
1479 fp = open(filename, 'wb')
1499
1480
1500 self.blockIndex = 0
1481 self.blockIndex = 0
1501 self.filename = filename
1482 self.filename = filename
1502 self.subfolder = subfolder
1483 self.subfolder = subfolder
1503 self.fp = fp
1484 self.fp = fp
1504 self.setFile = setFile
1485 self.setFile = setFile
1505 self.flagIsNewFile = 1
1486 self.flagIsNewFile = 1
1506 self.fileDate = self.dataOut.datatime.date()
1487 self.fileDate = self.dataOut.datatime.date()
1507 self.setFirstHeader()
1488 self.setFirstHeader()
1508
1489
1509 print('[Writing] Opening file: %s' % self.filename)
1490 print('[Writing] Opening file: %s' % self.filename)
1510
1491
1511 self.__writeFirstHeader()
1492 self.__writeFirstHeader()
1512
1493
1513 return 1
1494 return 1
1514
1495
1515 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1496 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1516 """
1497 """
1517 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1498 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1518
1499
1519 Inputs:
1500 Inputs:
1520 path : directory where data will be saved
1501 path : directory where data will be saved
1521 profilesPerBlock : number of profiles per block
1502 profilesPerBlock : number of profiles per block
1522 set : initial file set
1503 set : initial file set
1523 datatype : An integer number that defines data type:
1504 datatype : An integer number that defines data type:
1524 0 : int8 (1 byte)
1505 0 : int8 (1 byte)
1525 1 : int16 (2 bytes)
1506 1 : int16 (2 bytes)
1526 2 : int32 (4 bytes)
1507 2 : int32 (4 bytes)
1527 3 : int64 (8 bytes)
1508 3 : int64 (8 bytes)
1528 4 : float32 (4 bytes)
1509 4 : float32 (4 bytes)
1529 5 : double64 (8 bytes)
1510 5 : double64 (8 bytes)
1530
1511
1531 Return:
1512 Return:
1532 0 : Si no realizo un buen seteo
1513 0 : Si no realizo un buen seteo
1533 1 : Si realizo un buen seteo
1514 1 : Si realizo un buen seteo
1534 """
1515 """
1535
1516
1536 if ext == None:
1517 if ext == None:
1537 ext = self.ext
1518 ext = self.ext
1538
1519
1539 self.ext = ext.lower()
1520 self.ext = ext.lower()
1540
1521
1541 self.path = path
1522 self.path = path
1542
1523
1543 if set is None:
1524 if set is None:
1544 self.setFile = -1
1525 self.setFile = -1
1545 else:
1526 else:
1546 self.setFile = set - 1
1527 self.setFile = set - 1
1547
1528
1548 self.blocksPerFile = blocksPerFile
1529 self.blocksPerFile = blocksPerFile
1549 self.profilesPerBlock = profilesPerBlock
1530 self.profilesPerBlock = profilesPerBlock
1550 self.dataOut = dataOut
1531 self.dataOut = dataOut
1551 self.fileDate = self.dataOut.datatime.date()
1532 self.fileDate = self.dataOut.datatime.date()
1552 self.dtype = self.dataOut.dtype
1533 self.dtype = self.dataOut.dtype
1553
1534
1554 if datatype is not None:
1535 if datatype is not None:
1555 self.dtype = get_numpy_dtype(datatype)
1536 self.dtype = get_numpy_dtype(datatype)
1556
1537
1557 if not(self.setNextFile()):
1538 if not(self.setNextFile()):
1558 print("[Writing] There isn't a next file")
1539 print("[Writing] There isn't a next file")
1559 return 0
1540 return 0
1560
1541
1561 self.setBlockDimension()
1542 self.setBlockDimension()
1562
1543
1563 return 1
1544 return 1
1564
1545
1565 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1546 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1566
1547
1567 if not(self.isConfig):
1548 if not(self.isConfig):
1568
1549
1569 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1550 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1570 set=set, ext=ext, datatype=datatype, **kwargs)
1551 set=set, ext=ext, datatype=datatype, **kwargs)
1571 self.isConfig = True
1552 self.isConfig = True
1572
1553
1573 self.dataOut = dataOut
1554 self.dataOut = dataOut
1574 self.putData()
1555 self.putData()
1575 return self.dataOut
1556 return self.dataOut
1557
class printInfo(Operation):
    """Print the requested headers of dataOut exactly once, then pass it through."""

    def __init__(self):

        Operation.__init__(self)
        self.__printInfo = True  # flips to False after the first run

    def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
        if not self.__printInfo:
            return dataOut

        for header in headers:
            if not hasattr(dataOut, header):
                log.warning('Header {} Not found in object'.format(header))
                continue
            obj = getattr(dataOut, header)
            # Prefer the object's own pretty-printer when it provides one.
            if hasattr(obj, 'printInfo'):
                obj.printInfo()
            else:
                print(obj)

        self.__printInfo = False
        return dataOut
@@ -1,794 +1,794
1
1
2 '''
2 '''
3 Created on Jul 3, 2014
3 Created on Jul 3, 2014
4
4
5 @author: roj-idl71
5 @author: roj-idl71
6 '''
6 '''
7 # SUBCHANNELS EN VEZ DE CHANNELS
7 # SUBCHANNELS EN VEZ DE CHANNELS
8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
9 # ACTUALIZACION DE VERSION
9 # ACTUALIZACION DE VERSION
10 # HEADERS
10 # HEADERS
11 # MODULO DE ESCRITURA
11 # MODULO DE ESCRITURA
12 # METADATA
12 # METADATA
13
13
14 import os
14 import os
15 import time
15 import time
16 import datetime
16 import datetime
17 import numpy
17 import numpy
18 import timeit
18 import timeit
19 from fractions import Fraction
19 from fractions import Fraction
20 from time import time
20 from time import time
21 from time import sleep
21 from time import sleep
22
22
23 import schainpy.admin
23 import schainpy.admin
24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 from schainpy.model.data.jrodata import Voltage
25 from schainpy.model.data.jrodata import Voltage
26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27
27
28 import pickle
28 import pickle
29 try:
29 try:
30 import digital_rf
30 import digital_rf
31 except:
31 except:
32 pass
32 pass
33
33
34 @MPDecorator
34
35 class DigitalRFReader(ProcessingUnit):
35 class DigitalRFReader(ProcessingUnit):
36 '''
36 '''
37 classdocs
37 classdocs
38 '''
38 '''
39
39
40 def __init__(self):
40 def __init__(self):
41 '''
41 '''
42 Constructor
42 Constructor
43 '''
43 '''
44
44
45 ProcessingUnit.__init__(self)
45 ProcessingUnit.__init__(self)
46
46
47 self.dataOut = Voltage()
47 self.dataOut = Voltage()
48 self.__printInfo = True
48 self.__printInfo = True
49 self.__flagDiscontinuousBlock = False
49 self.__flagDiscontinuousBlock = False
50 self.__bufferIndex = 9999999
50 self.__bufferIndex = 9999999
51 self.__codeType = 0
51 self.__codeType = 0
52 self.__ippKm = None
52 self.__ippKm = None
53 self.__nCode = None
53 self.__nCode = None
54 self.__nBaud = None
54 self.__nBaud = None
55 self.__code = None
55 self.__code = None
56 self.dtype = None
56 self.dtype = None
57 self.oldAverage = None
57 self.oldAverage = None
58 self.path = None
58 self.path = None
59
59
60 def close(self):
60 def close(self):
61 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
61 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
62 return
62 return
63
63
64 def __getCurrentSecond(self):
64 def __getCurrentSecond(self):
65
65
66 return self.__thisUnixSample / self.__sample_rate
66 return self.__thisUnixSample / self.__sample_rate
67
67
68 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
68 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69
69
70 def __setFileHeader(self):
70 def __setFileHeader(self):
71 '''
71 '''
72 In this method will be initialized every parameter of dataOut object (header, no data)
72 In this method will be initialized every parameter of dataOut object (header, no data)
73 '''
73 '''
74 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
74 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
75
75
76 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
76 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
77
77
78 try:
78 try:
79 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
79 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
80 self.__radarControllerHeader)
80 self.__radarControllerHeader)
81 except:
81 except:
82 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
83 txA=0,
83 txA=0,
84 txB=0,
84 txB=0,
85 nWindows=1,
85 nWindows=1,
86 nHeights=self.__nSamples,
86 nHeights=self.__nSamples,
87 firstHeight=self.__firstHeigth,
87 firstHeight=self.__firstHeigth,
88 deltaHeight=self.__deltaHeigth,
88 deltaHeight=self.__deltaHeigth,
89 codeType=self.__codeType,
89 codeType=self.__codeType,
90 nCode=self.__nCode, nBaud=self.__nBaud,
90 nCode=self.__nCode, nBaud=self.__nBaud,
91 code=self.__code)
91 code=self.__code)
92
92
93 try:
93 try:
94 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
94 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
95 except:
95 except:
96 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
96 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
97 nProfiles=nProfiles,
97 nProfiles=nProfiles,
98 nChannels=len(
98 nChannels=len(
99 self.__channelList),
99 self.__channelList),
100 adcResolution=14)
100 adcResolution=14)
101 self.dataOut.type = "Voltage"
101 self.dataOut.type = "Voltage"
102
102
103 self.dataOut.data = None
103 self.dataOut.data = None
104
104
105 self.dataOut.dtype = self.dtype
105 self.dataOut.dtype = self.dtype
106
106
107 # self.dataOut.nChannels = 0
107 # self.dataOut.nChannels = 0
108
108
109 # self.dataOut.nHeights = 0
109 # self.dataOut.nHeights = 0
110
110
111 self.dataOut.nProfiles = int(nProfiles)
111 self.dataOut.nProfiles = int(nProfiles)
112
112
113 self.dataOut.heightList = self.__firstHeigth + \
113 self.dataOut.heightList = self.__firstHeigth + \
114 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 numpy.arange(self.__nSamples, dtype=numpy.float) * \
115 self.__deltaHeigth
115 self.__deltaHeigth
116
116
117 self.dataOut.channelList = list(range(self.__num_subchannels))
117 self.dataOut.channelList = list(range(self.__num_subchannels))
118
118
119 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
119 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
120
120
121 # self.dataOut.channelIndexList = None
121 # self.dataOut.channelIndexList = None
122
122
123 self.dataOut.flagNoData = True
123 self.dataOut.flagNoData = True
124
124
125 self.dataOut.flagDataAsBlock = False
125 self.dataOut.flagDataAsBlock = False
126 # Set to TRUE if the data is discontinuous
126 # Set to TRUE if the data is discontinuous
127 self.dataOut.flagDiscontinuousBlock = False
127 self.dataOut.flagDiscontinuousBlock = False
128
128
129 self.dataOut.utctime = None
129 self.dataOut.utctime = None
130
130
131 # timezone like jroheader, difference in minutes between UTC and localtime
131 # timezone like jroheader, difference in minutes between UTC and localtime
132 self.dataOut.timeZone = self.__timezone / 60
132 self.dataOut.timeZone = self.__timezone / 60
133
133
134 self.dataOut.dstFlag = 0
134 self.dataOut.dstFlag = 0
135
135
136 self.dataOut.errorCount = 0
136 self.dataOut.errorCount = 0
137
137
138 try:
138 try:
139 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
139 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
140 'nCohInt', self.nCohInt)
140 'nCohInt', self.nCohInt)
141
141
142 # asumo que la data esta decodificada
142 # asumo que la data esta decodificada
143 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
143 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
144 'flagDecodeData', self.flagDecodeData)
144 'flagDecodeData', self.flagDecodeData)
145
145
146 # asumo que la data esta sin flip
146 # asumo que la data esta sin flip
147 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
148
148
149 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
150
150
151 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
152 except:
152 except:
153 pass
153 pass
154
154
155 self.dataOut.ippSeconds = ippSeconds
155 self.dataOut.ippSeconds = ippSeconds
156
156
157 # Time interval between profiles
157 # Time interval between profiles
158 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
159
159
160 self.dataOut.frequency = self.__frequency
160 self.dataOut.frequency = self.__frequency
161
161
162 self.dataOut.realtime = self.__online
162 self.dataOut.realtime = self.__online
163
163
164 def findDatafiles(self, path, startDate=None, endDate=None):
164 def findDatafiles(self, path, startDate=None, endDate=None):
165
165
166 if not os.path.isdir(path):
166 if not os.path.isdir(path):
167 return []
167 return []
168
168
169 try:
169 try:
170 digitalReadObj = digital_rf.DigitalRFReader(
170 digitalReadObj = digital_rf.DigitalRFReader(
171 path, load_all_metadata=True)
171 path, load_all_metadata=True)
172 except:
172 except:
173 digitalReadObj = digital_rf.DigitalRFReader(path)
173 digitalReadObj = digital_rf.DigitalRFReader(path)
174
174
175 channelNameList = digitalReadObj.get_channels()
175 channelNameList = digitalReadObj.get_channels()
176
176
177 if not channelNameList:
177 if not channelNameList:
178 return []
178 return []
179
179
180 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
181
181
182 sample_rate = metadata_dict['sample_rate'][0]
182 sample_rate = metadata_dict['sample_rate'][0]
183
183
184 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
185
185
186 try:
186 try:
187 timezone = this_metadata_file['timezone'].value
187 timezone = this_metadata_file['timezone'].value
188 except:
188 except:
189 timezone = 0
189 timezone = 0
190
190
191 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
192 channelNameList[0]) / sample_rate - timezone
192 channelNameList[0]) / sample_rate - timezone
193
193
194 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
195 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
196
196
197 if not startDate:
197 if not startDate:
198 startDate = startDatetime.date()
198 startDate = startDatetime.date()
199
199
200 if not endDate:
200 if not endDate:
201 endDate = endDatatime.date()
201 endDate = endDatatime.date()
202
202
203 dateList = []
203 dateList = []
204
204
205 thisDatetime = startDatetime
205 thisDatetime = startDatetime
206
206
207 while(thisDatetime <= endDatatime):
207 while(thisDatetime <= endDatatime):
208
208
209 thisDate = thisDatetime.date()
209 thisDate = thisDatetime.date()
210
210
211 if thisDate < startDate:
211 if thisDate < startDate:
212 continue
212 continue
213
213
214 if thisDate > endDate:
214 if thisDate > endDate:
215 break
215 break
216
216
217 dateList.append(thisDate)
217 dateList.append(thisDate)
218 thisDatetime += datetime.timedelta(1)
218 thisDatetime += datetime.timedelta(1)
219
219
220 return dateList
220 return dateList
221
221
222 def setup(self, path=None,
222 def setup(self, path=None,
223 startDate=None,
223 startDate=None,
224 endDate=None,
224 endDate=None,
225 startTime=datetime.time(0, 0, 0),
225 startTime=datetime.time(0, 0, 0),
226 endTime=datetime.time(23, 59, 59),
226 endTime=datetime.time(23, 59, 59),
227 channelList=None,
227 channelList=None,
228 nSamples=None,
228 nSamples=None,
229 online=False,
229 online=False,
230 delay=60,
230 delay=60,
231 buffer_size=1024,
231 buffer_size=1024,
232 ippKm=None,
232 ippKm=None,
233 nCohInt=1,
233 nCohInt=1,
234 nCode=1,
234 nCode=1,
235 nBaud=1,
235 nBaud=1,
236 flagDecodeData=False,
236 flagDecodeData=False,
237 code=numpy.ones((1, 1), dtype=numpy.int),
237 code=numpy.ones((1, 1), dtype=numpy.int),
238 **kwargs):
238 **kwargs):
239 '''
239 '''
240 In this method we should set all initial parameters.
240 In this method we should set all initial parameters.
241
241
242 Inputs:
242 Inputs:
243 path
243 path
244 startDate
244 startDate
245 endDate
245 endDate
246 startTime
246 startTime
247 endTime
247 endTime
248 set
248 set
249 expLabel
249 expLabel
250 ext
250 ext
251 online
251 online
252 delay
252 delay
253 '''
253 '''
254 self.path = path
254 self.path = path
255 self.nCohInt = nCohInt
255 self.nCohInt = nCohInt
256 self.flagDecodeData = flagDecodeData
256 self.flagDecodeData = flagDecodeData
257 self.i = 0
257 self.i = 0
258 if not os.path.isdir(path):
258 if not os.path.isdir(path):
259 raise ValueError("[Reading] Directory %s does not exist" % path)
259 raise ValueError("[Reading] Directory %s does not exist" % path)
260
260
261 try:
261 try:
262 self.digitalReadObj = digital_rf.DigitalRFReader(
262 self.digitalReadObj = digital_rf.DigitalRFReader(
263 path, load_all_metadata=True)
263 path, load_all_metadata=True)
264 except:
264 except:
265 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 self.digitalReadObj = digital_rf.DigitalRFReader(path)
266
266
267 channelNameList = self.digitalReadObj.get_channels()
267 channelNameList = self.digitalReadObj.get_channels()
268
268
269 if not channelNameList:
269 if not channelNameList:
270 raise ValueError("[Reading] Directory %s does not have any files" % path)
270 raise ValueError("[Reading] Directory %s does not have any files" % path)
271
271
272 if not channelList:
272 if not channelList:
273 channelList = list(range(len(channelNameList)))
273 channelList = list(range(len(channelNameList)))
274
274
275 ########## Reading metadata ######################
275 ########## Reading metadata ######################
276
276
277 top_properties = self.digitalReadObj.get_properties(
277 top_properties = self.digitalReadObj.get_properties(
278 channelNameList[channelList[0]])
278 channelNameList[channelList[0]])
279
279
280 self.__num_subchannels = top_properties['num_subchannels']
280 self.__num_subchannels = top_properties['num_subchannels']
281 self.__sample_rate = 1.0 * \
281 self.__sample_rate = 1.0 * \
282 top_properties['sample_rate_numerator'] / \
282 top_properties['sample_rate_numerator'] / \
283 top_properties['sample_rate_denominator']
283 top_properties['sample_rate_denominator']
284 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 # self.__samples_per_file = top_properties['samples_per_file'][0]
285 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
285 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
286
286
287 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 this_metadata_file = self.digitalReadObj.get_digital_metadata(
288 channelNameList[channelList[0]])
288 channelNameList[channelList[0]])
289 metadata_bounds = this_metadata_file.get_bounds()
289 metadata_bounds = this_metadata_file.get_bounds()
290 self.fixed_metadata_dict = this_metadata_file.read(
290 self.fixed_metadata_dict = this_metadata_file.read(
291 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
292
292
293 try:
293 try:
294 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
295 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
296 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
297 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
298 except:
298 except:
299 pass
299 pass
300
300
301 self.__frequency = None
301 self.__frequency = None
302
302
303 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
303 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
304
304
305 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
305 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
306
306
307 try:
307 try:
308 nSamples = self.fixed_metadata_dict['nSamples']
308 nSamples = self.fixed_metadata_dict['nSamples']
309 except:
309 except:
310 nSamples = None
310 nSamples = None
311
311
312 self.__firstHeigth = 0
312 self.__firstHeigth = 0
313
313
314 try:
314 try:
315 codeType = self.__radarControllerHeader['codeType']
315 codeType = self.__radarControllerHeader['codeType']
316 except:
316 except:
317 codeType = 0
317 codeType = 0
318
318
319 try:
319 try:
320 if codeType:
320 if codeType:
321 nCode = self.__radarControllerHeader['nCode']
321 nCode = self.__radarControllerHeader['nCode']
322 nBaud = self.__radarControllerHeader['nBaud']
322 nBaud = self.__radarControllerHeader['nBaud']
323 code = self.__radarControllerHeader['code']
323 code = self.__radarControllerHeader['code']
324 except:
324 except:
325 pass
325 pass
326
326
327 if not ippKm:
327 if not ippKm:
328 try:
328 try:
329 # seconds to km
329 # seconds to km
330 ippKm = self.__radarControllerHeader['ipp']
330 ippKm = self.__radarControllerHeader['ipp']
331 except:
331 except:
332 ippKm = None
332 ippKm = None
333 ####################################################
333 ####################################################
334 self.__ippKm = ippKm
334 self.__ippKm = ippKm
335 startUTCSecond = None
335 startUTCSecond = None
336 endUTCSecond = None
336 endUTCSecond = None
337
337
338 if startDate:
338 if startDate:
339 startDatetime = datetime.datetime.combine(startDate, startTime)
339 startDatetime = datetime.datetime.combine(startDate, startTime)
340 startUTCSecond = (
340 startUTCSecond = (
341 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
342
342
343 if endDate:
343 if endDate:
344 endDatetime = datetime.datetime.combine(endDate, endTime)
344 endDatetime = datetime.datetime.combine(endDate, endTime)
345 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 endUTCSecond = (endDatetime - datetime.datetime(1970,
346 1, 1)).total_seconds() + self.__timezone
346 1, 1)).total_seconds() + self.__timezone
347
347
348 start_index, end_index = self.digitalReadObj.get_bounds(
348 start_index, end_index = self.digitalReadObj.get_bounds(
349 channelNameList[channelList[0]])
349 channelNameList[channelList[0]])
350
350
351 if not startUTCSecond:
351 if not startUTCSecond:
352 startUTCSecond = start_index / self.__sample_rate
352 startUTCSecond = start_index / self.__sample_rate
353
353
354 if start_index > startUTCSecond * self.__sample_rate:
354 if start_index > startUTCSecond * self.__sample_rate:
355 startUTCSecond = start_index / self.__sample_rate
355 startUTCSecond = start_index / self.__sample_rate
356
356
357 if not endUTCSecond:
357 if not endUTCSecond:
358 endUTCSecond = end_index / self.__sample_rate
358 endUTCSecond = end_index / self.__sample_rate
359
359
360 if end_index < endUTCSecond * self.__sample_rate:
360 if end_index < endUTCSecond * self.__sample_rate:
361 endUTCSecond = end_index / self.__sample_rate
361 endUTCSecond = end_index / self.__sample_rate
362 if not nSamples:
362 if not nSamples:
363 if not ippKm:
363 if not ippKm:
364 raise ValueError("[Reading] nSamples or ippKm should be defined")
364 raise ValueError("[Reading] nSamples or ippKm should be defined")
365 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
366 channelBoundList = []
366 channelBoundList = []
367 channelNameListFiltered = []
367 channelNameListFiltered = []
368
368
369 for thisIndexChannel in channelList:
369 for thisIndexChannel in channelList:
370 thisChannelName = channelNameList[thisIndexChannel]
370 thisChannelName = channelNameList[thisIndexChannel]
371 start_index, end_index = self.digitalReadObj.get_bounds(
371 start_index, end_index = self.digitalReadObj.get_bounds(
372 thisChannelName)
372 thisChannelName)
373 channelBoundList.append((start_index, end_index))
373 channelBoundList.append((start_index, end_index))
374 channelNameListFiltered.append(thisChannelName)
374 channelNameListFiltered.append(thisChannelName)
375
375
376 self.profileIndex = 0
376 self.profileIndex = 0
377 self.i = 0
377 self.i = 0
378 self.__delay = delay
378 self.__delay = delay
379
379
380 self.__codeType = codeType
380 self.__codeType = codeType
381 self.__nCode = nCode
381 self.__nCode = nCode
382 self.__nBaud = nBaud
382 self.__nBaud = nBaud
383 self.__code = code
383 self.__code = code
384
384
385 self.__datapath = path
385 self.__datapath = path
386 self.__online = online
386 self.__online = online
387 self.__channelList = channelList
387 self.__channelList = channelList
388 self.__channelNameList = channelNameListFiltered
388 self.__channelNameList = channelNameListFiltered
389 self.__channelBoundList = channelBoundList
389 self.__channelBoundList = channelBoundList
390 self.__nSamples = nSamples
390 self.__nSamples = nSamples
391 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
391 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
392 self.__nChannels = len(self.__channelList)
392 self.__nChannels = len(self.__channelList)
393
393
394 self.__startUTCSecond = startUTCSecond
394 self.__startUTCSecond = startUTCSecond
395 self.__endUTCSecond = endUTCSecond
395 self.__endUTCSecond = endUTCSecond
396
396
397 self.__timeInterval = 1.0 * self.__samples_to_read / \
397 self.__timeInterval = 1.0 * self.__samples_to_read / \
398 self.__sample_rate # Time interval
398 self.__sample_rate # Time interval
399
399
400 if online:
400 if online:
401 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
401 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
402 startUTCSecond = numpy.floor(endUTCSecond)
402 startUTCSecond = numpy.floor(endUTCSecond)
403
403
404 # por que en el otro metodo lo primero q se hace es sumar samplestoread
404 # por que en el otro metodo lo primero q se hace es sumar samplestoread
405 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
405 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
406
406
407 self.__data_buffer = numpy.zeros(
407 self.__data_buffer = numpy.zeros(
408 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
408 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
409
409
410 self.__setFileHeader()
410 self.__setFileHeader()
411 self.isConfig = True
411 self.isConfig = True
412
412
413 print("[Reading] Digital RF Data was found from %s to %s " % (
413 print("[Reading] Digital RF Data was found from %s to %s " % (
414 datetime.datetime.utcfromtimestamp(
414 datetime.datetime.utcfromtimestamp(
415 self.__startUTCSecond - self.__timezone),
415 self.__startUTCSecond - self.__timezone),
416 datetime.datetime.utcfromtimestamp(
416 datetime.datetime.utcfromtimestamp(
417 self.__endUTCSecond - self.__timezone)
417 self.__endUTCSecond - self.__timezone)
418 ))
418 ))
419
419
420 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
420 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
421 datetime.datetime.utcfromtimestamp(
421 datetime.datetime.utcfromtimestamp(
422 endUTCSecond - self.__timezone)
422 endUTCSecond - self.__timezone)
423 ))
423 ))
424 self.oldAverage = None
424 self.oldAverage = None
425 self.count = 0
425 self.count = 0
426 self.executionTime = 0
426 self.executionTime = 0
427
427
428 def __reload(self):
428 def __reload(self):
429 # print
429 # print
430 # print "%s not in range [%s, %s]" %(
430 # print "%s not in range [%s, %s]" %(
431 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
431 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
432 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
432 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
433 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
433 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
434 # )
434 # )
435 print("[Reading] reloading metadata ...")
435 print("[Reading] reloading metadata ...")
436
436
437 try:
437 try:
438 self.digitalReadObj.reload(complete_update=True)
438 self.digitalReadObj.reload(complete_update=True)
439 except:
439 except:
440 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
440 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
441
441
442 start_index, end_index = self.digitalReadObj.get_bounds(
442 start_index, end_index = self.digitalReadObj.get_bounds(
443 self.__channelNameList[self.__channelList[0]])
443 self.__channelNameList[self.__channelList[0]])
444
444
445 if start_index > self.__startUTCSecond * self.__sample_rate:
445 if start_index > self.__startUTCSecond * self.__sample_rate:
446 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
446 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
447
447
448 if end_index > self.__endUTCSecond * self.__sample_rate:
448 if end_index > self.__endUTCSecond * self.__sample_rate:
449 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
449 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
450 print()
450 print()
451 print("[Reading] New timerange found [%s, %s] " % (
451 print("[Reading] New timerange found [%s, %s] " % (
452 datetime.datetime.utcfromtimestamp(
452 datetime.datetime.utcfromtimestamp(
453 self.__startUTCSecond - self.__timezone),
453 self.__startUTCSecond - self.__timezone),
454 datetime.datetime.utcfromtimestamp(
454 datetime.datetime.utcfromtimestamp(
455 self.__endUTCSecond - self.__timezone)
455 self.__endUTCSecond - self.__timezone)
456 ))
456 ))
457
457
458 return True
458 return True
459
459
460 return False
460 return False
461
461
462 def timeit(self, toExecute):
462 def timeit(self, toExecute):
463 t0 = time.time()
463 t0 = time.time()
464 toExecute()
464 toExecute()
465 self.executionTime = time.time() - t0
465 self.executionTime = time.time() - t0
466 if self.oldAverage is None:
466 if self.oldAverage is None:
467 self.oldAverage = self.executionTime
467 self.oldAverage = self.executionTime
468 self.oldAverage = (self.executionTime + self.count *
468 self.oldAverage = (self.executionTime + self.count *
469 self.oldAverage) / (self.count + 1.0)
469 self.oldAverage) / (self.count + 1.0)
470 self.count = self.count + 1.0
470 self.count = self.count + 1.0
471 return
471 return
472
472
473 def __readNextBlock(self, seconds=30, volt_scale=1):
473 def __readNextBlock(self, seconds=30, volt_scale=1):
474 '''
474 '''
475 '''
475 '''
476
476
477 # Set the next data
477 # Set the next data
478 self.__flagDiscontinuousBlock = False
478 self.__flagDiscontinuousBlock = False
479 self.__thisUnixSample += self.__samples_to_read
479 self.__thisUnixSample += self.__samples_to_read
480
480
481 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
481 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
482 print ("[Reading] There are no more data into selected time-range")
482 print ("[Reading] There are no more data into selected time-range")
483 if self.__online:
483 if self.__online:
484 sleep(3)
484 sleep(3)
485 self.__reload()
485 self.__reload()
486 else:
486 else:
487 return False
487 return False
488
488
489 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
489 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
490 return False
490 return False
491 self.__thisUnixSample -= self.__samples_to_read
491 self.__thisUnixSample -= self.__samples_to_read
492
492
493 indexChannel = 0
493 indexChannel = 0
494
494
495 dataOk = False
495 dataOk = False
496
496
497 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
497 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
498 for indexSubchannel in range(self.__num_subchannels):
498 for indexSubchannel in range(self.__num_subchannels):
499 try:
499 try:
500 t0 = time()
500 t0 = time()
501 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
502 self.__samples_to_read,
502 self.__samples_to_read,
503 thisChannelName, sub_channel=indexSubchannel)
503 thisChannelName, sub_channel=indexSubchannel)
504 self.executionTime = time() - t0
504 self.executionTime = time() - t0
505 if self.oldAverage is None:
505 if self.oldAverage is None:
506 self.oldAverage = self.executionTime
506 self.oldAverage = self.executionTime
507 self.oldAverage = (
507 self.oldAverage = (
508 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
509 self.count = self.count + 1.0
509 self.count = self.count + 1.0
510
510
511 except IOError as e:
511 except IOError as e:
512 # read next profile
512 # read next profile
513 self.__flagDiscontinuousBlock = True
513 self.__flagDiscontinuousBlock = True
514 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
514 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
515 break
515 break
516
516
517 if result.shape[0] != self.__samples_to_read:
517 if result.shape[0] != self.__samples_to_read:
518 self.__flagDiscontinuousBlock = True
518 self.__flagDiscontinuousBlock = True
519 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
519 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
520 result.shape[0],
520 result.shape[0],
521 self.__samples_to_read))
521 self.__samples_to_read))
522 break
522 break
523
523
524 self.__data_buffer[indexSubchannel, :] = result * volt_scale
524 self.__data_buffer[indexSubchannel, :] = result * volt_scale
525 indexChannel+=1
525 indexChannel+=1
526
526
527 dataOk = True
527 dataOk = True
528
528
529 self.__utctime = self.__thisUnixSample / self.__sample_rate
529 self.__utctime = self.__thisUnixSample / self.__sample_rate
530
530
531 if not dataOk:
531 if not dataOk:
532 return False
532 return False
533
533
534 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
534 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
535 self.__samples_to_read,
535 self.__samples_to_read,
536 self.__timeInterval))
536 self.__timeInterval))
537
537
538 self.__bufferIndex = 0
538 self.__bufferIndex = 0
539
539
540 return True
540 return True
541
541
542 def __isBufferEmpty(self):
542 def __isBufferEmpty(self):
543 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
543 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
544
544
    def getData(self, seconds=30, nTries=5):
        '''
        This method gets the data from files and puts the data into the dataOut object.

        In addition, it increases the buffer counter by one.

        Return:
            data : returns a profile of voltages (heights * channels) copied from the
            buffer. If there are no more files to read it returns None.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''
        #print("getdata")
        err_counter = 0
        self.dataOut.flagNoData = True

        # Refill the buffer only when every profile of the previous block
        # has already been consumed.
        if self.__isBufferEmpty():
            #print("hi")
            self.__flagDiscontinuousBlock = False

            while True:
                #print ("q ha pasado")
                if self.__readNextBlock():
                    break
                # NOTE(review): each branch below carries a dead ``return``
                # after its ``raise`` — unreachable leftovers from an earlier
                # non-raising error-handling style.
                if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
                    raise schainpy.admin.SchainError('Error')
                    return

                if self.__flagDiscontinuousBlock:
                    raise schainpy.admin.SchainError('discontinuous block found')
                    return

                if not self.__online:
                    raise schainpy.admin.SchainError('Online?')
                    return

                # Online mode: retry up to nTries times, waiting between tries.
                err_counter += 1
                if err_counter > nTries:
                    raise schainpy.admin.SchainError('Max retrys reach')
                    return

                print('[Reading] waiting %d seconds to read a new block' % seconds)
                time.sleep(seconds)

        # Hand the next profile (all subchannels, __nSamples samples) to dataOut.
        self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
        self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        # Wrap the profile counter at the end of each block.
        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True
606
606
607 def printInfo(self):
607 def printInfo(self):
608 '''
608 '''
609 '''
609 '''
610 if self.__printInfo == False:
610 if self.__printInfo == False:
611 return
611 return
612
612
613 # self.systemHeaderObj.printInfo()
613 # self.systemHeaderObj.printInfo()
614 # self.radarControllerHeaderObj.printInfo()
614 # self.radarControllerHeaderObj.printInfo()
615
615
616 self.__printInfo = False
616 self.__printInfo = False
617
617
618 def printNumberOfBlock(self):
618 def printNumberOfBlock(self):
619 '''
619 '''
620 '''
620 '''
621 return
621 return
622 # print self.profileIndex
622 # print self.profileIndex
623
623
624 def run(self, **kwargs):
624 def run(self, **kwargs):
625 '''
625 '''
626 This method will be called many times so here you should put all your code
626 This method will be called many times so here you should put all your code
627 '''
627 '''
628
628
629 if not self.isConfig:
629 if not self.isConfig:
630 self.setup(**kwargs)
630 self.setup(**kwargs)
631 #self.i = self.i+1
631 #self.i = self.i+1
632 self.getData(seconds=self.__delay)
632 self.getData(seconds=self.__delay)
633
633
634 return
634 return
635
635
636
class DigitalRFWriter(Operation):
    '''
    Operation that writes voltage data in Digital RF format, together with a
    parallel Digital Metadata channel holding the radar header dictionaries.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor
        '''
        Operation.__init__(self, **kwargs)
        self.metadata_dict = {}
        self.dataOut = None
        self.dtype = None
        self.oldAverage = 0

    def setHeader(self):
        '''Collect the dataOut acquisition attributes for the metadata channel.'''
        self.metadata_dict['frequency'] = self.dataOut.frequency
        self.metadata_dict['timezone'] = self.dataOut.timeZone
        self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
        self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
        self.metadata_dict['heightList'] = self.dataOut.heightList
        self.metadata_dict['channelList'] = self.dataOut.channelList
        self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
        self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
        self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
        self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
        self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
        self.metadata_dict['type'] = self.dataOut.type
        self.metadata_dict['flagDataAsBlock'] = getattr(
            self.dataOut, 'flagDataAsBlock', None)  # chequear

    def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
        '''
        Set all initial parameters and create the Digital RF and Digital
        Metadata writer objects.

        Input:
            dataOut: Input data will also be output data
        '''
        self.setHeader()
        self.__ippSeconds = dataOut.ippSeconds
        self.__deltaH = dataOut.getDeltaH()
        self.__sample_rate = 1e6 * 0.15 / self.__deltaH
        self.__dtype = dataOut.dtype
        if len(dataOut.dtype) == 2:
            self.__dtype = dataOut.dtype[0]
        self.__nSamples = dataOut.systemHeaderObj.nSamples
        self.__nProfiles = dataOut.nProfiles

        if self.dataOut.type != 'Voltage':
            # BUG FIX: the original raised a plain string, which is itself a
            # TypeError under Python 3; raise a real exception instead.
            raise ValueError('Digital RF cannot be used with this data type')
            # NOTE(review): unreachable after the raise above; looks like a
            # leftover from an earlier Spectra-support path.
            self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
                self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
        else:
            self.arr_data = numpy.ones((self.__nSamples, len(
                self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])

        file_cadence_millisecs = 1000

        sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
        sample_rate_numerator = int(sample_rate_fraction.numerator)
        sample_rate_denominator = int(sample_rate_fraction.denominator)
        start_global_index = dataOut.utctime * self.__sample_rate

        uuid = 'prueba'
        compression_level = 0
        checksum = False
        is_complex = True
        num_subchannels = len(dataOut.channelList)
        is_continuous = True
        marching_periods = False

        self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
                                                          fileCadence, start_global_index,
                                                          sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
                                                          is_complex, num_subchannels, is_continuous, marching_periods)
        metadata_dir = os.path.join(path, 'metadata')
        # Portable replacement for os.system('mkdir %s'): no shell, and
        # silent when the directory already exists (matching the old
        # ignore-the-exit-status behaviour).
        os.makedirs(metadata_dir, exist_ok=True)
        self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1,  # 236, file_cadence_millisecs / 1000
                                                                        sample_rate_numerator, sample_rate_denominator,
                                                                        metadataFile)
        self.isConfig = True
        self.currentSample = 0
        self.oldAverage = 0
        self.count = 0
        return

    def writeMetadata(self):
        '''Write the header dictionaries to the Digital Metadata channel.'''
        start_idx = self.__sample_rate * self.dataOut.utctime

        self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
        )
        self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
        )
        self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
        )
        self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
        return

    def timeit(self, toExecute):
        '''Execute *toExecute*, timing it and updating the running average.'''
        # BUG FIX: the original called the bare name ``time()``; this module
        # uses the ``time`` module elsewhere, so that call raises TypeError.
        t0 = time.time()
        toExecute()
        self.executionTime = time.time() - t0
        if self.oldAverage is None:
            self.oldAverage = self.executionTime
        self.oldAverage = (self.executionTime + self.count *
                           self.oldAverage) / (self.count + 1.0)
        self.count = self.count + 1.0
        return

    def writeData(self):
        '''Copy the current dataOut voltages into arr_data and write them.'''
        if self.dataOut.type != 'Voltage':
            # BUG FIX: proper exception instead of raising a string.
            raise ValueError('Digital RF cannot be used with this data type')
            # NOTE(review): unreachable after the raise above (leftover
            # Spectra path); it also indexes self.arr_data[1] although
            # arr_data only has one row in that shape.
            for channel in self.dataOut.channelList:
                for i in range(self.dataOut.nFFTPoints):
                    self.arr_data[1][channel * self.dataOut.nFFTPoints +
                                     i]['r'] = self.dataOut.data[channel][i].real
                    self.arr_data[1][channel * self.dataOut.nFFTPoints +
                                     i]['i'] = self.dataOut.data[channel][i].imag
        else:
            for i in range(self.dataOut.systemHeaderObj.nSamples):
                for channel in self.dataOut.channelList:
                    self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
                    self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag

        def f(): return self.digitalWriteObj.rf_write(self.arr_data)
        self.timeit(f)

        return

    def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
        '''
        Called once per dataOut block: configures the writer on first use
        (also writing the metadata record) and appends the block's samples.
        Inputs:
            dataOut: object with the data
        '''
        # print dataOut.__dict__
        self.dataOut = dataOut
        if not self.isConfig:
            self.setup(dataOut, path, frequency, fileCadence,
                       dirCadence, metadataCadence, **kwargs)
            self.writeMetadata()

        self.writeData()

        ## self.currentSample += 1
        # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
        #     self.writeMetadata()
        ## if self.currentSample == self.__nProfiles: self.currentSample = 0

        return dataOut  # this return is absent in the 2.7 version

    def close(self):
        '''Flush and close the Digital RF writer, reporting the mean write time.'''
        print('[Writing] - Closing files ')
        print('Average of writing to digital rf format is ', self.oldAverage * 1000)
        try:
            self.digitalWriteObj.close()
        except:
            pass
@@ -1,849 +1,850
1 '''
1 '''
2 Created on Jul 3, 2014
2 Created on Jul 3, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 '''
5 '''
6
6
7 import os, sys
7 import os, sys
8 import time, datetime
8 import time, datetime
9 import numpy
9 import numpy
10 import fnmatch
10 import fnmatch
11 import glob
11 import glob
12 from time import sleep
12 from time import sleep
13
13
14 try:
14 try:
15 import pyfits
15 import pyfits
16 except ImportError as e:
16 except ImportError as e:
17 pass
17 pass
18
18
19 from xml.etree.ElementTree import ElementTree
19 from xml.etree.ElementTree import ElementTree
20
20
21 from .jroIO_base import isRadarFolder, isNumber
21 from .jroIO_base import isRadarFolder, isNumber
22 from schainpy.model.data.jrodata import Fits
22 from schainpy.model.data.jrodata import Fits
23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit, MPDecorator
23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit, MPDecorator
24 from schainpy.utils import log
24 from schainpy.utils import log
25
25
26
26
class PyFits(object):
    '''Thin helper around pyfits for assembling and writing FITS files.'''

    # Last-used column metadata and the HDU objects built so far.
    name = None
    format = None
    array = None
    data = None
    thdulist = None
    prihdr = None
    hdu = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        '''Build and remember a float32 pyfits Column from *array*.'''
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    # def setColP(self,name,format,data):
    # self.name=name
    # self.format=format
    # self.data=data
    # a2=numpy.array([self.data],dtype=numpy.float32)
    # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
    # return self.col2

    def writeData(self, name, format, data):
        '''Build and remember a float32 pyfits Column from *data*.'''
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, idblock, year, month, day, hour, minute, second):
        '''Create the primary HDU stamped with the block timestamp.'''
        self.hdu = pyfits.PrimaryHDU(idblock)
        header = self.hdu.header
        header.set("Year", year)
        header.set("Month", month)
        header.set("Day", day)
        header.set("Hour", hour)
        header.set("Minute", minute)
        header.set("Second", second)
        return self.hdu

    def Ctable(self, colList):
        '''Assemble the given columns into a binary-table HDU.'''
        self.cols = pyfits.ColDefs(colList)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        '''Bundle the primary and table HDUs into one HDU list.'''
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        '''Write the HDU list to *filename*, replacing any existing file.'''
        if os.path.isfile(filename):
            os.remove(filename)
        self.thdulist.writeto(filename)
89
89
90
90
class ParameterConf:
    '''Holder for a single <Parameter name=... value=.../> XML element.'''

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        '''Populate name/value from the element's attributes.'''
        self.name = parmElement.get('name')
        self.value = parmElement.get('value')

    def getElementName(self):
        '''Return the XML tag name this class maps to.'''
        return self.ELEMENTNAME
103
103
class Metadata(object):
    '''Reads <Parameter> metadata entries from an XML file into
    ParameterConf objects (used to populate the FITS header).'''

    def __init__(self, filename):
        # Accumulates one ParameterConf per <Parameter> element found.
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        '''Parse *filename* and collect every <Parameter> element found.'''
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # BUG FIX: Element.getiterator() was deprecated and removed in
        # Python 3.9; Element.iter() is the supported equivalent (available
        # since Python 2.7), with identical iteration semantics here.
        parmElementList = self.projectElement.iter(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
122
122
123 @MPDecorator
123 class FitsWriter(Operation):
124 class FitsWriter(Operation):
124 def __init__(self, **kwargs):
125 def __init__(self, **kwargs):
125 Operation.__init__(self, **kwargs)
126 Operation.__init__(self, **kwargs)
126 self.isConfig = False
127 self.isConfig = False
127 self.dataBlocksPerFile = None
128 self.dataBlocksPerFile = None
128 self.blockIndex = 0
129 self.blockIndex = 0
129 self.flagIsNewFile = 1
130 self.flagIsNewFile = 1
130 self.fitsObj = None
131 self.fitsObj = None
131 self.optchar = 'P'
132 self.optchar = 'P'
132 self.ext = '.fits'
133 self.ext = '.fits'
133 self.setFile = 0
134 self.setFile = 0
134
135
    def setFitsHeader(self, dataOut, metadatafile=None):
        '''Build the FITS primary header from *dataOut*, write it to
        self.filename, then append the height list as an extension.

        When *metadatafile* (an XML file) is given, its <Parameter>
        name/value pairs are copied verbatim into the header.

        NOTE(review): assumes self.filename has already been set by the
        file-handling logic before this is called — verify against callers.
        '''

        header_data = pyfits.PrimaryHDU()

        header_data.header['EXPNAME'] = "RADAR DATA"
        header_data.header['DATATYPE'] = "SPECTRA"
        header_data.header['COMMENT'] = ""

        if metadatafile:

            metadata4fits = Metadata(metadatafile)

            # Copy every configured parameter straight into the header.
            for parameter in metadata4fits.parmConfObjList:
                parm_name = parameter.name
                parm_value = parameter.value

                header_data.header[parm_name] = parm_value

        header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
        header_data.header['CHANNELLIST'] = str(dataOut.channelList)
        header_data.header['NCHANNELS'] = dataOut.nChannels
        #header_data.header['HEIGHTS'] = dataOut.heightList
        header_data.header['NHEIGHTS'] = dataOut.nHeights

        header_data.header['IPPSECONDS'] = dataOut.ippSeconds
        header_data.header['NCOHINT'] = dataOut.nCohInt
        header_data.header['NINCOHINT'] = dataOut.nIncohInt
        header_data.header['TIMEZONE'] = dataOut.timeZone
        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)

        self.addExtension(dataOut.heightList,'HEIGHTLIST')
168
169
169
170
170 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
171 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
171
172
172 self.path = path
173 self.path = path
173 self.dataOut = dataOut
174 self.dataOut = dataOut
174 self.metadatafile = metadatafile
175 self.metadatafile = metadatafile
175 self.dataBlocksPerFile = dataBlocksPerFile
176 self.dataBlocksPerFile = dataBlocksPerFile
176
177
177 def open(self):
178 def open(self):
178 self.fitsObj = pyfits.open(self.filename, mode='update')
179 self.fitsObj = pyfits.open(self.filename, mode='update')
179
180
180
181
181 def addExtension(self, data, tagname):
182 def addExtension(self, data, tagname):
182 self.open()
183 self.open()
183 extension = pyfits.ImageHDU(data=data, name=tagname)
184 extension = pyfits.ImageHDU(data=data, name=tagname)
184 #extension.header['TAG'] = tagname
185 #extension.header['TAG'] = tagname
185 self.fitsObj.append(extension)
186 self.fitsObj.append(extension)
186 self.write()
187 self.write()
187
188
188 def addData(self, data):
189 def addData(self, data):
189 self.open()
190 self.open()
190 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
191 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
191 extension.header['UTCTIME'] = self.dataOut.utctime
192 extension.header['UTCTIME'] = self.dataOut.utctime
192 self.fitsObj.append(extension)
193 self.fitsObj.append(extension)
193 self.blockIndex += 1
194 self.blockIndex += 1
194 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
195 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
195
196
196 self.write()
197 self.write()
197
198
198 def write(self):
199 def write(self):
199
200
200 self.fitsObj.flush(verbose=True)
201 self.fitsObj.flush(verbose=True)
201 self.fitsObj.close()
202 self.fitsObj.close()
202
203
203
204
204 def setNextFile(self):
205 def setNextFile(self):
205
206
206 ext = self.ext
207 ext = self.ext
207 path = self.path
208 path = self.path
208
209
209 timeTuple = time.localtime( self.dataOut.utctime)
210 timeTuple = time.localtime( self.dataOut.utctime)
210 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
211 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
211
212
212 fullpath = os.path.join( path, subfolder )
213 fullpath = os.path.join( path, subfolder )
213 if not( os.path.exists(fullpath) ):
214 if not( os.path.exists(fullpath) ):
214 os.mkdir(fullpath)
215 os.mkdir(fullpath)
215 self.setFile = -1 #inicializo mi contador de seteo
216 self.setFile = -1 #inicializo mi contador de seteo
216 else:
217 else:
217 filesList = os.listdir( fullpath )
218 filesList = os.listdir( fullpath )
218 if len( filesList ) > 0:
219 if len( filesList ) > 0:
219 filesList = sorted( filesList, key=str.lower )
220 filesList = sorted( filesList, key=str.lower )
220 filen = filesList[-1]
221 filen = filesList[-1]
221
222
222 if isNumber( filen[8:11] ):
223 if isNumber( filen[8:11] ):
223 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
224 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
224 else:
225 else:
225 self.setFile = -1
226 self.setFile = -1
226 else:
227 else:
227 self.setFile = -1 #inicializo mi contador de seteo
228 self.setFile = -1 #inicializo mi contador de seteo
228
229
229 setFile = self.setFile
230 setFile = self.setFile
230 setFile += 1
231 setFile += 1
231
232
232 thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
233 thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
233 timeTuple.tm_year,
234 timeTuple.tm_year,
234 timeTuple.tm_yday,
235 timeTuple.tm_yday,
235 setFile,
236 setFile,
236 ext )
237 ext )
237
238
238 filename = os.path.join( path, subfolder, thisFile )
239 filename = os.path.join( path, subfolder, thisFile )
239
240
240 self.blockIndex = 0
241 self.blockIndex = 0
241 self.filename = filename
242 self.filename = filename
242 self.setFile = setFile
243 self.setFile = setFile
243 self.flagIsNewFile = 1
244 self.flagIsNewFile = 1
244
245
245 print('Writing the file: %s'%self.filename)
246 print('Writing the file: %s'%self.filename)
246
247
247 self.setFitsHeader(self.dataOut, self.metadatafile)
248 self.setFitsHeader(self.dataOut, self.metadatafile)
248
249
249 return 1
250 return 1
250
251
251 def writeBlock(self):
252 def writeBlock(self):
252 self.addData(self.dataOut.data_spc)
253 self.addData(self.dataOut.data_spc)
253 self.flagIsNewFile = 0
254 self.flagIsNewFile = 0
254
255
255
256
256 def __setNewBlock(self):
257 def __setNewBlock(self):
257
258
258 if self.flagIsNewFile:
259 if self.flagIsNewFile:
259 return 1
260 return 1
260
261
261 if self.blockIndex < self.dataBlocksPerFile:
262 if self.blockIndex < self.dataBlocksPerFile:
262 return 1
263 return 1
263
264
264 if not( self.setNextFile() ):
265 if not( self.setNextFile() ):
265 return 0
266 return 0
266
267
267 return 1
268 return 1
268
269
269 def writeNextBlock(self):
270 def writeNextBlock(self):
270 if not( self.__setNewBlock() ):
271 if not( self.__setNewBlock() ):
271 return 0
272 return 0
272 self.writeBlock()
273 self.writeBlock()
273 return 1
274 return 1
274
275
275 def putData(self):
276 def putData(self):
276 if self.flagIsNewFile:
277 if self.flagIsNewFile:
277 self.setNextFile()
278 self.setNextFile()
278 self.writeNextBlock()
279 self.writeNextBlock()
279
280
280 def run(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None, **kwargs):
281 def run(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None, **kwargs):
281 if not(self.isConfig):
282 if not(self.isConfig):
282 self.setup(dataOut, path, dataBlocksPerFile=dataBlocksPerFile, metadatafile=metadatafile, **kwargs)
283 self.setup(dataOut, path, dataBlocksPerFile=dataBlocksPerFile, metadatafile=metadatafile, **kwargs)
283 self.isConfig = True
284 self.isConfig = True
284 self.putData()
285 self.putData()
285
286
class FitsReader(ProcessingUnit):
    """Read spectra data back from FITS files produced by FitsWriter.

    Offline mode walks ``d<year><doy>`` day folders under *path*, filters
    the files by date/time range and serves them block by block through
    ``getData()``.  Online mode is not implemented.
    """

    expName = None
    datetimestr = None
    utc = None
    nChannels = None
    nSamples = None
    dataBlocksPerFile = None
    comments = None
    lastUTTime = None
    header_dict = None          # primary-HDU header of the current file
    data = None                 # payload of the block last read
    data_header_dict = None     # header of the block last read

    def __init__(self):
        ProcessingUnit.__init__(self)
        self.isConfig = False
        self.ext = '.fits'
        self.setFile = 0
        self.flagNoMoreFiles = 0
        self.flagIsNewFile = 1
        self.flagDiscontinuousBlock = None
        self.fileIndex = None
        self.filename = None
        self.fileSize = None
        self.fitsObj = None
        self.timeZone = None
        self.nReadBlocks = 0
        self.nTotalBlocks = 0
        self.dataOut = self.createObjByDefault()
        # Gap (seconds) between blocks above which a discontinuity is
        # flagged; should be user-configurable through setup().
        self.maxTimeStep = 10
        self.blockIndex = 1

    def createObjByDefault(self):
        """Return a fresh output data object (project ``Fits`` container)."""
        dataObj = Fits()

        return dataObj

    def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
        """Return the file's datetime if its DATETIME header falls inside
        [startTime, endTime), else None.  Unreadable files return None."""
        try:
            fitsObj = pyfits.open(filename, 'readonly')
        except:
            print("File %s can't be opened" % (filename))
            return None

        header = fitsObj[0].header
        struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
        # NOTE(review): uses the host's timezone; TIMEZONE should arguably
        # come from the FITS header itself.
        utc = time.mktime(struct_time) - time.timezone

        ltc = utc
        if useLocalTime:
            ltc -= time.timezone
        thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
        thisTime = thisDatetime.time()

        if not ((startTime <= thisTime) and (endTime > thisTime)):
            return None

        return thisDatetime

    def __setNextFileOnline(self):
        raise NotImplementedError

    def __setNextFileOffline(self):
        """Advance to the next file in ``self.filenameList``.

        Returns 1 on success; 0 (and sets ``flagNoMoreFiles``) when the
        list is exhausted.
        """
        idFile = self.fileIndex

        while True:
            idFile += 1
            if not (idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print("No more Files")
                return 0

            filename = self.filenameList[idFile]

            fileSize = os.path.getsize(filename)
            fitsObj = pyfits.open(filename, 'readonly')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fitsObj = fitsObj
        self.blockIndex = 0
        print("Setting the file: %s" % self.filename)

        return 1

    def __setValuesFromHeader(self):
        """Copy the metadata parsed by readHeader() into ``self.dataOut``."""
        self.dataOut.header = self.header_dict
        self.dataOut.expName = self.expName

        self.dataOut.timeZone = self.timeZone
        self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
        self.dataOut.comments = self.comments
        self.dataOut.channelList = self.channelList
        self.dataOut.heightList = self.heightList

        self.dataOut.nCohInt = self.nCohInt
        self.dataOut.nIncohInt = self.nIncohInt
        self.dataOut.ipp_sec = self.ippSeconds

    def readHeader(self):
        """Parse the primary HDU of the current file into instance fields,
        then read the height-list extension."""
        headerObj = self.fitsObj[0]

        self.header_dict = headerObj.header
        if 'EXPNAME' in list(headerObj.header.keys()):
            self.expName = headerObj.header['EXPNAME']

        if 'DATATYPE' in list(headerObj.header.keys()):
            self.dataType = headerObj.header['DATATYPE']

        self.datetimestr = headerObj.header['DATETIME']
        # CHANNELLIST is stored as the str() of a Python list, e.g. "[0, 1]";
        # strip the brackets and split on commas to recover the ints.
        channelList = headerObj.header['CHANNELLIST']
        channelList = channelList.split('[')
        channelList = channelList[1].split(']')
        channelList = channelList[0].split(',')
        channelList = [int(ch) for ch in channelList]
        self.channelList = channelList
        self.nChannels = headerObj.header['NCHANNELS']
        self.nHeights = headerObj.header['NHEIGHTS']
        self.ippSeconds = headerObj.header['IPPSECONDS']
        self.nCohInt = headerObj.header['NCOHINT']
        self.nIncohInt = headerObj.header['NINCOHINT']
        self.dataBlocksPerFile = headerObj.header['NBLOCK']
        self.timeZone = headerObj.header['TIMEZONE']

        if 'COMMENT' in list(headerObj.header.keys()):
            self.comments = headerObj.header['COMMENT']

        self.readHeightList()

    def readHeightList(self):
        """Read the HEIGHTLIST extension and position blockIndex at the
        first data block (extension index 2)."""
        self.blockIndex = self.blockIndex + 1
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def readExtension(self):
        """Read the extension at the current blockIndex into heightList."""
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def setNextFile(self):
        """Open the next file (online/offline) and parse its header.

        Returns 1 on success, 0 when there are no more files.
        """
        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not newFile:
            return 0

        self.readHeader()
        self.__setValuesFromHeader()
        self.nReadBlocks = 0
        return 1

    def searchFilesOffLine(self,
                           path,
                           startDate,
                           endDate,
                           startTime=datetime.time(0, 0, 0),
                           endTime=datetime.time(23, 59, 59),
                           set=None,
                           expLabel='',
                           ext='.fits',
                           walk=True):
        """Collect the FITS files under *path* matching the date/time range.

        With *walk* True, day folders matching ``?YYYYDDD*`` are scanned;
        otherwise *path* itself is used.  Returns ``(pathList,
        filenameList)`` or ``(None, None)`` when nothing matches.  Also
        stores ``self.filenameList`` / ``self.datetimeList``.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path, thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not dirList:
                return None, None

            thisDate = startDate

            while thisDate <= endDate:
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')
                if len(matchlist) == 0:
                    thisDate += datetime.timedelta(1)
                    continue
                for match in matchlist:
                    pathList.append(os.path.join(path, match, expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print("Any folder was found for the date range: %s-%s" % (startDate, endDate))
            return None, None

        print("%d folder(s) was(were) found for the date range: %s - %s" % (len(pathList), startDate, endDate))

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" % ext)
            fileList.sort()

            for thisFile in fileList:

                filename = os.path.join(thisPath, thisFile)
                thisDatetime = self.isFileinThisTime(filename, startTime, endTime)

                if not thisDatetime:
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not filenameList:
            print("Any file was found for the time range %s - %s" % (startTime, endTime))
            return None, None

        print("%d file(s) was(were) found for the time range: %s - %s" % (len(filenameList), startTime, endTime))
        print()

        for i in range(len(filenameList)):
            print("%s -> [%s]" % (filenameList[i], datetimeList[i].ctime()))

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def setup(self, path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0, 0, 0),
              endTime=datetime.time(23, 59, 59),
              set=0,
              expLabel="",
              ext=None,
              online=False,
              delay=60,
              walk=True):
        """Configure the reader and open the first file.

        Exits the process (``sys.exit(-1)``) when no file matches —
        mirrors the behavior of the other readers in this module.
        """
        if path == None:
            raise ValueError("The path is not valid")

        if ext == None:
            ext = self.ext

        if not online:
            print("Searching files in offline mode ...")
            pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                             startTime=startTime, endTime=endTime,
                                                             set=set, expLabel=expLabel, ext=ext,
                                                             walk=walk)

            if not pathList:
                print("No *%s files into the folder %s \nfor the range: %s - %s" % (ext, path,
                                                                                    datetime.datetime.combine(startDate, startTime).ctime(),
                                                                                    datetime.datetime.combine(endDate, endTime).ctime()))

                sys.exit(-1)

            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not self.setNextFile():
            if (startDate != None) and (endDate != None):
                print("No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
            elif startDate != None:
                print("No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
            else:
                print("No files")

            sys.exit(-1)

    def readBlock(self):
        """Load the extension at blockIndex into self.data and advance."""
        dataObj = self.fitsObj[self.blockIndex]

        self.data = dataObj.data
        self.data_header_dict = dataObj.header
        self.utc = self.data_header_dict['UTCTIME']

        self.flagIsNewFile = 0
        self.blockIndex += 1
        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def __jumpToLastBlock(self):
        raise NotImplementedError

    def __waitNewBlock(self):
        """Return 1 if a new data block was found, 0 otherwise.

        Always returns 0 in offline mode.
        NOTE(review): the online branch references ``self.fp``,
        ``self.processingHeaderObj``, ``self.basicHeaderSize`` and
        ``self.nTries``, which are never initialized in this class —
        apparently copied from the binary readers; unreachable today
        because __jumpToLastBlock raises first.  Confirm before enabling
        online mode.
        """
        if not self.online:
            return 0

        if self.nReadBlocks >= self.dataBlocksPerFile:
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if currentSize >= neededSize:
                self.__rdBasicHeader()
                return 1

            print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            sleep(self.delay)

        return 0

    def __setNewBlock(self):
        """Decide whether another block can be read from the current file,
        advancing to the next file when exhausted.  Flags a discontinuity
        when the inter-file time gap exceeds maxTimeStep."""
        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.utc

        if self.online:
            if self.__waitNewBlock():
                return 1

        if self.nReadBlocks < self.dataBlocksPerFile:
            return 1

        if not self.setNextFile():
            return 0

        deltaTime = self.utc - self.lastUTTime

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

    def readNextBlock(self):
        """Position on a readable block and read it.  Returns 1/0."""
        if not self.__setNewBlock():
            return 0

        if not self.readBlock():
            return 0

        return 1

    def printInfo(self):

        pass

    def getData(self):
        """Read the next block into ``self.dataOut``.

        Returns a ``(code, message)`` tuple on exhaustion/error and None
        on success, setting ``dataOut.flagNoData`` accordingly (callers
        only inspect the flag).
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return (0, 'No more files')

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if not self.readNextBlock():
            return (1, 'Error reading data')

        if self.data is None:
            self.dataOut.flagNoData = True
            return (0, 'No more data')

        self.dataOut.data = self.data
        self.dataOut.data_header = self.data_header_dict
        self.dataOut.utctime = self.utc

        self.dataOut.flagNoData = False

    def run(self, **kwargs):
        """ProcessingUnit entry point: configure on first call, then read."""
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
@MPDecorator
class SpectraHeisWriter(Operation):
    """Write SpectraHeis power-spectra blocks to FITS files.

    One FITS file is produced per data block inside a per-day subfolder of
    ``wrpath``; ``setFile`` and ``idblock`` counters track file/block numbers.
    """

    setFile = None      # running file number within the current folder
    idblock = None      # running block id written to the FITS header
    doypath = None      # day-of-year output folder (created lazily)
    subfolder = None

    def __init__(self):
        Operation.__init__(self)
        self.wrObj = PyFits()
        self.nTotalBlocks = 0
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(value):
        """Return True if *value* can be converted to a float, else False.

        NOTE(review): originally declared as ``def isNumber(str)`` — an
        instance method missing ``self`` and shadowing the builtin ``str``;
        made a static method with a neutral parameter name so both
        ``SpectraHeisWriter.isNumber(x)`` and ``self.isNumber(x)`` work.
        """
        try:
            float(value)
            return True
        except (TypeError, ValueError):
            return False

    def setup(self, dataOut, wrpath):
        """Store the output path (creating it if missing) and the data holder."""
        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.dataOut = dataOut

    def putData(self):
        """Write the current block to a new FITS file and bump the counters.

        Returns 1 on success.
        """
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"

        # Create the day folder on first use; suffix with current epoch so
        # re-runs on the same day get a distinct folder.
        if self.doypath is None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (
                name.tm_year, name.tm_yday,
                time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join(self.wrpath, self.subfolder)
            os.mkdir(self.doypath)

        if self.setFile is None:
            self.setFile = 0

        # make the filename: DyyyyDDD_sss.fits
        thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year, name.tm_yday,
                                            self.setFile, ext)

        filename = os.path.join(self.wrpath, self.subfolder, thisFile)

        idblock = numpy.array([self.idblock], dtype="int64")
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=time.gmtime(self.dataOut.utctime).tm_year,
                                    month=time.gmtime(self.dataOut.utctime).tm_mon,
                                    day=time.gmtime(self.dataOut.utctime).tm_mday,
                                    hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                    minute=time.gmtime(self.dataOut.utctime).tm_min,
                                    second=time.gmtime(self.dataOut.utctime).tm_sec)

        # Frequency axis centered at 0, derived from the height spacing.
        c = 3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1 * self.dataOut.nHeights / 2.,
                            self.dataOut.nHeights / 2.) * (c / (2 * deltaHeight * 1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq",
                                     format=str(self.dataOut.nFFTPoints) + 'E',
                                     array=freq)
        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        # One power column (in dB) per channel.
        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh" + str(i + 1),
                                       format=str(self.dataOut.nFFTPoints) + 'E',
                                       data=10 * numpy.log10(self.dataOut.data_spc[i, :]))
            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header, data)

        self.wrObj.wFile(filename)

        # update the setFile
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Configure on first call, then write the incoming block."""
        if not self.isConfig:
            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
        return dataOut
@@ -1,595 +1,595
1 '''
1 '''
2 Created on Aug 1, 2017
2 Created on Aug 1, 2017
3
3
4 @author: Juan C. Espinoza
4 @author: Juan C. Espinoza
5 '''
5 '''
6
6
7 import os
7 import os
8 import sys
8 import sys
9 import time
9 import time
10 import json
10 import json
11 import glob
11 import glob
12 import datetime
12 import datetime
13
13
14 import numpy
14 import numpy
15 import h5py
15 import h5py
16
16
17 import schainpy.admin
17 import schainpy.admin
18 from schainpy.model.io.jroIO_base import LOCALTIME, Reader
18 from schainpy.model.io.jroIO_base import LOCALTIME, Reader
19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
20 from schainpy.model.data.jrodata import Parameters
20 from schainpy.model.data.jrodata import Parameters
21 from schainpy.utils import log
21 from schainpy.utils import log
22
22
23 try:
23 try:
24 import madrigal.cedar
24 import madrigal.cedar
25 except:
25 except:
26 pass
26 pass
27
27
# Py2/Py3 compatibility shim: 'basestring' does not exist in Python 3,
# so alias it to 'str' there. Catch only NameError (the original bare
# 'except' would also swallow unrelated failures).
try:
    basestring
except NameError:
    basestring = str
32
32
# Default Madrigal catalog metadata used when none is supplied.
DEF_CATALOG = {
    'principleInvestigator': 'Marco Milla',
    'expPurpose': '',
    'cycleTime': '',
    'correlativeExp': '',
    'sciRemarks': '',
    'instRemarks': ''
}

# Default Madrigal header metadata used when none is supplied.
DEF_HEADER = {
    'kindatDesc': '',
    'analyst': 'Jicamarca User',
    'comments': '',
    'history': ''
}

# Instrument code (kinst) -> filename mnemonic.
MNEMONICS = {
    10: 'jro',
    11: 'jbr',
    840: 'jul',
    13: 'jas',
    1000: 'pbr',
    1001: 'hbr',
    1002: 'obr',
    400: 'clr'
}

# Unix epoch shifted by the local UTC offset; helper for time arithmetic.
UT1970 = datetime.datetime(1970, 1, 1) - datetime.timedelta(seconds=time.timezone)
62
62
def load_json(obj):
    '''
    Parse json as plain str instead of unicode.

    Accepts either a JSON string or an already-parsed object. Dicts are
    converted recursively (keys coerced to str, string values coerced via
    str); lists/tuples have their string elements coerced. Any other value
    is returned unchanged.
    '''

    if isinstance(obj, str):
        iterable = json.loads(obj)
    else:
        iterable = obj

    if isinstance(iterable, dict):
        # NOTE: only dict values recurse; strings inside nested lists are
        # coerced but dicts inside lists are left as-is (original behavior).
        return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, basestring) else v
                for k, v in list(iterable.items())}
    elif isinstance(iterable, (list, tuple)):
        return [str(v) if isinstance(v, basestring) else v for v in iterable]

    return iterable
80
80
class MADReader(Reader, ProcessingUnit):
    """Reader for Madrigal files in 'simple'/'txt', 'cedar' or 'hdf5' format.

    Maps the file's one- and two-dimensional parameters (configured through
    the ``oneDDict``/``twoDDict``/``ind2DList`` json inputs) onto a
    ``Parameters`` data object, one record at a time.
    """

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = Parameters()
        self.counter_records = 0
        self.nrecords = None
        self.flagNoMoreFiles = 0
        self.filename = None
        self.intervals = set()   # distinct inter-record intervals seen (seconds)
        self.datatime = datetime.datetime(1900, 1, 1)
        self.format = None
        self.filefmt = "***%Y%m%d*******"

    def setup(self, **kwargs):
        """Parse kwargs, choose file opener/extension and locate input files."""
        self.set_kwargs(**kwargs)
        self.oneDDict = load_json(self.oneDDict)
        self.twoDDict = load_json(self.twoDDict)
        self.ind2DList = load_json(self.ind2DList)
        self.independentParam = self.ind2DList[0]

        if self.path is None:
            raise ValueError('The path is not valid')

        self.open_file = open
        self.open_mode = 'rb'

        if self.format is None:
            raise ValueError('The format is not valid choose simple or hdf5')
        elif self.format.lower() in ('simple', 'txt'):
            self.ext = '.txt'
        elif self.format.lower() in ('cedar',):
            self.ext = '.001'
        else:
            self.ext = '.hdf5'
            self.open_file = h5py.File
            self.open_mode = 'r'

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt)

                try:
                    fullpath = next(fullpath)
                # Narrowed from a bare 'except': still best-effort, but no
                # longer swallows SystemExit/KeyboardInterrupt.
                except Exception:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not fullpath:
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

    def readFirstHeader(self):
        '''Read header and data'''

        self.parseHeader()
        self.parseData()
        self.blockIndex = 0

        return

    def parseHeader(self):
        '''
        Extract the parameter list from the current file and drop any
        configured 1D/2D parameter that is not present in it.
        '''

        self.output = {}
        self.version = '2'
        s_parameters = None
        if self.ext == '.txt':
            self.parameters = [s.strip().lower() for s in self.fp.readline().decode().strip().split(' ') if s]
        elif self.ext == '.hdf5':
            self.metadata = self.fp['Metadata']
            if '_record_layout' in self.metadata:
                s_parameters = [s[0].lower().decode() for s in self.metadata['Independent Spatial Parameters']]
                self.version = '3'
            self.parameters = [s[0].lower().decode() for s in self.metadata['Data Parameters']]

        log.success('Parameters found: {}'.format(self.parameters),
                    'MADReader')
        if s_parameters:
            log.success('Spatial parameters found: {}'.format(s_parameters),
                        'MADReader')

        for param in list(self.oneDDict.keys()):
            if param.lower() not in self.parameters:
                log.warning(
                    'Parameter {} not found will be ignored'.format(
                        param),
                    'MADReader')
                self.oneDDict.pop(param, None)

        for param, value in list(self.twoDDict.items()):
            if param.lower() not in self.parameters:
                log.warning(
                    'Parameter {} not found, it will be ignored'.format(
                        param),
                    'MADReader')
                self.twoDDict.pop(param, None)
                continue
            if isinstance(value, list):
                # Pre-allocate one output slot per (attr, position) mapping.
                if value[0] not in self.output:
                    self.output[value[0]] = []
                self.output[value[0]].append([])

    def parseData(self):
        '''
        Load the data table and derive record/range bookkeeping.
        '''

        if self.ext == '.txt':
            self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
            self.nrecords = self.data.shape[0]
            self.ranges = numpy.unique(self.data[:, self.parameters.index(self.independentParam.lower())])
            self.counter_records = 0
        elif self.ext == '.hdf5':
            self.data = self.fp['Data']
            self.ranges = numpy.unique(self.data['Table Layout'][self.independentParam.lower()])
            self.times = numpy.unique(self.data['Table Layout']['ut1_unix'])
            self.counter_records = int(self.data['Table Layout']['recno'][0])
            self.nrecords = int(self.data['Table Layout']['recno'][-1])

    def readNextBlock(self):
        """Advance to the next record inside the configured date/time window.

        Records outside [startDate+startTime, endDate+endTime] are skipped
        with a warning. Returns 1 once a valid record has been read.
        """

        while True:
            self.flagDiscontinuousBlock = 0
            if self.counter_records == self.nrecords:
                self.setNextFile()

            self.readBlock()

            if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
               (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
                log.warning(
                    'Reading Record No. {}/{} -> {} [Skipping]'.format(
                        self.counter_records,
                        self.nrecords,
                        self.datatime.ctime()),
                    'MADReader')
                continue
            break

        log.log(
            'Reading Record No. {}/{} -> {}'.format(
                self.counter_records,
                self.nrecords,
                self.datatime.ctime()),
            'MADReader')

        return 1

    def readBlock(self):
        '''
        Read all rows sharing the current timestamp into ``self.buffer``
        and update ``self.datatime`` / discontinuity flag.
        '''
        dum = []
        if self.ext == '.txt':
            dt = self.data[self.counter_records][:6].astype(int)
            if datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5]).date() > self.datatime.date():
                self.flagDiscontinuousBlock = 1
            self.datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
            while True:
                dt = self.data[self.counter_records][:6].astype(int)
                datatime = datetime.datetime(dt[0], dt[1], dt[2], dt[3], dt[4], dt[5])
                if datatime == self.datatime:
                    dum.append(self.data[self.counter_records])
                    self.counter_records += 1
                    if self.counter_records == self.nrecords:
                        break
                    continue
                # First row of the next timestamp: record the interval and stop.
                self.intervals.add((datatime - self.datatime).seconds)
                break
        elif self.ext == '.hdf5':
            datatime = datetime.datetime.utcfromtimestamp(
                self.times[self.counter_records])
            dum = self.data['Table Layout'][self.data['Table Layout']['recno'] == self.counter_records]
            self.intervals.add((datatime - self.datatime).seconds)
            if datatime.date() > self.datatime.date():
                self.flagDiscontinuousBlock = 1
            self.datatime = datatime
            self.counter_records += 1

        self.buffer = numpy.array(dum)
        return

    def set_output(self):
        '''
        Storing data from buffer to dataOut object
        '''

        parameters = [None for __ in self.parameters]

        for param, attr in list(self.oneDDict.items()):
            x = self.parameters.index(param.lower())
            setattr(self.dataOut, attr, self.buffer[0][x])

        for param, value in list(self.twoDDict.items()):
            # NaN-filled template over the full range axis; only the ranges
            # present in this record get real values.
            dummy = numpy.zeros(self.ranges.shape) + numpy.nan
            if self.ext == '.txt':
                x = self.parameters.index(param.lower())
                y = self.parameters.index(self.independentParam.lower())
                ranges = self.buffer[:, y]
                index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
                dummy[index] = self.buffer[:, x]
            else:
                ranges = self.buffer[self.independentParam.lower()]
                index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
                dummy[index] = self.buffer[param.lower()]

            if isinstance(value, str):
                if value not in self.independentParam:
                    setattr(self.dataOut, value, dummy.reshape(1, -1))
            elif isinstance(value, list):
                self.output[value[0]][value[1]] = dummy
                parameters[value[1]] = param
        for key, value in list(self.output.items()):
            setattr(self.dataOut, key, numpy.array(value))

        self.dataOut.parameters = [s for s in parameters if s]
        self.dataOut.heightList = self.ranges
        self.dataOut.utctime = (self.datatime - datetime.datetime(1970, 1, 1)).total_seconds()
        self.dataOut.utctimeInit = self.dataOut.utctime
        self.dataOut.paramInterval = min(self.intervals)
        self.dataOut.useLocalTime = False
        self.dataOut.flagNoData = False
        self.dataOut.nrecords = self.nrecords
        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

    def getData(self):
        '''
        Storing data from databuffer to dataOut object
        '''

        if not self.readNextBlock():
            self.dataOut.flagNoData = True
            return 0

        self.set_output()

        return 1

    def run(self, **kwargs):
        """Entry point: configure on first call, then read the next record."""

        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

        return
353
353
354 @MPDecorator
354 @MPDecorator
355 class MADWriter(Operation):
355 class MADWriter(Operation):
356 '''Writing module for Madrigal files
356 '''Writing module for Madrigal files
357
357
358 type: external
358 type: external
359
359
360 Inputs:
360 Inputs:
361 path path where files will be created
361 path path where files will be created
362 oneDDict json of one-dimensional parameters in record where keys
362 oneDDict json of one-dimensional parameters in record where keys
363 are Madrigal codes (integers or mnemonics) and values the corresponding
363 are Madrigal codes (integers or mnemonics) and values the corresponding
364 dataOut attribute e.g: {
364 dataOut attribute e.g: {
365 'gdlatr': 'lat',
365 'gdlatr': 'lat',
366 'gdlonr': 'lon',
366 'gdlonr': 'lon',
367 'gdlat2':'lat',
367 'gdlat2':'lat',
368 'glon2':'lon'}
368 'glon2':'lon'}
369 ind2DList list of independent spatial two-dimensional parameters e.g:
369 ind2DList list of independent spatial two-dimensional parameters e.g:
370 ['heigthList']
370 ['heigthList']
371 twoDDict json of two-dimensional parameters in record where keys
371 twoDDict json of two-dimensional parameters in record where keys
372 are Madrigal codes (integers or mnemonics) and values the corresponding
372 are Madrigal codes (integers or mnemonics) and values the corresponding
373 dataOut attribute if multidimensional array specify as tupple
373 dataOut attribute if multidimensional array specify as tupple
374 ('attr', pos) e.g: {
374 ('attr', pos) e.g: {
375 'gdalt': 'heightList',
375 'gdalt': 'heightList',
376 'vn1p2': ('data_output', 0),
376 'vn1p2': ('data_output', 0),
377 'vn2p2': ('data_output', 1),
377 'vn2p2': ('data_output', 1),
378 'vn3': ('data_output', 2),
378 'vn3': ('data_output', 2),
379 'snl': ('data_SNR', 'db')
379 'snl': ('data_SNR', 'db')
380 }
380 }
381 metadata json of madrigal metadata (kinst, kindat, catalog and header)
381 metadata json of madrigal metadata (kinst, kindat, catalog and header)
382 format hdf5, cedar
382 format hdf5, cedar
383 blocks number of blocks per file'''
383 blocks number of blocks per file'''
384
384
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
385 __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
386 missing = -32767
386 missing = -32767
387
387
388 def __init__(self):
388 def __init__(self):
389
389
390 Operation.__init__(self)
390 Operation.__init__(self)
391 self.dataOut = Parameters()
391 self.dataOut = Parameters()
392 self.counter = 0
392 self.counter = 0
393 self.path = None
393 self.path = None
394 self.fp = None
394 self.fp = None
395
395
396 def run(self, dataOut, path, oneDDict, ind2DList='[]', twoDDict='{}',
396 def run(self, dataOut, path, oneDDict, ind2DList='[]', twoDDict='{}',
397 metadata='{}', format='cedar', **kwargs):
397 metadata='{}', format='cedar', **kwargs):
398
398
399 if not self.isConfig:
399 if not self.isConfig:
400 self.setup(path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs)
400 self.setup(path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs)
401 self.isConfig = True
401 self.isConfig = True
402
402
403 self.dataOut = dataOut
403 self.dataOut = dataOut
404 self.putData()
404 self.putData()
405 return 1
405 return 1
406
406
407 def setup(self, path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs):
407 def setup(self, path, oneDDict, ind2DList, twoDDict, metadata, format, **kwargs):
408 '''
408 '''
409 Configure Operation
409 Configure Operation
410 '''
410 '''
411
411
412 self.path = path
412 self.path = path
413 self.blocks = kwargs.get('blocks', None)
413 self.blocks = kwargs.get('blocks', None)
414 self.counter = 0
414 self.counter = 0
415 self.oneDDict = load_json(oneDDict)
415 self.oneDDict = load_json(oneDDict)
416 self.twoDDict = load_json(twoDDict)
416 self.twoDDict = load_json(twoDDict)
417 self.ind2DList = load_json(ind2DList)
417 self.ind2DList = load_json(ind2DList)
418 meta = load_json(metadata)
418 meta = load_json(metadata)
419 self.kinst = meta.get('kinst')
419 self.kinst = meta.get('kinst')
420 self.kindat = meta.get('kindat')
420 self.kindat = meta.get('kindat')
421 self.catalog = meta.get('catalog', DEF_CATALOG)
421 self.catalog = meta.get('catalog', DEF_CATALOG)
422 self.header = meta.get('header', DEF_HEADER)
422 self.header = meta.get('header', DEF_HEADER)
423 if format == 'cedar':
423 if format == 'cedar':
424 self.ext = '.dat'
424 self.ext = '.dat'
425 self.extra_args = {}
425 self.extra_args = {}
426 elif format == 'hdf5':
426 elif format == 'hdf5':
427 self.ext = '.hdf5'
427 self.ext = '.hdf5'
428 self.extra_args = {'ind2DList': self.ind2DList}
428 self.extra_args = {'ind2DList': self.ind2DList}
429
429
430 self.keys = [k.lower() for k in self.twoDDict]
430 self.keys = [k.lower() for k in self.twoDDict]
431 if 'range' in self.keys:
431 if 'range' in self.keys:
432 self.keys.remove('range')
432 self.keys.remove('range')
433 if 'gdalt' in self.keys:
433 if 'gdalt' in self.keys:
434 self.keys.remove('gdalt')
434 self.keys.remove('gdalt')
435
435
436 def setFile(self):
436 def setFile(self):
437 '''
437 '''
438 Create new cedar file object
438 Create new cedar file object
439 '''
439 '''
440
440
441 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
441 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
442 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
442 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
443
443
444 filename = '{}{}{}'.format(self.mnemonic,
444 filename = '{}{}{}'.format(self.mnemonic,
445 date.strftime('%Y%m%d_%H%M%S'),
445 date.strftime('%Y%m%d_%H%M%S'),
446 self.ext)
446 self.ext)
447
447
448 self.fullname = os.path.join(self.path, filename)
448 self.fullname = os.path.join(self.path, filename)
449
449
450 if os.path.isfile(self.fullname) :
450 if os.path.isfile(self.fullname) :
451 log.warning(
451 log.warning(
452 'Destination file {} already exists, previous file deleted.'.format(
452 'Destination file {} already exists, previous file deleted.'.format(
453 self.fullname),
453 self.fullname),
454 'MADWriter')
454 'MADWriter')
455 os.remove(self.fullname)
455 os.remove(self.fullname)
456
456
457 try:
457 try:
458 log.success(
458 log.success(
459 'Creating file: {}'.format(self.fullname),
459 'Creating file: {}'.format(self.fullname),
460 'MADWriter')
460 'MADWriter')
461 if not os.path.exists(self.path):
461 if not os.path.exists(self.path):
462 os.makedirs(self.path)
462 os.makedirs(self.path)
463 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
463 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
464 except ValueError as e:
464 except ValueError as e:
465 log.error(
465 log.error(
466 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
466 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
467 'MADWriter')
467 'MADWriter')
468 return
468 return
469
469
470 return 1
470 return 1
471
471
472 def writeBlock(self):
472 def writeBlock(self):
473 '''
473 '''
474 Add data records to cedar file taking data from oneDDict and twoDDict
474 Add data records to cedar file taking data from oneDDict and twoDDict
475 attributes.
475 attributes.
476 Allowed parameters in: parcodes.tab
476 Allowed parameters in: parcodes.tab
477 '''
477 '''
478
478
479 startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
479 startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
480 endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
480 endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
481 heights = self.dataOut.heightList
481 heights = self.dataOut.heightList
482
482
483 if self.ext == '.dat':
483 if self.ext == '.dat':
484 for key, value in list(self.twoDDict.items()):
484 for key, value in list(self.twoDDict.items()):
485 if isinstance(value, str):
485 if isinstance(value, str):
486 data = getattr(self.dataOut, value)
486 data = getattr(self.dataOut, value)
487 invalid = numpy.isnan(data)
487 invalid = numpy.isnan(data)
488 data[invalid] = self.missing
488 data[invalid] = self.missing
489 elif isinstance(value, (tuple, list)):
489 elif isinstance(value, (tuple, list)):
490 attr, key = value
490 attr, key = value
491 data = getattr(self.dataOut, attr)
491 data = getattr(self.dataOut, attr)
492 invalid = numpy.isnan(data)
492 invalid = numpy.isnan(data)
493 data[invalid] = self.missing
493 data[invalid] = self.missing
494
494
495 out = {}
495 out = {}
496 for key, value in list(self.twoDDict.items()):
496 for key, value in list(self.twoDDict.items()):
497 key = key.lower()
497 key = key.lower()
498 if isinstance(value, str):
498 if isinstance(value, str):
499 if 'db' in value.lower():
499 if 'db' in value.lower():
500 tmp = getattr(self.dataOut, value.replace('_db', ''))
500 tmp = getattr(self.dataOut, value.replace('_db', ''))
501 SNRavg = numpy.average(tmp, axis=0)
501 SNRavg = numpy.average(tmp, axis=0)
502 tmp = 10*numpy.log10(SNRavg)
502 tmp = 10*numpy.log10(SNRavg)
503 else:
503 else:
504 tmp = getattr(self.dataOut, value)
504 tmp = getattr(self.dataOut, value)
505 out[key] = tmp.flatten()[:len(heights)]
505 out[key] = tmp.flatten()[:len(heights)]
506 elif isinstance(value, (tuple, list)):
506 elif isinstance(value, (tuple, list)):
507 attr, x = value
507 attr, x = value
508 data = getattr(self.dataOut, attr)
508 data = getattr(self.dataOut, attr)
509 out[key] = data[int(x)][:len(heights)]
509 out[key] = data[int(x)][:len(heights)]
510
510
511 a = numpy.array([out[k] for k in self.keys])
511 a = numpy.array([out[k] for k in self.keys])
512 nrows = numpy.array([numpy.isnan(a[:, x]).all() for x in range(len(heights))])
512 nrows = numpy.array([numpy.isnan(a[:, x]).all() for x in range(len(heights))])
513 index = numpy.where(nrows == False)[0]
513 index = numpy.where(nrows == False)[0]
514
514
515 rec = madrigal.cedar.MadrigalDataRecord(
515 rec = madrigal.cedar.MadrigalDataRecord(
516 self.kinst,
516 self.kinst,
517 self.kindat,
517 self.kindat,
518 startTime.year,
518 startTime.year,
519 startTime.month,
519 startTime.month,
520 startTime.day,
520 startTime.day,
521 startTime.hour,
521 startTime.hour,
522 startTime.minute,
522 startTime.minute,
523 startTime.second,
523 startTime.second,
524 startTime.microsecond/10000,
524 startTime.microsecond/10000,
525 endTime.year,
525 endTime.year,
526 endTime.month,
526 endTime.month,
527 endTime.day,
527 endTime.day,
528 endTime.hour,
528 endTime.hour,
529 endTime.minute,
529 endTime.minute,
530 endTime.second,
530 endTime.second,
531 endTime.microsecond/10000,
531 endTime.microsecond/10000,
532 list(self.oneDDict.keys()),
532 list(self.oneDDict.keys()),
533 list(self.twoDDict.keys()),
533 list(self.twoDDict.keys()),
534 len(index),
534 len(index),
535 **self.extra_args
535 **self.extra_args
536 )
536 )
537
537
538 # Setting 1d values
538 # Setting 1d values
539 for key in self.oneDDict:
539 for key in self.oneDDict:
540 rec.set1D(key, getattr(self.dataOut, self.oneDDict[key]))
540 rec.set1D(key, getattr(self.dataOut, self.oneDDict[key]))
541
541
542 # Setting 2d values
542 # Setting 2d values
543 nrec = 0
543 nrec = 0
544 for n in index:
544 for n in index:
545 for key in out:
545 for key in out:
546 rec.set2D(key, nrec, out[key][n])
546 rec.set2D(key, nrec, out[key][n])
547 nrec += 1
547 nrec += 1
548
548
549 self.fp.append(rec)
549 self.fp.append(rec)
550 if self.ext == '.hdf5' and self.counter % 500 == 0 and self.counter > 0:
550 if self.ext == '.hdf5' and self.counter % 500 == 0 and self.counter > 0:
551 self.fp.dump()
551 self.fp.dump()
552 if self.counter % 20 == 0 and self.counter > 0:
552 if self.counter % 20 == 0 and self.counter > 0:
553 log.log(
553 log.log(
554 'Writing {} records'.format(
554 'Writing {} records'.format(
555 self.counter),
555 self.counter),
556 'MADWriter')
556 'MADWriter')
557
557
    def setHeader(self):
        '''
        Flush pending records and add catalog and header sections to the
        cedar file, then close it.
        '''

        log.success('Closing file {}'.format(self.fullname), 'MADWriter')

        # .dat files are written in one shot; HDF5 files are dumped
        if self.ext == '.dat':
            self.fp.write()
        else:
            self.fp.dump()
        self.fp.close()

        # Re-open the finished file to append catalog/header metadata
        header = madrigal.cedar.CatalogHeaderCreator(self.fullname)
        header.createCatalog(**self.catalog)
        header.createHeader(**self.header)
        header.write()
    def putData(self):
        '''
        Write one block per call.

        A new output file is started when a discontinuous block arrives or
        when the configured number of blocks per file has been written.
        '''

        if self.dataOut.flagNoData:
            return 0

        # Close out the current file before rolling over to a new one
        if self.dataOut.flagDiscontinuousBlock or self.counter == self.blocks:
            if self.counter > 0:
                self.setHeader()
            self.counter = 0

        if self.counter == 0:
            self.setFile()

        self.writeBlock()
        self.counter += 1
    def close(self):
        # Finalize the current file, but only if any records were written
        if self.counter > 0:
            self.setHeader()
@@ -1,1435 +1,1435
1 import numpy
1 import numpy
2 import time
2 import time
3 import os
3 import os
4 import h5py
4 import h5py
5 import re
5 import re
6 import datetime
6 import datetime
7
7
8 import schainpy.admin
8 import schainpy.admin
9 from schainpy.model.data.jrodata import *
9 from schainpy.model.data.jrodata import *
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.io.jroIO_base import *
11 from schainpy.model.io.jroIO_base import *
12 from schainpy.utils import log
12 from schainpy.utils import log
13
13
14 @MPDecorator
14
class ParamReader(JRODataReader,ProcessingUnit):
    '''
    Reads HDF5 format files.

    Expected keyword arguments (see ``setup``):
    path
    startDate
    endDate
    startTime
    endTime
    '''

    ext = ".hdf5"
    optchar = "D"
    timezone = None
    startTime = None
    endTime = None
    fileIndex = None
    utcList = None #To select data in the utctime list
    blockList = None #List to blocks to be read from the file
    blocksPerFile = None #Number of blocks to be read
    blockIndex = None
    path = None
    #List of Files
    filenameList = None
    datetimeList = None
    #Hdf5 File
    listMetaname = None
    listMeta = None
    listDataname = None
    listData = None
    listShapes = None
    fp = None
    #dataOut reconstruction
    dataOut = None

    def __init__(self):#, **kwargs):
        ProcessingUnit.__init__(self) #, **kwargs)
        self.dataOut = Parameters()
        return

    def setup(self, **kwargs):
        # Configure the reader: locate matching files offline, read the
        # shared metadata, then open the first file.
        path = kwargs['path']
        startDate = kwargs['startDate']
        endDate = kwargs['endDate']
        startTime = kwargs['startTime']
        endTime = kwargs['endTime']
        walk = kwargs['walk']
        if 'ext' in kwargs:
            ext = kwargs['ext']
        else:
            ext = '.hdf5'
        if 'timezone' in kwargs:
            self.timezone = kwargs['timezone']
        else:
            self.timezone = 'lt'

        print("[Reading] Searching files in offline mode ...")
        pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                         startTime=startTime, endTime=endTime,
                                                         ext=ext, walk=walk)

        if not(filenameList):
            print("There is no files into the folder: %s"%(path))
            sys.exit(-1)

        self.fileIndex = -1
        self.startTime = startTime
        self.endTime = endTime

        self.__readMetadata()

        self.__setNextFileOffline()

        return

    def searchFilesOffLine(self,
                           path,
                           startDate=None,
                           endDate=None,
                           startTime=datetime.time(0,0,0),
                           endTime=datetime.time(23,59,59),
                           ext='.hdf5',
                           walk=True):
        '''
        Search for files in *path* between the given dates/times and fill
        ``self.filenameList`` / ``self.datetimeList``.

        Returns (pathList, filenameList), or (None, None) if nothing matched.
        '''

        expLabel = ''
        self.filenameList = []
        self.datetimeList = []

        pathList = []

        JRODataObj = JRODataReader()
        dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

        if dateList == []:
            print("[Reading] No *%s files in %s from %s to %s)"%(ext, path,
                                                                 datetime.datetime.combine(startDate,startTime).ctime(),
                                                                 datetime.datetime.combine(endDate,endTime).ctime()))

            return None, None

        if len(dateList) > 1:
            print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate))
        else:
            print("[Reading] data was found for the date %s" %(dateList[0]))

        filenameList = []
        datetimeList = []

        #----------------------------------------------------------------------------------

        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                # First filter by date (cheap), then by time (opens the file)
                if not isFileInDateRange(filename, startDate, endDate):
                    continue

                thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print("[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
            return None, None

        print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime))
        print()

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime):

        """
        Return the datetime of the file's first block if the data file
        contains data within the specified time range.

        Inputs:
            filename : full path of a data file
            startDate : start date of the selected range (datetime.date)
            endDate : end date of the selected range (datetime.date)
            startTime : start time of the selected range (datetime.time)
            endTime : end time of the selected range (datetime.time)

        Return:
            The file's datetime when it holds data inside the given range,
            otherwise None.

        Exceptions:
            IOError if the file does not exist or cannot be opened, or the
            header cannot be read.
        """

        try:
            fp = h5py.File(filename,'r')
            grp1 = fp['Data']

        except IOError:
            traceback.print_exc()
            raise IOError("The file %s can't be opened" %(filename))

        #In case has utctime attribute
        grp2 = grp1['utctime']
        # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time
        # NOTE(review): h5py Dataset.value is deprecated; grp2[()] is the
        # modern equivalent — confirm against the h5py version in use.
        thisUtcTime = grp2.value[0]

        fp.close()

        if self.timezone == 'lt':
            # NOTE(review): local-time offset hard-coded as UTC-5 — verify
            # for deployments outside that timezone.
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds()

        #General case
        # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
        #-----------o----------------------------o-----------
        #            startTime                    endTime

        if endTime >= startTime:
            thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
            if numpy.any(thisUtcLog): #If there is one block between the hours mentioned
                return thisDatetime
            return None

        #If endTime < startTime then endTime belongs to the next day
        #<<<<<<<<<<<o                            o>>>>>>>>>>>
        #-----------o----------------------------o-----------
        #            endTime                      startTime

        if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
            return None

        if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
            return None

        if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
            return None

        return thisDatetime

    def __setNextFileOffline(self):
        # Advance to the next file in filenameList; raises when exhausted.

        self.fileIndex += 1
        idFile = self.fileIndex

        if not(idFile < len(self.filenameList)):
            raise schainpy.admin.SchainError("No more Files")
            # NOTE(review): unreachable after the raise above
            return 0

        filename = self.filenameList[idFile]
        filePointer = h5py.File(filename,'r')
        self.filename = filename
        self.fp = filePointer

        print("Setting the file: %s"%self.filename)

        self.__setBlockList()
        self.__readData()
        self.blockIndex = 0
        return 1

    def __setBlockList(self):
        '''
        Selects the data within the times defined

        self.fp
        self.startTime
        self.endTime

        self.blockList
        self.blocksPerFile

        '''
        fp = self.fp
        startTime = self.startTime
        endTime = self.endTime

        grp = fp['Data']
        thisUtcTime = grp['utctime'].value.astype(numpy.float)[0]

        # NOTE(review): original marked this block '#ERROOOOR' — the
        # hard-coded UTC-5 shift mirrors __isFileInTimeRange; confirm.
        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)

        return

    def __readMetadata(self):
        '''
        Reads Metadata

        self.pathMeta
        self.listShapes
        self.listMetaname
        self.listMeta

        '''

        # Metadata is assumed identical across files: read it from the first
        filename = self.filenameList[0]
        fp = h5py.File(filename,'r')
        gp = fp['Metadata']

        listMetaname = []
        listMetadata = []
        for item in list(gp.items()):
            name = item[0]

            if name=='array dimensions':
                table = gp[name][:]
                listShapes = {}
                for shapes in table:
                    listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4],shapes[5]])
            else:
                data = gp[name].value
                listMetaname.append(name)
                listMetadata.append(data)

        self.listShapes = listShapes
        self.listMetaname = listMetaname
        self.listMeta = listMetadata

        fp.close()
        return

    def __readData(self):
        # Read every dataset in the 'Data' group of the current file,
        # reshaped according to the metadata shapes.
        grp = self.fp['Data']
        listdataname = []
        listdata = []

        for item in list(grp.items()):
            name = item[0]
            listdataname.append(name)

            array = self.__setDataArray(grp[name],self.listShapes[name])
            listdata.append(array)

        self.listDataname = listdataname
        self.listData = listdata
        return

    def __setDataArray(self, dataset, shapes):
        # Build a numpy array from *dataset* restricted to self.blockList,
        # honoring the storage mode encoded in *shapes*.

        nDims = shapes[0]
        nDim2 = shapes[1] #Dimension 0
        nDim1 = shapes[2] #Dimension 1, number of Points or Parameters
        nDim0 = shapes[3] #Dimension 2, number of samples or ranges
        mode = shapes[4] #Mode of storing
        blockList = self.blockList
        blocksPerFile = self.blocksPerFile

        #Depending on what mode the data was stored
        if mode == 0: #Divided in channels
            arrayData = dataset.value.astype(numpy.float)[0][blockList]
        if mode == 1: #Divided in parameter
            strds = 'table'
            nDatas = nDim1
            newShapes = (blocksPerFile,nDim2,nDim0)
        elif mode==2: #Concatenated in a table
            strds = 'table0'
            arrayData = dataset[strds].value
            #Selecting part of the dataset
            utctime = arrayData[:,0]
            u, indices = numpy.unique(utctime, return_index=True)

            if blockList.size != indices.size:
                indMin = indices[blockList[0]]
                if blockList[1] + 1 >= indices.size:
                    arrayData = arrayData[indMin:,:]
                else:
                    indMax = indices[blockList[1] + 1]
                    arrayData = arrayData[indMin:indMax,:]
            return arrayData

        # One dimension
        if nDims == 0:
            arrayData = dataset.value.astype(numpy.float)[0][blockList]

        # Two dimensions
        elif nDims == 2:
            arrayData = numpy.zeros((blocksPerFile,nDim1,nDim0))
            newShapes = (blocksPerFile,nDim0)
            nDatas = nDim1

            for i in range(nDatas):
                data = dataset[strds + str(i)].value
                arrayData[:,i,:] = data[blockList,:]

        # Three dimensions
        else:
            arrayData = numpy.zeros((blocksPerFile,nDim2,nDim1,nDim0))
            for i in range(nDatas):

                data = dataset[strds + str(i)].value

                for b in range(blockList.size):
                    arrayData[b,:,i,:] = data[:,:,blockList[b]]

        return arrayData

    def __setDataOut(self):
        # Copy metadata and the current block's data into self.dataOut.
        listMeta = self.listMeta
        listMetaname = self.listMetaname
        listDataname = self.listDataname
        listData = self.listData
        listShapes = self.listShapes

        blockIndex = self.blockIndex
        # blockList = self.blockList

        for i in range(len(listMeta)):
            setattr(self.dataOut,listMetaname[i],listMeta[i])

        for j in range(len(listData)):
            nShapes = listShapes[listDataname[j]][0]
            mode = listShapes[listDataname[j]][4]
            if nShapes == 1:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
            elif nShapes > 1:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex,:])
            elif mode==0:
                setattr(self.dataOut,listDataname[j],listData[j][blockIndex])
            #Mode Meteors
            elif mode ==2:
                selectedData = self.__selectDataMode2(listData[j], blockIndex)
                setattr(self.dataOut, listDataname[j], selectedData)
        return

    def __selectDataMode2(self, data, blockIndex):
        # Select the table rows whose utctime matches the current block.
        utctime = data[:,0]
        aux, indices = numpy.unique(utctime, return_inverse=True)
        selInd = numpy.where(indices == blockIndex)[0]
        selData = data[selInd,:]

        return selData

    def getData(self):
        # Move to the next block (or next file) and publish it in dataOut.

        if self.blockIndex==self.blocksPerFile:
            if not( self.__setNextFileOffline() ):
                self.dataOut.flagNoData = True
                return 0

        self.__setDataOut()
        self.dataOut.flagNoData = False

        self.blockIndex += 1

        return

    def run(self, **kwargs):
        # Processing-unit entry point: lazy setup, then one block per call.

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

        return
461
@MPDecorator
class ParamWriter(Operation):
    '''
    HDF5 Writer, stores parameters data in HDF5 format files

    path: path where the files will be stored
    blocksPerFile: number of blocks that will be saved in per HDF5 format file
    mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
    metadataList: list of attributes that will be stored as metadata
    dataList: list of attributes that will be stores as data
    '''

    ext = ".hdf5"          # output file extension
    optchar = "D"          # data-file name prefix
    metaoptchar = "M"      # metadata-file name prefix
    metaFile = None        # name of the last metadata file written
    filename = None        # full path of the current data file
    path = None            # output directory
    setFile = None         # current set number within the day
    fp = None              # open h5py file handle
    grp = None             # "Data" group of the open file
    ds = None              # list of live dataset handles (parallel to dsList)
    firsttime = True       # True until the first block has been written
    #Configurations
    blocksPerFile = None
    blockIndex = None      # blocks written into the current file
    dataOut = None         # data object being stored
    #Data Arrays
    dataList = None        # attribute names stored as data
    metadataList = None    # attribute names stored as metadata
    dsList = None #List of dictionaries with dataset properties
    tableDim = None        # structured array describing each variable's layout
    dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
    currentDay = None      # day-of-year of the data being written
    lastTime = None        # utctime of the previous block (for timeFlag)
    setType = None         # None -> sequential set numbers, else minute-of-day

    def __init__(self):
        # Writers keep the MPDecorator/Operation machinery (see controller).
        Operation.__init__(self)
        return
503
503
    def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
        """Configure the writer and pre-compute, for every attribute named in
        *dataList*, how it will be laid out inside the HDF5 file.

        Builds:
          self.dsList   - one dict per output dataset (variable, mode, nDim,
                          shape, dsName, dsNumber)
          self.tableDim - structured array (self.dtype) describing each
                          variable's dimensions, written as file metadata.

        Returns 0 (abort) if any requested attribute is currently None.
        """
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = dataList
        self.dataOut = dataOut
        self.mode = mode
        # One stacking mode per data attribute; default is mode 1 for all.
        if self.mode is not None:
            self.mode = numpy.zeros(len(self.dataList)) + mode
        else:
            self.mode = numpy.ones(len(self.dataList))

        self.setType = setType

        # One row per attribute: [nDimensions, dim2, dim1, dim0, mode]
        arrayDim = numpy.zeros((len(self.dataList),5))

        #Table dimensions
        dtype0 = self.dtype
        tableList = []

        #Dictionary and list of tables
        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            dataAux = getattr(self.dataOut, self.dataList[i])
            dsDict['variable'] = self.dataList[i]
            #--------------------- Conditionals ------------------------
            #There is no data
            if dataAux is None:
                # Abort the whole setup if any requested attribute is missing.
                return 0

            # Scalars -> mode 0 (a single 1xN dataset grown per block).
            # NOTE(review): numpy.float is an alias of builtin float (removed
            # in NumPy >= 1.24); numpy.floating may be the intended check.
            if isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
                dsDict['mode'] = 0
                dsDict['nDim'] = 0
                arrayDim[i,0] = 0
                dsList.append(dsDict)

            #Mode 2: meteors
            elif self.mode[i] == 2:
                dsDict['dsName'] = 'table0'
                dsDict['mode'] = 2 # Mode meteors
                dsDict['shape'] = dataAux.shape[-1]
                dsDict['nDim'] = 0
                dsDict['dsNumber'] = 1
                arrayDim[i,3] = dataAux.shape[-1]
                arrayDim[i,4] = self.mode[i] #Mode the data was stored
                dsList.append(dsDict)

            #Mode 1
            else:
                arrayDim0 = dataAux.shape #Data dimensions
                arrayDim[i,0] = len(arrayDim0) #Number of array dimensions
                arrayDim[i,4] = self.mode[i] #Mode the data was stored
                strtable = 'table'
                dsDict['mode'] = 1 # Mode parameters

                # Three-dimension arrays: one table per slice along axis 1.
                if len(arrayDim0) == 3:
                    arrayDim[i,1:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,2:4]
                    dsDict['nDim'] = 3

                    for j in range(nTables):
                        # Copy so every table entry keeps its own dsName.
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # Two-dimension arrays: one table per row.
                elif len(arrayDim0) == 2:
                    arrayDim[i,2:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,3]
                    dsDict['nDim'] = 2

                    for j in range(nTables):
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # One-dimension arrays: a single table.
                elif len(arrayDim0) == 1:
                    arrayDim[i,3] = arrayDim0[0]
                    dsDict['shape'] = arrayDim0[0]
                    dsDict['dsNumber'] = 1
                    dsDict['dsName'] = strtable + str(0)
                    dsDict['nDim'] = 1
                    dsList.append(dsDict)

            # One record per attribute in the dimensions metadata table.
            table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0)
            tableList.append(table)

        self.dsList = dsList
        self.tableDim = numpy.array(tableList, dtype = dtype0)
        self.blockIndex = 0
        timeTuple = time.localtime(dataOut.utctime)
        self.currentDay = timeTuple.tm_yday
606
606
607 def putMetadata(self):
607 def putMetadata(self):
608
608
609 fp = self.createMetadataFile()
609 fp = self.createMetadataFile()
610 self.writeMetadata(fp)
610 self.writeMetadata(fp)
611 fp.close()
611 fp.close()
612 return
612 return
613
613
    def createMetadataFile(self):
        """Create a new HDF5 metadata file and return it open for writing.

        The file name is '<M><YYYY><DDD><SSS>.hdf5' where SSS is either the
        next sequential set number found in the day folder (setType is None)
        or the minute-of-day (any other setType). Creates the day folder
        'dYYYYDDD' under self.path if needed.
        """
        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.localtime(self.dataOut.utctime)

        subfolder = ''
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1 #initialize the set counter

        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1 #initialize the set counter

        else:
            filesList = os.listdir( fullpath )
            filesList = sorted( filesList, key=str.lower )
            if len( filesList ) > 0:
                # Keep only metadata files ('M' prefix) to find the last set.
                filesList = [k for k in filesList if k.startswith(self.metaoptchar)]
                filen = filesList[-1]
                # The filename must have the following format:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] ) #resume the counter from the last file's set number
                else:
                    setFile = -1
            else:
                setFile = -1 #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )
        else:
            # Use the minute of the day as the set number instead.
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )

        filename = os.path.join( path, subfolder, file )
        self.metaFile = file
        #Setting HDF5 File
        fp = h5py.File(filename,'w')

        return fp
672
672
673 def writeMetadata(self, fp):
673 def writeMetadata(self, fp):
674
674
675 grp = fp.create_group("Metadata")
675 grp = fp.create_group("Metadata")
676 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
676 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
677
677
678 for i in range(len(self.metadataList)):
678 for i in range(len(self.metadataList)):
679 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
679 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
680 return
680 return
681
681
682 def timeFlag(self):
682 def timeFlag(self):
683 currentTime = self.dataOut.utctime
683 currentTime = self.dataOut.utctime
684
684
685 if self.lastTime is None:
685 if self.lastTime is None:
686 self.lastTime = currentTime
686 self.lastTime = currentTime
687
687
688 #Day
688 #Day
689 timeTuple = time.localtime(currentTime)
689 timeTuple = time.localtime(currentTime)
690 dataDay = timeTuple.tm_yday
690 dataDay = timeTuple.tm_yday
691
691
692 #Time
692 #Time
693 timeDiff = currentTime - self.lastTime
693 timeDiff = currentTime - self.lastTime
694
694
695 #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
695 #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
696 if dataDay != self.currentDay:
696 if dataDay != self.currentDay:
697 self.currentDay = dataDay
697 self.currentDay = dataDay
698 return True
698 return True
699 elif timeDiff > 3*60*60:
699 elif timeDiff > 3*60*60:
700 self.lastTime = currentTime
700 self.lastTime = currentTime
701 return True
701 return True
702 else:
702 else:
703 self.lastTime = currentTime
703 self.lastTime = currentTime
704 return False
704 return False
705
705
    def setNextFile(self):
        """Create the next data file ('D' prefix) for the current day: write
        the metadata group, then pre-create every dataset declared in
        self.dsList with growable (maxshape=None) axes, and reset the
        per-file counters (blockIndex, firsttime)."""
        ext = self.ext
        path = self.path
        setFile = self.setFile
        mode = self.mode

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )

        if os.path.exists(fullpath):
            filesList = os.listdir( fullpath )
            # NOTE(review): this keeps any file containing 'M' anywhere in its
            # name, not only the metadata prefix — confirm intended.
            filesList = [k for k in filesList if 'M' in k]
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # The filename must have the following format:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] ) #resume the counter from the last file's set number
                else:
                    setFile = -1
            else:
                setFile = -1 #initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1 #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )
        else:
            # Use the minute of the day as the set number instead.
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )

        filename = os.path.join( path, subfolder, file )

        #Setting HDF5 File
        fp = h5py.File(filename,'w')
        #write metadata
        self.writeMetadata(fp)
        #Write data
        grp = fp.create_group("Data")
        ds = []
        data = []
        dsList = self.dsList
        i = 0
        while i < len(dsList):
            dsInfo = dsList[i]
            #One-dimension data (scalars): a 1xN dataset grown per block.
            if dsInfo['mode'] == 0:
                ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64)
                ds.append(ds0)
                data.append([])
                i += 1
                continue

            # Meteor tables (mode 2): rows are appended as they arrive.
            elif dsInfo['mode'] == 2:
                grp0 = grp.create_group(dsInfo['variable'])
                ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data = numpy.zeros((1,dsInfo['shape'])) , maxshape=(None,dsInfo['shape']), chunks=True)
                ds.append(ds0)
                data.append([])
                i += 1
                continue

            # Parameter mode (1): one sub-dataset per declared table.
            elif dsInfo['mode'] == 1:
                grp0 = grp.create_group(dsInfo['variable'])

                for j in range(dsInfo['dsNumber']):
                    # dsList holds one consecutive entry per table of this
                    # variable, so re-read dsInfo at each step.
                    dsInfo = dsList[i]
                    tableName = dsInfo['dsName']

                    if dsInfo['nDim'] == 3:
                        shape = dsInfo['shape'].astype(int)
                        ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1) , data = numpy.zeros((shape[0],shape[1],1)), maxshape = (None,shape[1],None), chunks=True)
                    else:
                        shape = int(dsInfo['shape'])
                        ds0 = grp0.create_dataset(tableName, (1,shape), data = numpy.zeros((1,shape)) , maxshape=(None,shape), chunks=True)

                    ds.append(ds0)
                    data.append([])
                    i += 1

        fp.flush()
        fp.close()

        log.log('creating file: {}'.format(filename), 'Writing')
        self.filename = filename
        self.ds = ds
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return
811
811
812 def putData(self):
812 def putData(self):
813
813
814 if self.blockIndex == self.blocksPerFile or self.timeFlag():
814 if self.blockIndex == self.blocksPerFile or self.timeFlag():
815 self.setNextFile()
815 self.setNextFile()
816
816
817 self.readBlock()
817 self.readBlock()
818 self.setBlock() #Prepare data to be written
818 self.setBlock() #Prepare data to be written
819 self.writeBlock() #Write data
819 self.writeBlock() #Write data
820
820
821 return
821 return
822
822
    def readBlock(self):

        '''
        Reopen the current output file and rebuild self.ds with live dataset
        handles, one per entry in self.dsList, so the next block can be
        appended in place.

        Updates: self.fp, self.grp, self.ds
        '''
        dsList = self.dsList
        ds = self.ds
        #Setting HDF5 File
        fp = h5py.File(self.filename,'r+')
        grp = fp["Data"]
        ind = 0

        while ind < len(dsList):
            dsInfo = dsList[ind]

            if dsInfo['mode'] == 0:
                # Scalar datasets live directly under "Data".
                ds0 = grp[dsInfo['variable']]
                ds[ind] = ds0
                ind += 1
            else:
                # Array datasets live in a per-variable sub-group holding
                # one table per consecutive dsList entry.
                grp0 = grp[dsInfo['variable']]

                for j in range(dsInfo['dsNumber']):
                    dsInfo = dsList[ind]
                    ds0 = grp0[dsInfo['dsName']]
                    ds[ind] = ds0
                    ind += 1

        self.fp = fp
        self.grp = grp
        self.ds = ds

        return
860
860
861 def setBlock(self):
861 def setBlock(self):
862 '''
862 '''
863 data Array configured
863 data Array configured
864
864
865
865
866 self.data
866 self.data
867 '''
867 '''
868 #Creating Arrays
868 #Creating Arrays
869 dsList = self.dsList
869 dsList = self.dsList
870 data = self.data
870 data = self.data
871 ind = 0
871 ind = 0
872
872
873 while ind < len(dsList):
873 while ind < len(dsList):
874 dsInfo = dsList[ind]
874 dsInfo = dsList[ind]
875 dataAux = getattr(self.dataOut, dsInfo['variable'])
875 dataAux = getattr(self.dataOut, dsInfo['variable'])
876
876
877 mode = dsInfo['mode']
877 mode = dsInfo['mode']
878 nDim = dsInfo['nDim']
878 nDim = dsInfo['nDim']
879
879
880 if mode == 0 or mode == 2 or nDim == 1:
880 if mode == 0 or mode == 2 or nDim == 1:
881 data[ind] = dataAux
881 data[ind] = dataAux
882 ind += 1
882 ind += 1
883 # elif nDim == 1:
883 # elif nDim == 1:
884 # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
884 # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
885 # ind += 1
885 # ind += 1
886 elif nDim == 2:
886 elif nDim == 2:
887 for j in range(dsInfo['dsNumber']):
887 for j in range(dsInfo['dsNumber']):
888 data[ind] = dataAux[j,:]
888 data[ind] = dataAux[j,:]
889 ind += 1
889 ind += 1
890 elif nDim == 3:
890 elif nDim == 3:
891 for j in range(dsInfo['dsNumber']):
891 for j in range(dsInfo['dsNumber']):
892 data[ind] = dataAux[:,j,:]
892 data[ind] = dataAux[:,j,:]
893 ind += 1
893 ind += 1
894
894
895 self.data = data
895 self.data = data
896 return
896 return
897
897
    def writeBlock(self):
        '''
        Saves the block in the HDF5 file: resizes each dataset according to
        its mode/dimensionality and appends the data prepared by setBlock.
        '''
        dsList = self.dsList

        for i in range(len(self.ds)):
            dsInfo = dsList[i]
            nDim = dsInfo['nDim']
            mode = dsInfo['mode']

            # First time: datasets were created with placeholder shapes, so
            # resize them to the first real block before assigning.
            if self.firsttime:
                if type(self.data[i]) == numpy.ndarray:

                    if nDim == 3:
                        self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1))
                        self.ds[i].resize(self.data[i].shape)
                    if mode == 2:
                        self.ds[i].resize(self.data[i].shape)
                    self.ds[i][:] = self.data[i]
            else:

                # From second time
                # Meteors! append the new rows at the end.
                if mode == 2:
                    dataShape = self.data[i].shape
                    dsShape = self.ds[i].shape
                    self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1]))
                    self.ds[i][dsShape[0]:,:] = self.data[i]
                # No dimension (scalar): grow the single row by one column.
                elif mode == 0:
                    self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
                    self.ds[i][0,-1] = self.data[i]
                # One dimension: append one row.
                elif nDim == 1:
                    self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
                    self.ds[i][-1,:] = self.data[i]
                # Two dimension: write into the row for this block index.
                elif nDim == 2:
                    self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1]))
                    self.ds[i][self.blockIndex,:] = self.data[i]
                # Three dimensions: append one slice along the last axis.
                elif nDim == 3:
                    self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1))
                    self.ds[i][:,:,-1] = self.data[i]

        self.firsttime = False
        self.blockIndex += 1

        #Close to save changes
        self.fp.flush()
        self.fp.close()
        return
952
952
953 def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
953 def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None):
954
954
955 self.dataOut = dataOut
955 self.dataOut = dataOut
956 if not(self.isConfig):
956 if not(self.isConfig):
957 self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
957 self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
958 metadataList=metadataList, dataList=dataList, mode=mode,
958 metadataList=metadataList, dataList=dataList, mode=mode,
959 setType=setType)
959 setType=setType)
960
960
961 self.isConfig = True
961 self.isConfig = True
962 self.setNextFile()
962 self.setNextFile()
963
963
964 self.putData()
964 self.putData()
965 return
965 return
966
966
967
967
968 @MPDecorator
968
class ParameterReader(Reader, ProcessingUnit):
    '''
    Reads HDF5 format files
    '''

    def __init__(self):
        # Plain ProcessingUnit (no MPDecorator): units now run in-process
        # and communicate through queues (see controller).
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()    # output data object filled per block
        self.ext = ".hdf5"             # file extension
        self.optchar = "D"             # data-file prefix character
        self.timezone = "lt"           # 'lt' shifts timestamps by -5h (UTC-5)
        self.listMetaname = []         # metadata attribute names
        self.listMeta = []             # metadata values
        self.listDataname = []         # data attribute names
        self.listData = []             # data arrays read from file
        self.listShapes = []           # per-variable shape info
        self.open_file = h5py.File    # opener used by the Reader base class
        self.open_mode = 'r'
        self.metadata = False          # optional JSON metadata override
        self.filefmt = "*%Y%j***"      # file-name date pattern
        self.folderfmt = "*%Y%j"       # day-folder date pattern
990
990
    def setup(self, **kwargs):
        """Configure the reader from keyword arguments and locate the first
        file: polls the path in online mode (up to nTries attempts separated
        by self.delay seconds) or scans the whole date range offline.

        Raises SchainError when online polling finds no valid file.
        """
        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt)

                try:
                    fullpath = next(fullpath)
                except:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not(fullpath):
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            # File names follow x YYYY DDD SSS .ext; start one set before the
            # found file so it is picked up by the normal advance logic.
            pathname, filename = os.path.split(fullpath)
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return
1035
1035
    def readFirstHeader(self):
        '''Read metadata and data'''

        # Load the file's metadata and data arrays, select the blocks within
        # the configured time window, then rewind the block cursor.
        self.__readMetadata()
        self.__readData()
        self.__setBlockList()
        self.blockIndex = 0

        return
1045
1045
    def __setBlockList(self):
        '''
        Selects the data within the times defined

        Reads: self.fp, self.startTime, self.endTime
        Sets:  self.blockList, self.blocksPerFile, self.interval
        '''

        startTime = self.startTime
        endTime = self.endTime

        index = self.listDataname.index('utctime')
        thisUtcTime = self.listData[index]
        # Smallest gap between consecutive timestamps = acquisition interval.
        self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])

        # NOTE(review): this subtracts in place, so the array stored in
        # self.listData is shifted too — apparently relied on downstream.
        if self.timezone == 'lt':
            thisUtcTime -= 5*3600  # local time assumed UTC-5

        # Add the 5h back so the reference date is taken in UTC terms.
        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        # Indices of the blocks that fall inside [startTime, endTime).
        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)
        return
1081
1081
1082 def __readMetadata(self):
1082 def __readMetadata(self):
1083 '''
1083 '''
1084 Reads Metadata
1084 Reads Metadata
1085 '''
1085 '''
1086
1086
1087 listMetaname = []
1087 listMetaname = []
1088 listMetadata = []
1088 listMetadata = []
1089 if 'Metadata' in self.fp:
1089 if 'Metadata' in self.fp:
1090 gp = self.fp['Metadata']
1090 gp = self.fp['Metadata']
1091 for item in list(gp.items()):
1091 for item in list(gp.items()):
1092 name = item[0]
1092 name = item[0]
1093
1093
1094 if name=='variables':
1094 if name=='variables':
1095 table = gp[name][:]
1095 table = gp[name][:]
1096 listShapes = {}
1096 listShapes = {}
1097 for shapes in table:
1097 for shapes in table:
1098 listShapes[shapes[0].decode()] = numpy.array([shapes[1]])
1098 listShapes[shapes[0].decode()] = numpy.array([shapes[1]])
1099 else:
1099 else:
1100 data = gp[name].value
1100 data = gp[name].value
1101 listMetaname.append(name)
1101 listMetaname.append(name)
1102 listMetadata.append(data)
1102 listMetadata.append(data)
1103 elif self.metadata:
1103 elif self.metadata:
1104 metadata = json.loads(self.metadata)
1104 metadata = json.loads(self.metadata)
1105 listShapes = {}
1105 listShapes = {}
1106 for tup in metadata:
1106 for tup in metadata:
1107 name, values, dim = tup
1107 name, values, dim = tup
1108 if dim == -1:
1108 if dim == -1:
1109 listMetaname.append(name)
1109 listMetaname.append(name)
1110 listMetadata.append(self.fp[values].value)
1110 listMetadata.append(self.fp[values].value)
1111 else:
1111 else:
1112 listShapes[name] = numpy.array([dim])
1112 listShapes[name] = numpy.array([dim])
1113 else:
1113 else:
1114 raise IOError('Missing Metadata group in file or metadata info')
1114 raise IOError('Missing Metadata group in file or metadata info')
1115
1115
1116 self.listShapes = listShapes
1116 self.listShapes = listShapes
1117 self.listMetaname = listMetaname
1117 self.listMetaname = listMetaname
1118 self.listMeta = listMetadata
1118 self.listMeta = listMetadata
1119
1119
1120 return
1120 return
1121
1121
1122 def __readData(self):
1122 def __readData(self):
1123
1123
1124 listdataname = []
1124 listdataname = []
1125 listdata = []
1125 listdata = []
1126
1126
1127 if 'Data' in self.fp:
1127 if 'Data' in self.fp:
1128 grp = self.fp['Data']
1128 grp = self.fp['Data']
1129 for item in list(grp.items()):
1129 for item in list(grp.items()):
1130 name = item[0]
1130 name = item[0]
1131 listdataname.append(name)
1131 listdataname.append(name)
1132 dim = self.listShapes[name][0]
1132 dim = self.listShapes[name][0]
1133 if dim == 0:
1133 if dim == 0:
1134 array = grp[name].value
1134 array = grp[name].value
1135 else:
1135 else:
1136 array = []
1136 array = []
1137 for i in range(dim):
1137 for i in range(dim):
1138 array.append(grp[name]['table{:02d}'.format(i)].value)
1138 array.append(grp[name]['table{:02d}'.format(i)].value)
1139 array = numpy.array(array)
1139 array = numpy.array(array)
1140
1140
1141 listdata.append(array)
1141 listdata.append(array)
1142 elif self.metadata:
1142 elif self.metadata:
1143 metadata = json.loads(self.metadata)
1143 metadata = json.loads(self.metadata)
1144 for tup in metadata:
1144 for tup in metadata:
1145 name, values, dim = tup
1145 name, values, dim = tup
1146 listdataname.append(name)
1146 listdataname.append(name)
1147 if dim == -1:
1147 if dim == -1:
1148 continue
1148 continue
1149 elif dim == 0:
1149 elif dim == 0:
1150 array = self.fp[values].value
1150 array = self.fp[values].value
1151 else:
1151 else:
1152 array = []
1152 array = []
1153 for var in values:
1153 for var in values:
1154 array.append(self.fp[var].value)
1154 array.append(self.fp[var].value)
1155 array = numpy.array(array)
1155 array = numpy.array(array)
1156 listdata.append(array)
1156 listdata.append(array)
1157 else:
1157 else:
1158 raise IOError('Missing Data group in file or metadata info')
1158 raise IOError('Missing Data group in file or metadata info')
1159
1159
1160 self.listDataname = listdataname
1160 self.listDataname = listdataname
1161 self.listData = listdata
1161 self.listData = listdata
1162 return
1162 return
1163
1163
1164 def getData(self):
1164 def getData(self):
1165
1165
1166 for i in range(len(self.listMeta)):
1166 for i in range(len(self.listMeta)):
1167 setattr(self.dataOut, self.listMetaname[i], self.listMeta[i])
1167 setattr(self.dataOut, self.listMetaname[i], self.listMeta[i])
1168
1168
1169 for j in range(len(self.listData)):
1169 for j in range(len(self.listData)):
1170 dim = self.listShapes[self.listDataname[j]][0]
1170 dim = self.listShapes[self.listDataname[j]][0]
1171 if dim == 0:
1171 if dim == 0:
1172 setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex])
1172 setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex])
1173 else:
1173 else:
1174 setattr(self.dataOut, self.listDataname[j], self.listData[j][:,self.blockIndex])
1174 setattr(self.dataOut, self.listDataname[j], self.listData[j][:,self.blockIndex])
1175
1175
1176 self.dataOut.paramInterval = self.interval
1176 self.dataOut.paramInterval = self.interval
1177 self.dataOut.flagNoData = False
1177 self.dataOut.flagNoData = False
1178 self.blockIndex += 1
1178 self.blockIndex += 1
1179
1179
1180 return
1180 return
1181
1181
1182 def run(self, **kwargs):
1182 def run(self, **kwargs):
1183
1183
1184 if not(self.isConfig):
1184 if not(self.isConfig):
1185 self.setup(**kwargs)
1185 self.setup(**kwargs)
1186 self.isConfig = True
1186 self.isConfig = True
1187
1187
1188 if self.blockIndex == self.blocksPerFile:
1188 if self.blockIndex == self.blocksPerFile:
1189 self.setNextFile()
1189 self.setNextFile()
1190
1190
1191 self.getData()
1191 self.getData()
1192
1192
1193 return
1193 return
1194
1194
@MPDecorator
class ParameterWriter(Operation):
    '''
    HDF5 Writer, stores parameters data in HDF5 format files

    path: path where the files will be stored
    blocksPerFile: number of blocks that will be saved in per HDF5 format file
    mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
    metadataList: list of attributes that will be stored as metadata
    dataList: list of attributes that will be stores as data
    '''

    ext = ".hdf5"
    optchar = "D"
    metaoptchar = "M"
    metaFile = None
    filename = None
    path = None
    setFile = None
    fp = None
    grp = None
    ds = None
    firsttime = True
    # Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    # Data Arrays
    dataList = None
    metadataList = None
    dsList = None  # List of dictionaries with dataset properties
    tableDim = None
    # Record dtype of the Metadata/variables table: (name, nDim)
    dtype = [('name', 'S20'), ('nDim', 'i')]
    currentDay = None
    lastTime = None

    def __init__(self):

        Operation.__init__(self)
        return

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
        '''
        Store the configuration and inspect self.dataOut to build the
        dataset layout (self.dsList) and the variables table
        (self.tableDim) written into every output file.

        setType selects the file numbering scheme used by setNextFile():
        None -> sequential set number, otherwise minutes-of-day.
        '''
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = dataList
        self.setType = setType

        tableList = []
        dsList = []

        for attr in self.dataList:
            dsDict = {}
            dataAux = getattr(self.dataOut, attr)
            dsDict['variable'] = attr

            if dataAux is None:
                # Attribute not populated: do not create a dataset for it.
                continue
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                # numpy.float was removed in NumPy 1.24 (it was an alias of
                # the builtin float). numpy.floating additionally matches
                # float32/float64 scalars, which previously fell into the
                # array branch and crashed on dataAux.shape[0].
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]

            dsList.append(dsDict)
            tableList.append((attr, dsDict['nDim']))

        self.dsList = dsList
        self.tableDim = numpy.array(tableList, dtype=self.dtype)
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        '''
        Return True when the incoming data should start a new file:
        the day-of-year changed, or more than 3 hours passed since the
        previous block.
        '''
        currentTime = self.dataOut.utctime
        timeTuple = time.localtime(currentTime)
        dataDay = timeTuple.tm_yday

        if self.lastTime is None:
            # First block seen: initialize the reference time/day.
            # (currentDay was a datetime.date from setup(); from here on
            # it holds the integer tm_yday.)
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # New file if the day changed or the gap between blocks exceeds 3 h
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
        '''Entry point: configure on first call, then append one block per call.'''

        self.dataOut = dataOut
        if not(self.isConfig):
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList,
                       setType=setType)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        '''
        Open a new HDF5 output file named after the data's date
        (subfolder dYYYYDDD, file <optchar>YYYYDDDSSS<ext>) and write the
        metadata and the (empty) datasets into it.
        '''
        ext = self.ext
        path = self.path
        setFile = self.setFile  # NOTE(review): overwritten below before use

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            # Continue numbering after the highest existing set in the folder.
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # Expected filename layout (hex offsets):
                # 0 1234 567 89A BCDE
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # Resume from the set number of the last existing file.
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # start the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1  # start the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext)
        else:
            # Alternative numbering: minutes elapsed in the day.
            setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext)

        self.filename = os.path.join(path, subfolder, file)

        # Setting HDF5 File
        self.fp = h5py.File(self.filename, 'w')
        # write metadata
        self.writeMetadata(self.fp)
        # Write data
        self.writeData(self.fp)

    def writeMetadata(self, fp):
        '''Write the variables table and each requested metadata attribute
        into the file's Metadata group.'''

        grp = fp.create_group("Metadata")
        grp.create_dataset('variables', data=self.tableDim, dtype=self.dtype)

        for meta in self.metadataList:
            if not hasattr(self.dataOut, meta):
                # Best-effort: warn and skip missing attributes.
                log.warning('Metadata: `{}` not found'.format(meta), self.name)
                continue
            value = getattr(self.dataOut, meta)
            grp.create_dataset(meta, data=value)
        return

    def writeData(self, fp):
        '''
        Create the (chunked, blocksPerFile-long) datasets in the Data group
        and remember, for each dataset, which dataOut attribute/channel it
        stores (self.data) so putData() can fill them block by block.
        '''

        grp = fp.create_group("Data")
        dtsets = []
        data = []

        for dsInfo in self.dsList:
            if dsInfo['nDim'] == 0:
                # One scalar per block.
                ds = grp.create_dataset(
                    dsInfo['variable'],
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                # channel index -1 marks "whole attribute, no channel axis"
                data.append((dsInfo['variable'], -1))
            else:
                # One sub-dataset per channel (first axis of the array).
                sgrp = grp.create_group(dsInfo['variable'])
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        'table{:02d}'.format(i),
                        (self.blocksPerFile, ) + dsInfo['shape'][1:],
                        chunks=True)
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        '''Append the current dataOut block to every dataset, rolling over
        to a new file when full or when timeFlag() fires.'''

        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)

        return

    def closeFile(self):
        '''Shrink partially-filled datasets to the written length and close
        the current file. No-op if no file was ever opened.'''

        if self.fp is None:
            # Nothing was written (e.g. close() before any data arrived).
            return

        if self.blockIndex != self.blocksPerFile:
            # Chunked datasets may be shrunk to the actual block count.
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        self.fp.flush()
        self.fp.close()

    def close(self):

        self.closeFile()
@@ -1,527 +1,527
1 '''
1 '''
2 Created on Jul 2, 2014
2 Created on Jul 2, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 '''
5 '''
6 import numpy
6 import numpy
7
7
8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 from schainpy.model.data.jrodata import Spectra
11 from schainpy.model.data.jrodata import Spectra
12 from schainpy.utils import log
12 from schainpy.utils import log
13
13
14 @MPDecorator
14
15 class SpectraReader(JRODataReader, ProcessingUnit):
15 class SpectraReader(JRODataReader, ProcessingUnit):
16 """
16 """
17 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
17 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
18 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
18 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
19 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
19 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
20
20
21 paresCanalesIguales * alturas * perfiles (Self Spectra)
21 paresCanalesIguales * alturas * perfiles (Self Spectra)
22 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
22 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
23 canales * alturas (DC Channels)
23 canales * alturas (DC Channels)
24
24
25 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
25 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
26 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
26 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
27 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
27 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
28 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
28 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
29
29
30 Example:
30 Example:
31 dpath = "/home/myuser/data"
31 dpath = "/home/myuser/data"
32
32
33 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
33 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
34
34
35 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
35 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
36
36
37 readerObj = SpectraReader()
37 readerObj = SpectraReader()
38
38
39 readerObj.setup(dpath, startTime, endTime)
39 readerObj.setup(dpath, startTime, endTime)
40
40
41 while(True):
41 while(True):
42
42
43 readerObj.getData()
43 readerObj.getData()
44
44
45 print readerObj.data_spc
45 print readerObj.data_spc
46
46
47 print readerObj.data_cspc
47 print readerObj.data_cspc
48
48
49 print readerObj.data_dc
49 print readerObj.data_dc
50
50
51 if readerObj.flagNoMoreFiles:
51 if readerObj.flagNoMoreFiles:
52 break
52 break
53
53
54 """
54 """
55
55
56 def __init__(self):#, **kwargs):
56 def __init__(self):#, **kwargs):
57 """
57 """
58 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
58 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
59
59
60 Inputs:
60 Inputs:
61 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
61 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
62 almacenar un perfil de datos cada vez que se haga un requerimiento
62 almacenar un perfil de datos cada vez que se haga un requerimiento
63 (getData). El perfil sera obtenido a partir del buffer de datos,
63 (getData). El perfil sera obtenido a partir del buffer de datos,
64 si el buffer esta vacio se hara un nuevo proceso de lectura de un
64 si el buffer esta vacio se hara un nuevo proceso de lectura de un
65 bloque de datos.
65 bloque de datos.
66 Si este parametro no es pasado se creara uno internamente.
66 Si este parametro no es pasado se creara uno internamente.
67
67
68 Affected:
68 Affected:
69 self.dataOut
69 self.dataOut
70
70
71 Return : None
71 Return : None
72 """
72 """
73
73
74 ProcessingUnit.__init__(self)
74 ProcessingUnit.__init__(self)
75
75
76 self.pts2read_SelfSpectra = 0
76 self.pts2read_SelfSpectra = 0
77 self.pts2read_CrossSpectra = 0
77 self.pts2read_CrossSpectra = 0
78 self.pts2read_DCchannels = 0
78 self.pts2read_DCchannels = 0
79 self.ext = ".pdata"
79 self.ext = ".pdata"
80 self.optchar = "P"
80 self.optchar = "P"
81 self.basicHeaderObj = BasicHeader(LOCALTIME)
81 self.basicHeaderObj = BasicHeader(LOCALTIME)
82 self.systemHeaderObj = SystemHeader()
82 self.systemHeaderObj = SystemHeader()
83 self.radarControllerHeaderObj = RadarControllerHeader()
83 self.radarControllerHeaderObj = RadarControllerHeader()
84 self.processingHeaderObj = ProcessingHeader()
84 self.processingHeaderObj = ProcessingHeader()
85 self.lastUTTime = 0
85 self.lastUTTime = 0
86 self.maxTimeStep = 30
86 self.maxTimeStep = 30
87 self.dataOut = Spectra()
87 self.dataOut = Spectra()
88 self.profileIndex = 1
88 self.profileIndex = 1
89 self.nRdChannels = None
89 self.nRdChannels = None
90 self.nRdPairs = None
90 self.nRdPairs = None
91 self.rdPairList = []
91 self.rdPairList = []
92
92
93 def createObjByDefault(self):
93 def createObjByDefault(self):
94
94
95 dataObj = Spectra()
95 dataObj = Spectra()
96
96
97 return dataObj
97 return dataObj
98
98
    def __hasNotDataInBuffer(self):
        # Spectra files are consumed block-by-block (no per-profile
        # buffering), so the buffer is always reported as empty (truthy 1).
        return 1
101
101
102
102
103 def getBlockDimension(self):
103 def getBlockDimension(self):
104 """
104 """
105 Obtiene la cantidad de puntos a leer por cada bloque de datos
105 Obtiene la cantidad de puntos a leer por cada bloque de datos
106
106
107 Affected:
107 Affected:
108 self.nRdChannels
108 self.nRdChannels
109 self.nRdPairs
109 self.nRdPairs
110 self.pts2read_SelfSpectra
110 self.pts2read_SelfSpectra
111 self.pts2read_CrossSpectra
111 self.pts2read_CrossSpectra
112 self.pts2read_DCchannels
112 self.pts2read_DCchannels
113 self.blocksize
113 self.blocksize
114 self.dataOut.nChannels
114 self.dataOut.nChannels
115 self.dataOut.nPairs
115 self.dataOut.nPairs
116
116
117 Return:
117 Return:
118 None
118 None
119 """
119 """
120 self.nRdChannels = 0
120 self.nRdChannels = 0
121 self.nRdPairs = 0
121 self.nRdPairs = 0
122 self.rdPairList = []
122 self.rdPairList = []
123
123
124 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
124 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
125 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
126 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
126 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
127 else:
127 else:
128 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
128 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
129 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
130
130
131 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
131 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
132
132
133 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
133 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
134 self.blocksize = self.pts2read_SelfSpectra
134 self.blocksize = self.pts2read_SelfSpectra
135
135
136 if self.processingHeaderObj.flag_cspc:
136 if self.processingHeaderObj.flag_cspc:
137 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
137 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
138 self.blocksize += self.pts2read_CrossSpectra
138 self.blocksize += self.pts2read_CrossSpectra
139
139
140 if self.processingHeaderObj.flag_dc:
140 if self.processingHeaderObj.flag_dc:
141 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
141 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
142 self.blocksize += self.pts2read_DCchannels
142 self.blocksize += self.pts2read_DCchannels
143
143
    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and
        update all block-related attributes (metadata + data). The data
        read is stored in the buffers and the buffer counter is reset.

        Return: 1 on success.

        Affected:

        self.flagIsNewFile
        self.flagIsNewBlock
        self.nTotalBlocks
        self.data_spc
        self.data_cspc
        self.data_dc

        Exceptions:
        If the block read is not a valid block
        """

        fpointer = self.fp.tell()  # NOTE(review): unused — debugging leftover?

        # Self spectra are real-valued: read only the first dtype field.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3D array

        if self.processingHeaderObj.flag_cspc:
            # Cross spectra keep both real and imaginary components.
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) # reshape into a 2D array

        if not self.processingHeaderObj.shif_fft:
            # shift right along axis 2 by half a block (center the spectrum)
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # shift right along axis 2 by the same amount
                cspc = numpy.roll( cspc, shift, axis=2 )

        #Dimensions : nChannels, nProfiles, nSamples
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            # Combine the structured (real, imag) fields into complex data.
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
210
210
211 def getFirstHeader(self):
211 def getFirstHeader(self):
212
212
213 self.getBasicHeader()
213 self.getBasicHeader()
214 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
214 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
215 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
215 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
216 self.dataOut.dtype = self.dtype
216 self.dataOut.dtype = self.dtype
217 self.dataOut.pairsList = self.rdPairList
217 self.dataOut.pairsList = self.rdPairList
218 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
218 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
219 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
219 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
220 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
220 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
221 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
221 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
222 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
223 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
223 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
224 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
224 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
225 self.dataOut.flagShiftFFT = True #Data is always shifted
225 self.dataOut.flagShiftFFT = True #Data is always shifted
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada
226 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip
227 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip
228
228
229 def getData(self):
229 def getData(self):
230 """
230 """
231 First method to execute before "RUN" is called.
231 First method to execute before "RUN" is called.
232
232
233 Copia el buffer de lectura a la clase "Spectra",
233 Copia el buffer de lectura a la clase "Spectra",
234 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
234 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
235 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
235 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
236
236
237 Return:
237 Return:
238 0 : Si no hay mas archivos disponibles
238 0 : Si no hay mas archivos disponibles
239 1 : Si hizo una buena copia del buffer
239 1 : Si hizo una buena copia del buffer
240
240
241 Affected:
241 Affected:
242 self.dataOut
242 self.dataOut
243 self.flagDiscontinuousBlock
243 self.flagDiscontinuousBlock
244 self.flagIsNewBlock
244 self.flagIsNewBlock
245 """
245 """
246
246
247 if self.flagNoMoreFiles:
247 if self.flagNoMoreFiles:
248 self.dataOut.flagNoData = True
248 self.dataOut.flagNoData = True
249 return 0
249 return 0
250
250
251 self.flagDiscontinuousBlock = 0
251 self.flagDiscontinuousBlock = 0
252 self.flagIsNewBlock = 0
252 self.flagIsNewBlock = 0
253
253
254 if self.__hasNotDataInBuffer():
254 if self.__hasNotDataInBuffer():
255
255
256 if not( self.readNextBlock() ):
256 if not( self.readNextBlock() ):
257 self.dataOut.flagNoData = True
257 self.dataOut.flagNoData = True
258 return 0
258 return 0
259
259
260 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
260 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
261
261
262 if self.data_spc is None:
262 if self.data_spc is None:
263 self.dataOut.flagNoData = True
263 self.dataOut.flagNoData = True
264 return 0
264 return 0
265
265
266 self.getBasicHeader()
266 self.getBasicHeader()
267 self.getFirstHeader()
267 self.getFirstHeader()
268 self.dataOut.data_spc = self.data_spc
268 self.dataOut.data_spc = self.data_spc
269 self.dataOut.data_cspc = self.data_cspc
269 self.dataOut.data_cspc = self.data_cspc
270 self.dataOut.data_dc = self.data_dc
270 self.dataOut.data_dc = self.data_dc
271 self.dataOut.flagNoData = False
271 self.dataOut.flagNoData = False
272 self.dataOut.realtime = self.online
272 self.dataOut.realtime = self.online
273
273
274 return self.dataOut.data_spc
274 return self.dataOut.data_spc
275
275
276
276
277 @MPDecorator
277 @MPDecorator
278 class SpectraWriter(JRODataWriter, Operation):
278 class SpectraWriter(JRODataWriter, Operation):
279
279
280 """
280 """
281 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
281 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
282 de los datos siempre se realiza por bloques.
282 de los datos siempre se realiza por bloques.
283 """
283 """
284
284
285 def __init__(self):
285 def __init__(self):
286 """
286 """
287 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
287 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
288
288
289 Affected:
289 Affected:
290 self.dataOut
290 self.dataOut
291 self.basicHeaderObj
291 self.basicHeaderObj
292 self.systemHeaderObj
292 self.systemHeaderObj
293 self.radarControllerHeaderObj
293 self.radarControllerHeaderObj
294 self.processingHeaderObj
294 self.processingHeaderObj
295
295
296 Return: None
296 Return: None
297 """
297 """
298
298
299 Operation.__init__(self)
299 Operation.__init__(self)
300
300
301 self.ext = ".pdata"
301 self.ext = ".pdata"
302 self.optchar = "P"
302 self.optchar = "P"
303 self.shape_spc_Buffer = None
303 self.shape_spc_Buffer = None
304 self.shape_cspc_Buffer = None
304 self.shape_cspc_Buffer = None
305 self.shape_dc_Buffer = None
305 self.shape_dc_Buffer = None
306 self.data_spc = None
306 self.data_spc = None
307 self.data_cspc = None
307 self.data_cspc = None
308 self.data_dc = None
308 self.data_dc = None
309 self.setFile = None
309 self.setFile = None
310 self.noMoreFiles = 0
310 self.noMoreFiles = 0
311 self.basicHeaderObj = BasicHeader(LOCALTIME)
311 self.basicHeaderObj = BasicHeader(LOCALTIME)
312 self.systemHeaderObj = SystemHeader()
312 self.systemHeaderObj = SystemHeader()
313 self.radarControllerHeaderObj = RadarControllerHeader()
313 self.radarControllerHeaderObj = RadarControllerHeader()
314 self.processingHeaderObj = ProcessingHeader()
314 self.processingHeaderObj = ProcessingHeader()
315
315
316 def hasAllDataInBuffer(self):
316 def hasAllDataInBuffer(self):
317 return 1
317 return 1
318
318
319
319
320 def setBlockDimension(self):
320 def setBlockDimension(self):
321 """
321 """
322 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
322 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
323
323
324 Affected:
324 Affected:
325 self.shape_spc_Buffer
325 self.shape_spc_Buffer
326 self.shape_cspc_Buffer
326 self.shape_cspc_Buffer
327 self.shape_dc_Buffer
327 self.shape_dc_Buffer
328
328
329 Return: None
329 Return: None
330 """
330 """
331 self.shape_spc_Buffer = (self.dataOut.nChannels,
331 self.shape_spc_Buffer = (self.dataOut.nChannels,
332 self.processingHeaderObj.nHeights,
332 self.processingHeaderObj.nHeights,
333 self.processingHeaderObj.profilesPerBlock)
333 self.processingHeaderObj.profilesPerBlock)
334
334
335 self.shape_cspc_Buffer = (self.dataOut.nPairs,
335 self.shape_cspc_Buffer = (self.dataOut.nPairs,
336 self.processingHeaderObj.nHeights,
336 self.processingHeaderObj.nHeights,
337 self.processingHeaderObj.profilesPerBlock)
337 self.processingHeaderObj.profilesPerBlock)
338
338
339 self.shape_dc_Buffer = (self.dataOut.nChannels,
339 self.shape_dc_Buffer = (self.dataOut.nChannels,
340 self.processingHeaderObj.nHeights)
340 self.processingHeaderObj.nHeights)
341
341
342
342
343 def writeBlock(self):
343 def writeBlock(self):
344 """processingHeaderObj
344 """processingHeaderObj
345 Escribe el buffer en el file designado
345 Escribe el buffer en el file designado
346
346
347 Affected:
347 Affected:
348 self.data_spc
348 self.data_spc
349 self.data_cspc
349 self.data_cspc
350 self.data_dc
350 self.data_dc
351 self.flagIsNewFile
351 self.flagIsNewFile
352 self.flagIsNewBlock
352 self.flagIsNewBlock
353 self.nTotalBlocks
353 self.nTotalBlocks
354 self.nWriteBlocks
354 self.nWriteBlocks
355
355
356 Return: None
356 Return: None
357 """
357 """
358
358
359 spc = numpy.transpose( self.data_spc, (0,2,1) )
359 spc = numpy.transpose( self.data_spc, (0,2,1) )
360 if not self.processingHeaderObj.shif_fft:
360 if not self.processingHeaderObj.shif_fft:
361 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
361 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
362 data = spc.reshape((-1))
362 data = spc.reshape((-1))
363 data = data.astype(self.dtype[0])
363 data = data.astype(self.dtype[0])
364 data.tofile(self.fp)
364 data.tofile(self.fp)
365
365
366 if self.data_cspc is not None:
366 if self.data_cspc is not None:
367
367
368 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
368 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
369 data = numpy.zeros( numpy.shape(cspc), self.dtype )
369 data = numpy.zeros( numpy.shape(cspc), self.dtype )
370 #print 'data.shape', self.shape_cspc_Buffer
370 #print 'data.shape', self.shape_cspc_Buffer
371 if not self.processingHeaderObj.shif_fft:
371 if not self.processingHeaderObj.shif_fft:
372 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
372 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
373 data['real'] = cspc.real
373 data['real'] = cspc.real
374 data['imag'] = cspc.imag
374 data['imag'] = cspc.imag
375 data = data.reshape((-1))
375 data = data.reshape((-1))
376 data.tofile(self.fp)
376 data.tofile(self.fp)
377
377
378 if self.data_dc is not None:
378 if self.data_dc is not None:
379
379
380 dc = self.data_dc
380 dc = self.data_dc
381 data = numpy.zeros( numpy.shape(dc), self.dtype )
381 data = numpy.zeros( numpy.shape(dc), self.dtype )
382 data['real'] = dc.real
382 data['real'] = dc.real
383 data['imag'] = dc.imag
383 data['imag'] = dc.imag
384 data = data.reshape((-1))
384 data = data.reshape((-1))
385 data.tofile(self.fp)
385 data.tofile(self.fp)
386
386
387 # self.data_spc.fill(0)
387 # self.data_spc.fill(0)
388 #
388 #
389 # if self.data_dc is not None:
389 # if self.data_dc is not None:
390 # self.data_dc.fill(0)
390 # self.data_dc.fill(0)
391 #
391 #
392 # if self.data_cspc is not None:
392 # if self.data_cspc is not None:
393 # self.data_cspc.fill(0)
393 # self.data_cspc.fill(0)
394
394
395 self.flagIsNewFile = 0
395 self.flagIsNewFile = 0
396 self.flagIsNewBlock = 1
396 self.flagIsNewBlock = 1
397 self.nTotalBlocks += 1
397 self.nTotalBlocks += 1
398 self.nWriteBlocks += 1
398 self.nWriteBlocks += 1
399 self.blockIndex += 1
399 self.blockIndex += 1
400
400
401 # print "[Writing] Block = %d04" %self.blockIndex
401 # print "[Writing] Block = %d04" %self.blockIndex
402
402
403 def putData(self):
403 def putData(self):
404 """
404 """
405 Setea un bloque de datos y luego los escribe en un file
405 Setea un bloque de datos y luego los escribe en un file
406
406
407 Affected:
407 Affected:
408 self.data_spc
408 self.data_spc
409 self.data_cspc
409 self.data_cspc
410 self.data_dc
410 self.data_dc
411
411
412 Return:
412 Return:
413 0 : Si no hay data o no hay mas files que puedan escribirse
413 0 : Si no hay data o no hay mas files que puedan escribirse
414 1 : Si se escribio la data de un bloque en un file
414 1 : Si se escribio la data de un bloque en un file
415 """
415 """
416
416
417 if self.dataOut.flagNoData:
417 if self.dataOut.flagNoData:
418 return 0
418 return 0
419
419
420 self.flagIsNewBlock = 0
420 self.flagIsNewBlock = 0
421
421
422 if self.dataOut.flagDiscontinuousBlock:
422 if self.dataOut.flagDiscontinuousBlock:
423 self.data_spc.fill(0)
423 self.data_spc.fill(0)
424 if self.dataOut.data_cspc is not None:
424 if self.dataOut.data_cspc is not None:
425 self.data_cspc.fill(0)
425 self.data_cspc.fill(0)
426 if self.dataOut.data_dc is not None:
426 if self.dataOut.data_dc is not None:
427 self.data_dc.fill(0)
427 self.data_dc.fill(0)
428 self.setNextFile()
428 self.setNextFile()
429
429
430 if self.flagIsNewFile == 0:
430 if self.flagIsNewFile == 0:
431 self.setBasicHeader()
431 self.setBasicHeader()
432
432
433 self.data_spc = self.dataOut.data_spc.copy()
433 self.data_spc = self.dataOut.data_spc.copy()
434
434
435 if self.dataOut.data_cspc is not None:
435 if self.dataOut.data_cspc is not None:
436 self.data_cspc = self.dataOut.data_cspc.copy()
436 self.data_cspc = self.dataOut.data_cspc.copy()
437
437
438 if self.dataOut.data_dc is not None:
438 if self.dataOut.data_dc is not None:
439 self.data_dc = self.dataOut.data_dc.copy()
439 self.data_dc = self.dataOut.data_dc.copy()
440
440
441 # #self.processingHeaderObj.dataBlocksPerFile)
441 # #self.processingHeaderObj.dataBlocksPerFile)
442 if self.hasAllDataInBuffer():
442 if self.hasAllDataInBuffer():
443 # self.setFirstHeader()
443 # self.setFirstHeader()
444 self.writeNextBlock()
444 self.writeNextBlock()
445
445
446 def __getBlockSize(self):
446 def __getBlockSize(self):
447 '''
447 '''
448 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
448 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
449 '''
449 '''
450
450
451 dtype_width = self.getDtypeWidth()
451 dtype_width = self.getDtypeWidth()
452
452
453 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
453 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
454
454
455 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
455 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
456 blocksize = (pts2write_SelfSpectra*dtype_width)
456 blocksize = (pts2write_SelfSpectra*dtype_width)
457
457
458 if self.dataOut.data_cspc is not None:
458 if self.dataOut.data_cspc is not None:
459 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
459 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
460 blocksize += (pts2write_CrossSpectra*dtype_width*2)
460 blocksize += (pts2write_CrossSpectra*dtype_width*2)
461
461
462 if self.dataOut.data_dc is not None:
462 if self.dataOut.data_dc is not None:
463 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
463 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
464 blocksize += (pts2write_DCchannels*dtype_width*2)
464 blocksize += (pts2write_DCchannels*dtype_width*2)
465
465
466 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
466 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
467
467
468 return blocksize
468 return blocksize
469
469
470 def setFirstHeader(self):
470 def setFirstHeader(self):
471
471
472 """
472 """
473 Obtiene una copia del First Header
473 Obtiene una copia del First Header
474
474
475 Affected:
475 Affected:
476 self.systemHeaderObj
476 self.systemHeaderObj
477 self.radarControllerHeaderObj
477 self.radarControllerHeaderObj
478 self.dtype
478 self.dtype
479
479
480 Return:
480 Return:
481 None
481 None
482 """
482 """
483
483
484 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
484 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
485 self.systemHeaderObj.nChannels = self.dataOut.nChannels
485 self.systemHeaderObj.nChannels = self.dataOut.nChannels
486 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
486 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
487
487
488 self.processingHeaderObj.dtype = 1 # Spectra
488 self.processingHeaderObj.dtype = 1 # Spectra
489 self.processingHeaderObj.blockSize = self.__getBlockSize()
489 self.processingHeaderObj.blockSize = self.__getBlockSize()
490 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
490 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
491 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
491 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
492 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
492 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
496 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
496 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
497
497
498 if self.processingHeaderObj.totalSpectra > 0:
498 if self.processingHeaderObj.totalSpectra > 0:
499 channelList = []
499 channelList = []
500 for channel in range(self.dataOut.nChannels):
500 for channel in range(self.dataOut.nChannels):
501 channelList.append(channel)
501 channelList.append(channel)
502 channelList.append(channel)
502 channelList.append(channel)
503
503
504 pairsList = []
504 pairsList = []
505 if self.dataOut.nPairs > 0:
505 if self.dataOut.nPairs > 0:
506 for pair in self.dataOut.pairsList:
506 for pair in self.dataOut.pairsList:
507 pairsList.append(pair[0])
507 pairsList.append(pair[0])
508 pairsList.append(pair[1])
508 pairsList.append(pair[1])
509
509
510 spectraComb = channelList + pairsList
510 spectraComb = channelList + pairsList
511 spectraComb = numpy.array(spectraComb, dtype="u1")
511 spectraComb = numpy.array(spectraComb, dtype="u1")
512 self.processingHeaderObj.spectraComb = spectraComb
512 self.processingHeaderObj.spectraComb = spectraComb
513
513
514 if self.dataOut.code is not None:
514 if self.dataOut.code is not None:
515 self.processingHeaderObj.code = self.dataOut.code
515 self.processingHeaderObj.code = self.dataOut.code
516 self.processingHeaderObj.nCode = self.dataOut.nCode
516 self.processingHeaderObj.nCode = self.dataOut.nCode
517 self.processingHeaderObj.nBaud = self.dataOut.nBaud
517 self.processingHeaderObj.nBaud = self.dataOut.nBaud
518
518
519 if self.processingHeaderObj.nWindows != 0:
519 if self.processingHeaderObj.nWindows != 0:
520 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
520 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
521 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
521 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
522 self.processingHeaderObj.nHeights = self.dataOut.nHeights
522 self.processingHeaderObj.nHeights = self.dataOut.nHeights
523 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
523 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
524
524
525 self.processingHeaderObj.processFlags = self.getProcessFlags()
525 self.processingHeaderObj.processFlags = self.getProcessFlags()
526
526
527 self.setBasicHeader() No newline at end of file
527 self.setBasicHeader()
@@ -1,600 +1,602
1 '''
1 '''
2 Created on Jul 3, 2014
2 Created on Jul 3, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 '''
5 '''
6 import os
6 import os
7 import datetime
7 import datetime
8 import numpy
8 import numpy
9
9
10 try:
10 try:
11 from gevent import sleep
11 from gevent import sleep
12 except:
12 except:
13 from time import sleep
13 from time import sleep
14
14
15 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
15 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
16 from schainpy.model.data.jrodata import Voltage
16 from schainpy.model.data.jrodata import Voltage
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
18
18
19 try:
19 try:
20 import digital_rf_hdf5
20 import digital_rf_hdf5
21 except:
21 except:
22 pass
22 pass
23
23
24 class USRPReader(ProcessingUnit):
24 class USRPReader(ProcessingUnit):
25 '''
25 '''
26 classdocs
26 classdocs
27 '''
27 '''
28
28
29 def __init__(self, **kwargs):
29 def __init__(self, **kwargs):
30 '''
30 '''
31 Constructor
31 Constructor
32 '''
32 '''
33
33
34 ProcessingUnit.__init__(self, **kwargs)
34 ProcessingUnit.__init__(self, **kwargs)
35
35
36 self.dataOut = Voltage()
36 self.dataOut = Voltage()
37 self.__printInfo = True
37 self.__printInfo = True
38 self.__flagDiscontinuousBlock = False
38 self.__flagDiscontinuousBlock = False
39 self.__bufferIndex = 9999999
39 self.__bufferIndex = 9999999
40
40
41 self.__ippKm = None
41 self.__ippKm = None
42 self.__codeType = 0
42 self.__codeType = 0
43 self.__nCode = None
43 self.__nCode = None
44 self.__nBaud = None
44 self.__nBaud = None
45 self.__code = None
45 self.__code = None
46
46
47 def __getCurrentSecond(self):
47 def __getCurrentSecond(self):
48
48
49 return self.__thisUnixSample/self.__sample_rate
49 return self.__thisUnixSample/self.__sample_rate
50
50
51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52
52
53 def __setFileHeader(self):
53 def __setFileHeader(self):
54 '''
54 '''
55 In this method will be initialized every parameter of dataOut object (header, no data)
55 In this method will be initialized every parameter of dataOut object (header, no data)
56 '''
56 '''
57 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
57 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
58
58
59 nProfiles = 1.0/ippSeconds #Number of profiles in one second
59 nProfiles = 1.0/ippSeconds #Number of profiles in one second
60
60
61 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
61 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
62 txA=0,
62 txA=0,
63 txB=0,
63 txB=0,
64 nWindows=1,
64 nWindows=1,
65 nHeights=self.__nSamples,
65 nHeights=self.__nSamples,
66 firstHeight=self.__firstHeigth,
66 firstHeight=self.__firstHeigth,
67 deltaHeight=self.__deltaHeigth,
67 deltaHeight=self.__deltaHeigth,
68 codeType=self.__codeType,
68 codeType=self.__codeType,
69 nCode=self.__nCode, nBaud=self.__nBaud,
69 nCode=self.__nCode, nBaud=self.__nBaud,
70 code = self.__code)
70 code = self.__code)
71
71
72 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
72 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
73 nProfiles=nProfiles,
73 nProfiles=nProfiles,
74 nChannels=len(self.__channelList),
74 nChannels=len(self.__channelList),
75 adcResolution=14)
75 adcResolution=14)
76
76
77 self.dataOut.type = "Voltage"
77 self.dataOut.type = "Voltage"
78
78
79 self.dataOut.data = None
79 self.dataOut.data = None
80
80
81 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
81 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
82
82
83 # self.dataOut.nChannels = 0
83 # self.dataOut.nChannels = 0
84
84
85 # self.dataOut.nHeights = 0
85 # self.dataOut.nHeights = 0
86
86
87 self.dataOut.nProfiles = nProfiles
87 self.dataOut.nProfiles = nProfiles
88
88
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
90
90
91 self.dataOut.channelList = self.__channelList
91 self.dataOut.channelList = self.__channelList
92
92
93 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
93 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
94
94
95 # self.dataOut.channelIndexList = None
95 # self.dataOut.channelIndexList = None
96
96
97 self.dataOut.flagNoData = True
97 self.dataOut.flagNoData = True
98
98
99 #Set to TRUE if the data is discontinuous
99 #Set to TRUE if the data is discontinuous
100 self.dataOut.flagDiscontinuousBlock = False
100 self.dataOut.flagDiscontinuousBlock = False
101
101
102 self.dataOut.utctime = None
102 self.dataOut.utctime = None
103
103
104 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
104 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
105
105
106 self.dataOut.dstFlag = 0
106 self.dataOut.dstFlag = 0
107
107
108 self.dataOut.errorCount = 0
108 self.dataOut.errorCount = 0
109
109
110 self.dataOut.nCohInt = 1
110 self.dataOut.nCohInt = 1
111
111
112 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
112 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
113
113
114 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
114 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
115
115
116 self.dataOut.flagShiftFFT = False
116 self.dataOut.flagShiftFFT = False
117
117
118 self.dataOut.ippSeconds = ippSeconds
118 self.dataOut.ippSeconds = ippSeconds
119
119
120 #Time interval between profiles
120 #Time interval between profiles
121 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
121 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
122
122
123 self.dataOut.frequency = self.__frequency
123 self.dataOut.frequency = self.__frequency
124
124
125 self.dataOut.realtime = self.__online
125 self.dataOut.realtime = self.__online
126
126
127 def findDatafiles(self, path, startDate=None, endDate=None):
127 def findDatafiles(self, path, startDate=None, endDate=None):
128
128
129 if not os.path.isdir(path):
129 if not os.path.isdir(path):
130 return []
130 return []
131
131
132 try:
132 try:
133 digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
133 digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
134 except:
134 except:
135 digitalReadObj = digital_rf_hdf5.read_hdf5(path)
135 digitalReadObj = digital_rf_hdf5.read_hdf5(path)
136
136
137 channelNameList = digitalReadObj.get_channels()
137 channelNameList = digitalReadObj.get_channels()
138
138
139 if not channelNameList:
139 if not channelNameList:
140 return []
140 return []
141
141
142 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
142 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
143
143
144 sample_rate = metadata_dict['sample_rate'][0]
144 sample_rate = metadata_dict['sample_rate'][0]
145
145
146 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
146 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
147
147
148 try:
148 try:
149 timezone = this_metadata_file['timezone'].value
149 timezone = this_metadata_file['timezone'].value
150 except:
150 except:
151 timezone = 0
151 timezone = 0
152
152
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
154
154
155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
157
157
158 if not startDate:
158 if not startDate:
159 startDate = startDatetime.date()
159 startDate = startDatetime.date()
160
160
161 if not endDate:
161 if not endDate:
162 endDate = endDatatime.date()
162 endDate = endDatatime.date()
163
163
164 dateList = []
164 dateList = []
165
165
166 thisDatetime = startDatetime
166 thisDatetime = startDatetime
167
167
168 while(thisDatetime<=endDatatime):
168 while(thisDatetime<=endDatatime):
169
169
170 thisDate = thisDatetime.date()
170 thisDate = thisDatetime.date()
171
171
172 if thisDate < startDate:
172 if thisDate < startDate:
173 continue
173 continue
174
174
175 if thisDate > endDate:
175 if thisDate > endDate:
176 break
176 break
177
177
178 dateList.append(thisDate)
178 dateList.append(thisDate)
179 thisDatetime += datetime.timedelta(1)
179 thisDatetime += datetime.timedelta(1)
180
180
181 return dateList
181 return dateList
182
182
183 def setup(self, path = None,
183 def setup(self, path = None,
184 startDate = None,
184 startDate = None,
185 endDate = None,
185 endDate = None,
186 startTime = datetime.time(0,0,0),
186 startTime = datetime.time(0,0,0),
187 endTime = datetime.time(23,59,59),
187 endTime = datetime.time(23,59,59),
188 channelList = None,
188 channelList = None,
189 nSamples = None,
189 nSamples = None,
190 ippKm = 60,
190 ippKm = 60,
191 online = False,
191 online = False,
192 delay = 60,
192 delay = 60,
193 buffer_size = 1024,
193 buffer_size = 1024,
194 **kwargs):
194 **kwargs):
195 '''
195 '''
196 In this method we should set all initial parameters.
196 In this method we should set all initial parameters.
197
197
198 Inputs:
198 Inputs:
199 path
199 path
200 startDate
200 startDate
201 endDate
201 endDate
202 startTime
202 startTime
203 endTime
203 endTime
204 set
204 set
205 expLabel
205 expLabel
206 ext
206 ext
207 online
207 online
208 delay
208 delay
209 '''
209 '''
210
210
211 if not os.path.isdir(path):
211 if not os.path.isdir(path):
212 raise ValueError("[Reading] Directory %s does not exist" %path)
212 raise ValueError("[Reading] Directory %s does not exist" %path)
213
213
214 try:
214 try:
215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
216 except:
216 except:
217 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)
217 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)
218
218
219 channelNameList = self.digitalReadObj.get_channels()
219 channelNameList = self.digitalReadObj.get_channels()
220
220
221 if not channelNameList:
221 if not channelNameList:
222 raise ValueError("[Reading] Directory %s does not have any files" %path)
222 raise ValueError("[Reading] Directory %s does not have any files" %path)
223
223
224 if not channelList:
224 if not channelList:
225 channelList = list(range(len(channelNameList)))
225 channelList = list(range(len(channelNameList)))
226
226
227 ########## Reading metadata ######################
227 ########## Reading metadata ######################
228
228
229 metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])
229 metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])
230
230
231 self.__sample_rate = metadata_dict['sample_rate'][0]
231 self.__sample_rate = metadata_dict['sample_rate'][0]
232 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
232 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
233 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
233 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
234
234
235 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
235 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
236
236
237 self.__frequency = None
237 self.__frequency = None
238 try:
238 try:
239 self.__frequency = this_metadata_file['center_frequencies'].value
239 self.__frequency = this_metadata_file['center_frequencies'].value
240 except:
240 except:
241 self.__frequency = this_metadata_file['fc'].value
241 self.__frequency = this_metadata_file['fc'].value
242
242
243 if not self.__frequency:
243 if not self.__frequency:
244 raise ValueError("Center Frequency is not defined in metadata file")
244 raise ValueError("Center Frequency is not defined in metadata file")
245
245
246 try:
246 try:
247 self.__timezone = this_metadata_file['timezone'].value
247 self.__timezone = this_metadata_file['timezone'].value
248 except:
248 except:
249 self.__timezone = 0
249 self.__timezone = 0
250
250
251 self.__firstHeigth = 0
251 self.__firstHeigth = 0
252
252
253 try:
253 try:
254 codeType = this_metadata_file['codeType'].value
254 codeType = this_metadata_file['codeType'].value
255 except:
255 except:
256 codeType = 0
256 codeType = 0
257
257
258 nCode = 1
258 nCode = 1
259 nBaud = 1
259 nBaud = 1
260 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
260 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
261
261
262 if codeType:
262 if codeType:
263 nCode = this_metadata_file['nCode'].value
263 nCode = this_metadata_file['nCode'].value
264 nBaud = this_metadata_file['nBaud'].value
264 nBaud = this_metadata_file['nBaud'].value
265 code = this_metadata_file['code'].value
265 code = this_metadata_file['code'].value
266
266
267 if not ippKm:
267 if not ippKm:
268 try:
268 try:
269 #seconds to km
269 #seconds to km
270 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
270 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
271 except:
271 except:
272 ippKm = None
272 ippKm = None
273
273
274 ####################################################
274 ####################################################
275 startUTCSecond = None
275 startUTCSecond = None
276 endUTCSecond = None
276 endUTCSecond = None
277
277
278 if startDate:
278 if startDate:
279 startDatetime = datetime.datetime.combine(startDate, startTime)
279 startDatetime = datetime.datetime.combine(startDate, startTime)
280 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
280 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
281
281
282 if endDate:
282 if endDate:
283 endDatetime = datetime.datetime.combine(endDate, endTime)
283 endDatetime = datetime.datetime.combine(endDate, endTime)
284 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
284 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
285
285
286 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
286 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
287
287
288 if not startUTCSecond:
288 if not startUTCSecond:
289 startUTCSecond = start_index/self.__sample_rate
289 startUTCSecond = start_index/self.__sample_rate
290
290
291 if start_index > startUTCSecond*self.__sample_rate:
291 if start_index > startUTCSecond*self.__sample_rate:
292 startUTCSecond = start_index/self.__sample_rate
292 startUTCSecond = start_index/self.__sample_rate
293
293
294 if not endUTCSecond:
294 if not endUTCSecond:
295 endUTCSecond = end_index/self.__sample_rate
295 endUTCSecond = end_index/self.__sample_rate
296
296
297 if end_index < endUTCSecond*self.__sample_rate:
297 if end_index < endUTCSecond*self.__sample_rate:
298 endUTCSecond = end_index/self.__sample_rate
298 endUTCSecond = end_index/self.__sample_rate
299
299
300 if not nSamples:
300 if not nSamples:
301 if not ippKm:
301 if not ippKm:
302 raise ValueError("[Reading] nSamples or ippKm should be defined")
302 raise ValueError("[Reading] nSamples or ippKm should be defined")
303
303
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
305
305
306 channelBoundList = []
306 channelBoundList = []
307 channelNameListFiltered = []
307 channelNameListFiltered = []
308
308
309 for thisIndexChannel in channelList:
309 for thisIndexChannel in channelList:
310 thisChannelName = channelNameList[thisIndexChannel]
310 thisChannelName = channelNameList[thisIndexChannel]
311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
312 channelBoundList.append((start_index, end_index))
312 channelBoundList.append((start_index, end_index))
313 channelNameListFiltered.append(thisChannelName)
313 channelNameListFiltered.append(thisChannelName)
314
314
315 self.profileIndex = 0
315 self.profileIndex = 0
316
316
317 self.__delay = delay
317 self.__delay = delay
318 self.__ippKm = ippKm
318 self.__ippKm = ippKm
319 self.__codeType = codeType
319 self.__codeType = codeType
320 self.__nCode = nCode
320 self.__nCode = nCode
321 self.__nBaud = nBaud
321 self.__nBaud = nBaud
322 self.__code = code
322 self.__code = code
323
323
324 self.__datapath = path
324 self.__datapath = path
325 self.__online = online
325 self.__online = online
326 self.__channelList = channelList
326 self.__channelList = channelList
327 self.__channelNameList = channelNameListFiltered
327 self.__channelNameList = channelNameListFiltered
328 self.__channelBoundList = channelBoundList
328 self.__channelBoundList = channelBoundList
329 self.__nSamples = nSamples
329 self.__nSamples = nSamples
330 self.__samples_to_read = int(buffer_size*nSamples)
330 self.__samples_to_read = int(buffer_size*nSamples)
331 self.__nChannels = len(self.__channelList)
331 self.__nChannels = len(self.__channelList)
332
332
333 self.__startUTCSecond = startUTCSecond
333 self.__startUTCSecond = startUTCSecond
334 self.__endUTCSecond = endUTCSecond
334 self.__endUTCSecond = endUTCSecond
335
335
336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
337
337
338 if online:
338 if online:
339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
340 startUTCSecond = numpy.floor(endUTCSecond)
340 startUTCSecond = numpy.floor(endUTCSecond)
341
341
342 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
342 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
343
343
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
345
345
346 self.__setFileHeader()
346 self.__setFileHeader()
347 self.isConfig = True
347 self.isConfig = True
348
348
349 print("[Reading] USRP Data was found from %s to %s " %(
349 print("[Reading] USRP Data was found from %s to %s " %(
350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
352 ))
352 ))
353
353
354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
356 ))
356 ))
357
357
def __reload(self):
    """Refresh dataset bounds in online mode.

    Returns True when the dataset end has advanced (new data available),
    False otherwise. In offline mode this is a no-op.
    """
    if not self.__online:
        return

    print("[Reading] reloading metadata ...")

    # Older digital_rf versions do not accept complete_update.
    try:
        self.digitalReadObj.reload(complete_update=True)
    except:
        self.digitalReadObj.reload()

    start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])

    # Data may have been purged at the front: move the start bound forward.
    if start_index > self.__startUTCSecond * self.__sample_rate:
        self.__startUTCSecond = 1.0 * start_index / self.__sample_rate

    # New data appended at the back: extend the end bound and report success.
    if end_index > self.__endUTCSecond * self.__sample_rate:
        self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
        print()
        print("[Reading] New timerange found [%s, %s] " % (
            datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        ))

        return True

    return False
def __readNextBlock(self, seconds=30, volt_scale = 218776):
    '''
    Read the next block of __samples_to_read samples per channel into
    __data_buffer, scaled by volt_scale.

    Returns True on success; False when the end of the selected time range
    is reached or no channel could be read. Sets __flagDiscontinuousBlock
    when a gap or short read is detected.
    '''

    # Advance the read cursor by one block.
    self.__flagDiscontinuousBlock = False
    self.__thisUnixSample += self.__samples_to_read

    if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
        print("[Reading] There are no more data into selected time-range")

        # In online mode the dataset may have grown; refresh the bounds.
        self.__reload()

        if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
            # Still out of range: rewind the cursor and give up.
            self.__thisUnixSample -= self.__samples_to_read
            return False

    indexChannel = 0

    dataOk = False

    for thisChannelName in self.__channelNameList:

        try:
            result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                          self.__samples_to_read,
                                                          thisChannelName)

        except IOError as e:
            # Gap in the recording: flag it and skip this block.
            self.__flagDiscontinuousBlock = True
            print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
            break

        if result.shape[0] != self.__samples_to_read:
            # Short read: treat as a discontinuity.
            self.__flagDiscontinuousBlock = True
            print("[Reading] %s: Too few samples were found, just %d/%d samples" % (
                datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                result.shape[0],
                self.__samples_to_read))
            break

        # Convert normalized samples to voltage units.
        self.__data_buffer[indexChannel, :] = result * volt_scale

        indexChannel += 1

        dataOk = True

    self.__utctime = self.__thisUnixSample / self.__sample_rate

    if not dataOk:
        return False

    print("[Reading] %s: %d samples <> %f sec" % (
        datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
        self.__samples_to_read,
        self.__timeInterval))

    # Reset the per-profile cursor into the freshly filled buffer.
    self.__bufferIndex = 0

    return True
def __isBufferEmpty(self):
    """Return True when fewer than one full profile remains unread."""
    remaining = self.__samples_to_read - self.__bufferIndex
    return remaining < self.__nSamples
def getData(self, seconds=30, nTries=5):

    '''
    Copy one profile (heights x channels) from the internal buffer into
    self.dataOut, refilling the buffer from disk when it runs out.

    In addition, the buffer cursor and profile counter are advanced by one.

    Inputs:
        seconds : wait time between retries in online mode
        nTries  : max consecutive failed reads before giving up (online)

    Return:
        True when a profile was delivered; False when there is no more
        data to read in the selected time range.

    Affected:
        self.dataOut
        self.profileIndex
        self.flagDiscontinuousBlock
        self.flagIsNewBlock
    '''

    err_counter = 0
    self.dataOut.flagNoData = True

    if self.__isBufferEmpty():

        self.__flagDiscontinuousBlock = False

        # Keep trying to refill the buffer until a block is read or the
        # range is exhausted.
        while True:
            if self.__readNextBlock():
                break

            if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
                return False

            if self.__flagDiscontinuousBlock:
                print('[Reading] discontinuous block found ... continue with the next block')
                continue

            if not self.__online:
                return False

            err_counter += 1
            if err_counter > nTries:
                return False

            print('[Reading] waiting %d seconds to read a new block' % seconds)
            sleep(seconds)

    # Slice one profile out of the block buffer.
    self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
    self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
    self.dataOut.flagNoData = False
    self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
    self.dataOut.profileIndex = self.profileIndex

    self.__bufferIndex += self.__nSamples
    self.profileIndex += 1

    # Wrap the profile counter at nProfiles.
    if self.profileIndex == self.dataOut.nProfiles:
        self.profileIndex = 0

    return True
def printInfo(self):
    '''
    Emit reader metadata at most once; later calls do nothing.
    '''
    if self.__printInfo != False:
        # Header printing (system / radar-controller) is currently disabled.
        self.__printInfo = False
def printNumberOfBlock(self):
    '''
    Print the index of the profile currently being delivered.
    '''
    current = self.profileIndex
    print(current)
def run(self, **kwargs):
    '''
    Main entry point, called repeatedly by the controller.

    On the first call, forwards **kwargs to setup(); afterwards delivers
    one profile per call via getData(), waiting up to the configured
    delay in online mode.
    '''

    if not self.isConfig:
        self.setup(**kwargs)

    self.getData(seconds=self.__delay)

    return
class USRPWriter(Operation):
    '''
    Skeleton writer operation for USRP data.

    NOTE(review): this is a stub -- setup() stores the input object and the
    path/blocksPerFile/set/ext parameters are currently unused; run() never
    writes anything to disk.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor
        '''
        Operation.__init__(self, **kwargs)
        self.dataOut = None

    def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
        '''
        In this method we should set all initial parameters.

        Input:
            dataIn : Input data will also be output data
        '''
        self.dataOut = dataIn

        self.isConfig = True

        return

    def run(self, dataIn, **kwargs):
        '''
        This method will be called many times so here you should put all your code

        Inputs:

            dataIn : object with the data

        '''
        # presumably isConfig is initialized False by the Operation base
        # class -- TODO confirm
        if not self.isConfig:
            self.setup(dataIn, **kwargs)
if __name__ == '__main__':

    # Manual smoke test: read forever from a hard-coded dataset path and
    # print the running profile index. NOTE(review): infinite loop by design.
    readObj = USRPReader()

    while True:
        readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
        # readObj.printInfo()
        readObj.printNumberOfBlock()
@@ -1,680 +1,676
1 '''
1 '''
2 Created on Jul 2, 2014
2 Created on Jul 2, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 '''
5 '''
6
6
7 import numpy
7 import numpy
8
8
9 from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 from schainpy.model.data.jrodata import Voltage
12 from schainpy.model.data.jrodata import Voltage
13 import zmq
14 import tempfile
15 from io import StringIO
16 # from _sha import blocksize
17
13
18 @MPDecorator
14
19 class VoltageReader(JRODataReader, ProcessingUnit):
15 class VoltageReader(JRODataReader, ProcessingUnit):
20 """
16 """
21 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
17 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
22 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
18 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
23 perfiles*alturas*canales) son almacenados en la variable "buffer".
19 perfiles*alturas*canales) son almacenados en la variable "buffer".
24
20
25 perfiles * alturas * canales
21 perfiles * alturas * canales
26
22
27 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
23 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
28 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
24 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
29 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
25 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
30 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
26 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
31
27
32 Example:
28 Example:
33
29
34 dpath = "/home/myuser/data"
30 dpath = "/home/myuser/data"
35
31
36 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
32 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
37
33
38 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
34 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
39
35
40 readerObj = VoltageReader()
36 readerObj = VoltageReader()
41
37
42 readerObj.setup(dpath, startTime, endTime)
38 readerObj.setup(dpath, startTime, endTime)
43
39
44 while(True):
40 while(True):
45
41
46 #to get one profile
42 #to get one profile
47 profile = readerObj.getData()
43 profile = readerObj.getData()
48
44
49 #print the profile
45 #print the profile
50 print profile
46 print profile
51
47
52 #If you want to see all datablock
48 #If you want to see all datablock
53 print readerObj.datablock
49 print readerObj.datablock
54
50
55 if readerObj.flagNoMoreFiles:
51 if readerObj.flagNoMoreFiles:
56 break
52 break
57
53
58 """
54 """
59
55
def __init__(self):
    """
    Initialize the VoltageReader.

    Creates the header containers (basic, system, radar-controller,
    processing), an internal Voltage object used as dataOut, and the
    counters that drive block/profile reading. Each getData() call fills
    one profile into self.dataOut from the internal block buffer.
    """

    ProcessingUnit.__init__(self)

    self.ext = ".r"                 # raw voltage file extension
    self.optchar = "D"
    self.basicHeaderObj = BasicHeader(LOCALTIME)
    self.systemHeaderObj = SystemHeader()
    self.radarControllerHeaderObj = RadarControllerHeader()
    self.processingHeaderObj = ProcessingHeader()
    self.lastUTTime = 0
    # Sentinel larger than any real block size, forcing a block read on
    # the first getData() call.
    self.profileIndex = 2**32 - 1
    self.dataOut = Voltage()
    self.selBlocksize = None
    self.selBlocktime = None
def createObjByDefault(self):
    """Build and return a fresh, empty Voltage container."""
    return Voltage()
def __hasNotDataInBuffer(self):
    """Return 1 when every profile of the current block was consumed, else 0."""
    profilesAvailable = self.processingHeaderObj.profilesPerBlock * self.nTxs
    return 1 if self.profileIndex >= profilesAvailable else 0
def getBlockDimension(self):
    """
    Compute the number of samples to read per data block.

    blocksize = profilesPerBlock * nHeights * nChannels

    Affected:
        self.blocksize
    """
    nProfiles = self.processingHeaderObj.profilesPerBlock
    nHeights = self.processingHeaderObj.nHeights
    nChannels = self.systemHeaderObj.nChannels
    self.blocksize = nProfiles * nHeights * nChannels
def readBlock(self):
    """
    Read one data block from the current file position (self.fp) and
    update all block-related state. The decoded data is stored in
    self.datablock and the buffer cursor is reset to 0.

    Inputs:
        None

    Return:
        1 on success (kept as int for legacy callers).

    Affected:
        self.profileIndex
        self.datablock
        self.flagIsNewFile
        self.flagIsNewBlock
        self.nTotalBlocks

    Exceptions:
        Raises if the block read from disk is not a valid block even
        after waiting for more data.
    """

    # NOTE(review): a disabled ZMQ-server code path was previously
    # commented out here (self.server / self.receiver.recv()).
    current_pointer_location = self.fp.tell()
    junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)

    try:
        junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                             self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
    except:
        # Short read (file still being written): wait for the rest of the
        # block, rewind, and retry once.
        if self.waitDataBlock(pointer_location=current_pointer_location):
            junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
            junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                                 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))

    # Reorder to (nChannels, nProfiles, nSamples) and build complex voltages
    # from the structured ('real', 'imag') dtype.
    junk = numpy.transpose(junk, (2, 0, 1))
    self.datablock = junk['real'] + junk['imag'] * 1j

    self.profileIndex = 0

    self.flagIsNewFile = 0
    self.flagIsNewBlock = 1

    self.nTotalBlocks += 1
    self.nReadBlocks += 1

    return 1
183 def getFirstHeader(self):
179 def getFirstHeader(self):
184
180
185 self.getBasicHeader()
181 self.getBasicHeader()
186
182
187 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
183 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
188
184
189 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
185 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
190
186
191 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
187 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
192
188
193 if self.nTxs > 1:
189 if self.nTxs > 1:
194 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
190 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
195 # Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj.
191 # Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj.
196
192
197 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
193 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
198 #
194 #
199 # if self.radarControllerHeaderObj.code is not None:
195 # if self.radarControllerHeaderObj.code is not None:
200 #
196 #
201 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
197 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
202 #
198 #
203 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
199 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
204 #
200 #
205 # self.dataOut.code = self.radarControllerHeaderObj.code
201 # self.dataOut.code = self.radarControllerHeaderObj.code
206
202
207 self.dataOut.dtype = self.dtype
203 self.dataOut.dtype = self.dtype
208
204
209 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
205 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
210
206
211 self.dataOut.heightList = numpy.arange(
207 self.dataOut.heightList = numpy.arange(
212 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
208 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
213
209
214 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
210 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
215
211
216 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
212 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
217
213
218 # asumo q la data no esta decodificada
214 # asumo q la data no esta decodificada
219 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
215 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
220
216
221 # asumo q la data no esta sin flip
217 # asumo q la data no esta sin flip
222 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
218 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
223
219
224 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
220 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
225
221
226 def reshapeData(self):
222 def reshapeData(self):
227
223
228 if self.nTxs < 0:
224 if self.nTxs < 0:
229 return
225 return
230
226
231 if self.nTxs == 1:
227 if self.nTxs == 1:
232 return
228 return
233
229
234 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
230 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
235 raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
231 raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
236 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock))
232 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock))
237
233
238 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
234 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
239 raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % (
235 raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % (
240 self.nTxs, self.processingHeaderObj.nHeights))
236 self.nTxs, self.processingHeaderObj.nHeights))
241
237
242 self.datablock = self.datablock.reshape(
238 self.datablock = self.datablock.reshape(
243 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, int(self.processingHeaderObj.nHeights / self.nTxs)))
239 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, int(self.processingHeaderObj.nHeights / self.nTxs)))
244
240
245 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs
241 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs
246 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
242 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
247 self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
243 self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
248 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
244 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
249
245
250 return
246 return
251
247
252 def readFirstHeaderFromServer(self):
248 def readFirstHeaderFromServer(self):
253
249
254 self.getFirstHeader()
250 self.getFirstHeader()
255
251
256 self.firstHeaderSize = self.basicHeaderObj.size
252 self.firstHeaderSize = self.basicHeaderObj.size
257
253
258 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
254 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
259 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
255 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
260 if datatype == 0:
256 if datatype == 0:
261 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
257 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
262 elif datatype == 1:
258 elif datatype == 1:
263 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
259 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
264 elif datatype == 2:
260 elif datatype == 2:
265 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
261 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
266 elif datatype == 3:
262 elif datatype == 3:
267 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
263 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
268 elif datatype == 4:
264 elif datatype == 4:
269 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
265 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
270 elif datatype == 5:
266 elif datatype == 5:
271 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
267 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
272 else:
268 else:
273 raise ValueError('Data type was not defined')
269 raise ValueError('Data type was not defined')
274
270
275 self.dtype = datatype_str
271 self.dtype = datatype_str
276 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
272 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
277 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
273 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
278 self.firstHeaderSize + self.basicHeaderSize * \
274 self.firstHeaderSize + self.basicHeaderSize * \
279 (self.processingHeaderObj.dataBlocksPerFile - 1)
275 (self.processingHeaderObj.dataBlocksPerFile - 1)
280 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
276 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
281 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
277 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
282 self.getBlockDimension()
278 self.getBlockDimension()
283
279
284 def getFromServer(self):
280 def getFromServer(self):
285 self.flagDiscontinuousBlock = 0
281 self.flagDiscontinuousBlock = 0
286 self.profileIndex = 0
282 self.profileIndex = 0
287 self.flagIsNewBlock = 1
283 self.flagIsNewBlock = 1
288 self.dataOut.flagNoData = False
284 self.dataOut.flagNoData = False
289 self.nTotalBlocks += 1
285 self.nTotalBlocks += 1
290 self.nReadBlocks += 1
286 self.nReadBlocks += 1
291 self.blockPointer = 0
287 self.blockPointer = 0
292
288
293 block = self.receiver.recv()
289 block = self.receiver.recv()
294
290
295 self.basicHeaderObj.read(block[self.blockPointer:])
291 self.basicHeaderObj.read(block[self.blockPointer:])
296 self.blockPointer += self.basicHeaderObj.length
292 self.blockPointer += self.basicHeaderObj.length
297 self.systemHeaderObj.read(block[self.blockPointer:])
293 self.systemHeaderObj.read(block[self.blockPointer:])
298 self.blockPointer += self.systemHeaderObj.length
294 self.blockPointer += self.systemHeaderObj.length
299 self.radarControllerHeaderObj.read(block[self.blockPointer:])
295 self.radarControllerHeaderObj.read(block[self.blockPointer:])
300 self.blockPointer += self.radarControllerHeaderObj.length
296 self.blockPointer += self.radarControllerHeaderObj.length
301 self.processingHeaderObj.read(block[self.blockPointer:])
297 self.processingHeaderObj.read(block[self.blockPointer:])
302 self.blockPointer += self.processingHeaderObj.length
298 self.blockPointer += self.processingHeaderObj.length
303 self.readFirstHeaderFromServer()
299 self.readFirstHeaderFromServer()
304
300
305 timestamp = self.basicHeaderObj.get_datatime()
301 timestamp = self.basicHeaderObj.get_datatime()
306 print('[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp))
302 print('[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp))
307 current_pointer_location = self.blockPointer
303 current_pointer_location = self.blockPointer
308 junk = numpy.fromstring(
304 junk = numpy.fromstring(
309 block[self.blockPointer:], self.dtype, self.blocksize)
305 block[self.blockPointer:], self.dtype, self.blocksize)
310
306
311 try:
307 try:
312 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
308 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
313 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
309 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
314 except:
310 except:
315 # print "The read block (%3d) has not enough data" %self.nReadBlocks
311 # print "The read block (%3d) has not enough data" %self.nReadBlocks
316 if self.waitDataBlock(pointer_location=current_pointer_location):
312 if self.waitDataBlock(pointer_location=current_pointer_location):
317 junk = numpy.fromstring(
313 junk = numpy.fromstring(
318 block[self.blockPointer:], self.dtype, self.blocksize)
314 block[self.blockPointer:], self.dtype, self.blocksize)
319 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
315 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
320 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
316 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
321 # return 0
317 # return 0
322
318
323 # Dimensions : nChannels, nProfiles, nSamples
319 # Dimensions : nChannels, nProfiles, nSamples
324
320
325 junk = numpy.transpose(junk, (2, 0, 1))
321 junk = numpy.transpose(junk, (2, 0, 1))
326 self.datablock = junk['real'] + junk['imag'] * 1j
322 self.datablock = junk['real'] + junk['imag'] * 1j
327 self.profileIndex = 0
323 self.profileIndex = 0
328 if self.selBlocksize == None:
324 if self.selBlocksize == None:
329 self.selBlocksize = self.dataOut.nProfiles
325 self.selBlocksize = self.dataOut.nProfiles
330 if self.selBlocktime != None:
326 if self.selBlocktime != None:
331 if self.dataOut.nCohInt is not None:
327 if self.dataOut.nCohInt is not None:
332 nCohInt = self.dataOut.nCohInt
328 nCohInt = self.dataOut.nCohInt
333 else:
329 else:
334 nCohInt = 1
330 nCohInt = 1
335 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
331 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
336 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
332 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
337 self.dataOut.data = self.datablock[:,
333 self.dataOut.data = self.datablock[:,
338 self.profileIndex:self.profileIndex + self.selBlocksize, :]
334 self.profileIndex:self.profileIndex + self.selBlocksize, :]
339 datasize = self.dataOut.data.shape[1]
335 datasize = self.dataOut.data.shape[1]
340 if datasize < self.selBlocksize:
336 if datasize < self.selBlocksize:
341 buffer = numpy.zeros(
337 buffer = numpy.zeros(
342 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
338 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
343 buffer[:, :datasize, :] = self.dataOut.data
339 buffer[:, :datasize, :] = self.dataOut.data
344 self.dataOut.data = buffer
340 self.dataOut.data = buffer
345 self.profileIndex = blockIndex
341 self.profileIndex = blockIndex
346
342
347 self.dataOut.flagDataAsBlock = True
343 self.dataOut.flagDataAsBlock = True
348 self.flagIsNewBlock = 1
344 self.flagIsNewBlock = 1
349 self.dataOut.realtime = self.online
345 self.dataOut.realtime = self.online
350
346
351 return self.dataOut.data
347 return self.dataOut.data
352
348
353 def getData(self):
349 def getData(self):
354 """
350 """
355 getData obtiene una unidad de datos del buffer de lectura, un perfil, y la copia al objeto self.dataOut
351 getData obtiene una unidad de datos del buffer de lectura, un perfil, y la copia al objeto self.dataOut
356 del tipo "Voltage" con todos los parametros asociados a este (metadata). cuando no hay datos
352 del tipo "Voltage" con todos los parametros asociados a este (metadata). cuando no hay datos
357 en el buffer de lectura es necesario hacer una nueva lectura de los bloques de datos usando
353 en el buffer de lectura es necesario hacer una nueva lectura de los bloques de datos usando
358 "readNextBlock"
354 "readNextBlock"
359
355
360 Ademas incrementa el contador del buffer "self.profileIndex" en 1.
356 Ademas incrementa el contador del buffer "self.profileIndex" en 1.
361
357
362 Return:
358 Return:
363
359
364 Si el flag self.getByBlock ha sido seteado el bloque completo es copiado a self.dataOut y el self.profileIndex
360 Si el flag self.getByBlock ha sido seteado el bloque completo es copiado a self.dataOut y el self.profileIndex
365 es igual al total de perfiles leidos desde el archivo.
361 es igual al total de perfiles leidos desde el archivo.
366
362
367 Si self.getByBlock == False:
363 Si self.getByBlock == False:
368
364
369 self.dataOut.data = buffer[:, thisProfile, :]
365 self.dataOut.data = buffer[:, thisProfile, :]
370
366
371 shape = [nChannels, nHeis]
367 shape = [nChannels, nHeis]
372
368
373 Si self.getByBlock == True:
369 Si self.getByBlock == True:
374
370
375 self.dataOut.data = buffer[:, :, :]
371 self.dataOut.data = buffer[:, :, :]
376
372
377 shape = [nChannels, nProfiles, nHeis]
373 shape = [nChannels, nProfiles, nHeis]
378
374
379 Variables afectadas:
375 Variables afectadas:
380 self.dataOut
376 self.dataOut
381 self.profileIndex
377 self.profileIndex
382
378
383 Affected:
379 Affected:
384 self.dataOut
380 self.dataOut
385 self.profileIndex
381 self.profileIndex
386 self.flagDiscontinuousBlock
382 self.flagDiscontinuousBlock
387 self.flagIsNewBlock
383 self.flagIsNewBlock
388 """
384 """
389 if self.flagNoMoreFiles:
385 if self.flagNoMoreFiles:
390 self.dataOut.flagNoData = True
386 self.dataOut.flagNoData = True
391 return 0
387 return 0
392 self.flagDiscontinuousBlock = 0
388 self.flagDiscontinuousBlock = 0
393 self.flagIsNewBlock = 0
389 self.flagIsNewBlock = 0
394 if self.__hasNotDataInBuffer():
390 if self.__hasNotDataInBuffer():
395 if not(self.readNextBlock()):
391 if not(self.readNextBlock()):
396 return 0
392 return 0
397
393
398 self.getFirstHeader()
394 self.getFirstHeader()
399
395
400 self.reshapeData()
396 self.reshapeData()
401 if self.datablock is None:
397 if self.datablock is None:
402 self.dataOut.flagNoData = True
398 self.dataOut.flagNoData = True
403 return 0
399 return 0
404
400
405 if not self.getByBlock:
401 if not self.getByBlock:
406
402
407 """
403 """
408 Return profile by profile
404 Return profile by profile
409
405
410 If nTxs > 1 then one profile is divided by nTxs and number of total
406 If nTxs > 1 then one profile is divided by nTxs and number of total
411 blocks is increased by nTxs (nProfiles *= nTxs)
407 blocks is increased by nTxs (nProfiles *= nTxs)
412 """
408 """
413 self.dataOut.flagDataAsBlock = False
409 self.dataOut.flagDataAsBlock = False
414 self.dataOut.data = self.datablock[:, self.profileIndex, :]
410 self.dataOut.data = self.datablock[:, self.profileIndex, :]
415 self.dataOut.profileIndex = self.profileIndex
411 self.dataOut.profileIndex = self.profileIndex
416
412
417 self.profileIndex += 1
413 self.profileIndex += 1
418
414
419 else:
415 else:
420 """
416 """
421 Return a block
417 Return a block
422 """
418 """
423 if self.selBlocksize == None:
419 if self.selBlocksize == None:
424 self.selBlocksize = self.dataOut.nProfiles
420 self.selBlocksize = self.dataOut.nProfiles
425 if self.selBlocktime != None:
421 if self.selBlocktime != None:
426 if self.dataOut.nCohInt is not None:
422 if self.dataOut.nCohInt is not None:
427 nCohInt = self.dataOut.nCohInt
423 nCohInt = self.dataOut.nCohInt
428 else:
424 else:
429 nCohInt = 1
425 nCohInt = 1
430 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
426 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
431 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
427 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
432
428
433 self.dataOut.data = self.datablock[:,
429 self.dataOut.data = self.datablock[:,
434 self.profileIndex:self.profileIndex + self.selBlocksize, :]
430 self.profileIndex:self.profileIndex + self.selBlocksize, :]
435 self.profileIndex += self.selBlocksize
431 self.profileIndex += self.selBlocksize
436 datasize = self.dataOut.data.shape[1]
432 datasize = self.dataOut.data.shape[1]
437
433
438 if datasize < self.selBlocksize:
434 if datasize < self.selBlocksize:
439 buffer = numpy.zeros(
435 buffer = numpy.zeros(
440 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
436 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
441 buffer[:, :datasize, :] = self.dataOut.data
437 buffer[:, :datasize, :] = self.dataOut.data
442
438
443 while datasize < self.selBlocksize: # Not enough profiles to fill the block
439 while datasize < self.selBlocksize: # Not enough profiles to fill the block
444 if not(self.readNextBlock()):
440 if not(self.readNextBlock()):
445 return 0
441 return 0
446 self.getFirstHeader()
442 self.getFirstHeader()
447 self.reshapeData()
443 self.reshapeData()
448 if self.datablock is None:
444 if self.datablock is None:
449 self.dataOut.flagNoData = True
445 self.dataOut.flagNoData = True
450 return 0
446 return 0
451 # stack data
447 # stack data
452 blockIndex = self.selBlocksize - datasize
448 blockIndex = self.selBlocksize - datasize
453 datablock1 = self.datablock[:, :blockIndex, :]
449 datablock1 = self.datablock[:, :blockIndex, :]
454
450
455 buffer[:, datasize:datasize +
451 buffer[:, datasize:datasize +
456 datablock1.shape[1], :] = datablock1
452 datablock1.shape[1], :] = datablock1
457 datasize += datablock1.shape[1]
453 datasize += datablock1.shape[1]
458
454
459 self.dataOut.data = buffer
455 self.dataOut.data = buffer
460 self.profileIndex = blockIndex
456 self.profileIndex = blockIndex
461
457
462 self.dataOut.flagDataAsBlock = True
458 self.dataOut.flagDataAsBlock = True
463 self.dataOut.nProfiles = self.dataOut.data.shape[1]
459 self.dataOut.nProfiles = self.dataOut.data.shape[1]
464
460
465 self.dataOut.flagNoData = False
461 self.dataOut.flagNoData = False
466
462
467 self.getBasicHeader()
463 self.getBasicHeader()
468
464
469 self.dataOut.realtime = self.online
465 self.dataOut.realtime = self.online
470
466
471 return self.dataOut.data
467 return self.dataOut.data
472
468
473
469
474 @MPDecorator
470 @MPDecorator
475 class VoltageWriter(JRODataWriter, Operation):
471 class VoltageWriter(JRODataWriter, Operation):
476 """
472 """
477 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
473 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
478 de los datos siempre se realiza por bloques.
474 de los datos siempre se realiza por bloques.
479 """
475 """
480
476
481 ext = ".r"
477 ext = ".r"
482
478
483 optchar = "D"
479 optchar = "D"
484
480
485 shapeBuffer = None
481 shapeBuffer = None
486
482
487 def __init__(self):#, **kwargs):
483 def __init__(self):#, **kwargs):
488 """
484 """
489 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
485 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
490
486
491 Affected:
487 Affected:
492 self.dataOut
488 self.dataOut
493
489
494 Return: None
490 Return: None
495 """
491 """
496 Operation.__init__(self)#, **kwargs)
492 Operation.__init__(self)#, **kwargs)
497
493
498 self.nTotalBlocks = 0
494 self.nTotalBlocks = 0
499
495
500 self.profileIndex = 0
496 self.profileIndex = 0
501
497
502 self.isConfig = False
498 self.isConfig = False
503
499
504 self.fp = None
500 self.fp = None
505
501
506 self.flagIsNewFile = 1
502 self.flagIsNewFile = 1
507
503
508 self.blockIndex = 0
504 self.blockIndex = 0
509
505
510 self.flagIsNewBlock = 0
506 self.flagIsNewBlock = 0
511
507
512 self.setFile = None
508 self.setFile = None
513
509
514 self.dtype = None
510 self.dtype = None
515
511
516 self.path = None
512 self.path = None
517
513
518 self.filename = None
514 self.filename = None
519
515
520 self.basicHeaderObj = BasicHeader(LOCALTIME)
516 self.basicHeaderObj = BasicHeader(LOCALTIME)
521
517
522 self.systemHeaderObj = SystemHeader()
518 self.systemHeaderObj = SystemHeader()
523
519
524 self.radarControllerHeaderObj = RadarControllerHeader()
520 self.radarControllerHeaderObj = RadarControllerHeader()
525
521
526 self.processingHeaderObj = ProcessingHeader()
522 self.processingHeaderObj = ProcessingHeader()
527
523
528 def hasAllDataInBuffer(self):
524 def hasAllDataInBuffer(self):
529 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
525 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
530 return 1
526 return 1
531 return 0
527 return 0
532
528
533 def setBlockDimension(self):
529 def setBlockDimension(self):
534 """
530 """
535 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
531 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
536
532
537 Affected:
533 Affected:
538 self.shape_spc_Buffer
534 self.shape_spc_Buffer
539 self.shape_cspc_Buffer
535 self.shape_cspc_Buffer
540 self.shape_dc_Buffer
536 self.shape_dc_Buffer
541
537
542 Return: None
538 Return: None
543 """
539 """
544 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
540 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
545 self.processingHeaderObj.nHeights,
541 self.processingHeaderObj.nHeights,
546 self.systemHeaderObj.nChannels)
542 self.systemHeaderObj.nChannels)
547
543
548 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
544 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
549 self.processingHeaderObj.profilesPerBlock,
545 self.processingHeaderObj.profilesPerBlock,
550 self.processingHeaderObj.nHeights),
546 self.processingHeaderObj.nHeights),
551 dtype=numpy.dtype('complex64'))
547 dtype=numpy.dtype('complex64'))
552
548
553 def writeBlock(self):
549 def writeBlock(self):
554 """
550 """
555 Escribe el buffer en el file designado
551 Escribe el buffer en el file designado
556
552
557 Affected:
553 Affected:
558 self.profileIndex
554 self.profileIndex
559 self.flagIsNewFile
555 self.flagIsNewFile
560 self.flagIsNewBlock
556 self.flagIsNewBlock
561 self.nTotalBlocks
557 self.nTotalBlocks
562 self.blockIndex
558 self.blockIndex
563
559
564 Return: None
560 Return: None
565 """
561 """
566 data = numpy.zeros(self.shapeBuffer, self.dtype)
562 data = numpy.zeros(self.shapeBuffer, self.dtype)
567
563
568 junk = numpy.transpose(self.datablock, (1, 2, 0))
564 junk = numpy.transpose(self.datablock, (1, 2, 0))
569
565
570 data['real'] = junk.real
566 data['real'] = junk.real
571 data['imag'] = junk.imag
567 data['imag'] = junk.imag
572
568
573 data = data.reshape((-1))
569 data = data.reshape((-1))
574
570
575 data.tofile(self.fp)
571 data.tofile(self.fp)
576
572
577 self.datablock.fill(0)
573 self.datablock.fill(0)
578
574
579 self.profileIndex = 0
575 self.profileIndex = 0
580 self.flagIsNewFile = 0
576 self.flagIsNewFile = 0
581 self.flagIsNewBlock = 1
577 self.flagIsNewBlock = 1
582
578
583 self.blockIndex += 1
579 self.blockIndex += 1
584 self.nTotalBlocks += 1
580 self.nTotalBlocks += 1
585
581
586 # print "[Writing] Block = %04d" %self.blockIndex
582 # print "[Writing] Block = %04d" %self.blockIndex
587
583
588 def putData(self):
584 def putData(self):
589 """
585 """
590 Setea un bloque de datos y luego los escribe en un file
586 Setea un bloque de datos y luego los escribe en un file
591
587
592 Affected:
588 Affected:
593 self.flagIsNewBlock
589 self.flagIsNewBlock
594 self.profileIndex
590 self.profileIndex
595
591
596 Return:
592 Return:
597 0 : Si no hay data o no hay mas files que puedan escribirse
593 0 : Si no hay data o no hay mas files que puedan escribirse
598 1 : Si se escribio la data de un bloque en un file
594 1 : Si se escribio la data de un bloque en un file
599 """
595 """
600 if self.dataOut.flagNoData:
596 if self.dataOut.flagNoData:
601 return 0
597 return 0
602
598
603 self.flagIsNewBlock = 0
599 self.flagIsNewBlock = 0
604
600
605 if self.dataOut.flagDiscontinuousBlock:
601 if self.dataOut.flagDiscontinuousBlock:
606 self.datablock.fill(0)
602 self.datablock.fill(0)
607 self.profileIndex = 0
603 self.profileIndex = 0
608 self.setNextFile()
604 self.setNextFile()
609
605
610 if self.profileIndex == 0:
606 if self.profileIndex == 0:
611 self.setBasicHeader()
607 self.setBasicHeader()
612
608
613 self.datablock[:, self.profileIndex, :] = self.dataOut.data
609 self.datablock[:, self.profileIndex, :] = self.dataOut.data
614
610
615 self.profileIndex += 1
611 self.profileIndex += 1
616
612
617 if self.hasAllDataInBuffer():
613 if self.hasAllDataInBuffer():
618 # if self.flagIsNewFile:
614 # if self.flagIsNewFile:
619 self.writeNextBlock()
615 self.writeNextBlock()
620 # self.setFirstHeader()
616 # self.setFirstHeader()
621
617
622 return 1
618 return 1
623
619
624 def __getBlockSize(self):
620 def __getBlockSize(self):
625 '''
621 '''
626 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
622 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
627 '''
623 '''
628
624
629 dtype_width = self.getDtypeWidth()
625 dtype_width = self.getDtypeWidth()
630
626
631 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
627 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
632 self.profilesPerBlock * dtype_width * 2)
628 self.profilesPerBlock * dtype_width * 2)
633
629
634 return blocksize
630 return blocksize
635
631
636 def setFirstHeader(self):
632 def setFirstHeader(self):
637 """
633 """
638 Obtiene una copia del First Header
634 Obtiene una copia del First Header
639
635
640 Affected:
636 Affected:
641 self.systemHeaderObj
637 self.systemHeaderObj
642 self.radarControllerHeaderObj
638 self.radarControllerHeaderObj
643 self.dtype
639 self.dtype
644
640
645 Return:
641 Return:
646 None
642 None
647 """
643 """
648
644
649 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
645 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
650 self.systemHeaderObj.nChannels = self.dataOut.nChannels
646 self.systemHeaderObj.nChannels = self.dataOut.nChannels
651 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
647 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
652
648
653 self.processingHeaderObj.dtype = 0 # Voltage
649 self.processingHeaderObj.dtype = 0 # Voltage
654 self.processingHeaderObj.blockSize = self.__getBlockSize()
650 self.processingHeaderObj.blockSize = self.__getBlockSize()
655 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
651 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
656 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
652 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
657 # podria ser 1 o self.dataOut.processingHeaderObj.nWindows
653 # podria ser 1 o self.dataOut.processingHeaderObj.nWindows
658 self.processingHeaderObj.nWindows = 1
654 self.processingHeaderObj.nWindows = 1
659 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
655 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
660 # Cuando la data de origen es de tipo Voltage
656 # Cuando la data de origen es de tipo Voltage
661 self.processingHeaderObj.nIncohInt = 1
657 self.processingHeaderObj.nIncohInt = 1
662 # Cuando la data de origen es de tipo Voltage
658 # Cuando la data de origen es de tipo Voltage
663 self.processingHeaderObj.totalSpectra = 0
659 self.processingHeaderObj.totalSpectra = 0
664
660
665 if self.dataOut.code is not None:
661 if self.dataOut.code is not None:
666 self.processingHeaderObj.code = self.dataOut.code
662 self.processingHeaderObj.code = self.dataOut.code
667 self.processingHeaderObj.nCode = self.dataOut.nCode
663 self.processingHeaderObj.nCode = self.dataOut.nCode
668 self.processingHeaderObj.nBaud = self.dataOut.nBaud
664 self.processingHeaderObj.nBaud = self.dataOut.nBaud
669
665
670 if self.processingHeaderObj.nWindows != 0:
666 if self.processingHeaderObj.nWindows != 0:
671 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
667 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
672 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
668 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
673 self.dataOut.heightList[0]
669 self.dataOut.heightList[0]
674 self.processingHeaderObj.nHeights = self.dataOut.nHeights
670 self.processingHeaderObj.nHeights = self.dataOut.nHeights
675 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
671 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
676
672
677 self.processingHeaderObj.processFlags = self.getProcessFlags()
673 self.processingHeaderObj.processFlags = self.getProcessFlags()
678
674
679 self.setBasicHeader()
675 self.setBasicHeader()
680 No newline at end of file
676
@@ -1,402 +1,402
1 '''
1 '''
2 Created on Oct 24, 2016
2 Created on Oct 24, 2016
3
3
4 @author: roj- LouVD
4 @author: roj- LouVD
5 '''
5 '''
6
6
7 import numpy
7 import numpy
8 import copy
8 import copy
9 import datetime
9 import datetime
10 import time
10 import time
11 from time import gmtime
11 from time import gmtime
12
12
13 from numpy import transpose
13 from numpy import transpose
14
14
15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
16 from schainpy.model.data.jrodata import Parameters
16 from schainpy.model.data.jrodata import Parameters
17
17
18 @MPDecorator
18
19 class BLTRParametersProc(ProcessingUnit):
19 class BLTRParametersProc(ProcessingUnit):
20 '''
20 '''
21 Processing unit for BLTR parameters data (winds)
21 Processing unit for BLTR parameters data (winds)
22
22
23 Inputs:
23 Inputs:
24 self.dataOut.nmodes - Number of operation modes
24 self.dataOut.nmodes - Number of operation modes
25 self.dataOut.nchannels - Number of channels
25 self.dataOut.nchannels - Number of channels
26 self.dataOut.nranges - Number of ranges
26 self.dataOut.nranges - Number of ranges
27
27
28 self.dataOut.data_SNR - SNR array
28 self.dataOut.data_SNR - SNR array
29 self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
29 self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
30 self.dataOut.height - Height array (km)
30 self.dataOut.height - Height array (km)
31 self.dataOut.time - Time array (seconds)
31 self.dataOut.time - Time array (seconds)
32
32
33 self.dataOut.fileIndex -Index of the file currently read
33 self.dataOut.fileIndex -Index of the file currently read
34 self.dataOut.lat - Latitude coordinate of BLTR location
34 self.dataOut.lat - Latitude coordinate of BLTR location
35
35
36 self.dataOut.doy - Experiment doy (number of the day in the current year)
36 self.dataOut.doy - Experiment doy (number of the day in the current year)
37 self.dataOut.month - Experiment month
37 self.dataOut.month - Experiment month
38 self.dataOut.day - Experiment day
38 self.dataOut.day - Experiment day
39 self.dataOut.year - Experiment year
39 self.dataOut.year - Experiment year
40 '''
40 '''
41
41
42 def __init__(self):
42 def __init__(self):
43 '''
43 '''
44 Inputs: None
44 Inputs: None
45 '''
45 '''
46 ProcessingUnit.__init__(self)
46 ProcessingUnit.__init__(self)
47 self.dataOut = Parameters()
47 self.dataOut = Parameters()
48
48
49 def setup(self, mode):
49 def setup(self, mode):
50 '''
50 '''
51 '''
51 '''
52 self.dataOut.mode = mode
52 self.dataOut.mode = mode
53
53
54 def run(self, mode, snr_threshold=None):
54 def run(self, mode, snr_threshold=None):
55 '''
55 '''
56 Inputs:
56 Inputs:
57 mode = High resolution (0) or Low resolution (1) data
57 mode = High resolution (0) or Low resolution (1) data
58 snr_threshold = snr filter value
58 snr_threshold = snr filter value
59 '''
59 '''
60
60
61 if not self.isConfig:
61 if not self.isConfig:
62 self.setup(mode)
62 self.setup(mode)
63 self.isConfig = True
63 self.isConfig = True
64
64
65 if self.dataIn.type == 'Parameters':
65 if self.dataIn.type == 'Parameters':
66 self.dataOut.copy(self.dataIn)
66 self.dataOut.copy(self.dataIn)
67
67
68 self.dataOut.data_param = self.dataOut.data[mode]
68 self.dataOut.data_param = self.dataOut.data[mode]
69 self.dataOut.heightList = self.dataOut.height[0]
69 self.dataOut.heightList = self.dataOut.height[0]
70 self.dataOut.data_SNR = self.dataOut.data_SNR[mode]
70 self.dataOut.data_SNR = self.dataOut.data_SNR[mode]
71
71
72 if snr_threshold is not None:
72 if snr_threshold is not None:
73 SNRavg = numpy.average(self.dataOut.data_SNR, axis=0)
73 SNRavg = numpy.average(self.dataOut.data_SNR, axis=0)
74 SNRavgdB = 10*numpy.log10(SNRavg)
74 SNRavgdB = 10*numpy.log10(SNRavg)
75 for i in range(3):
75 for i in range(3):
76 self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan
76 self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan
77
77
78 # TODO
78 # TODO
79 @MPDecorator
79
80 class OutliersFilter(Operation):
80 class OutliersFilter(Operation):
81
81
82 def __init__(self):
82 def __init__(self):
83 '''
83 '''
84 '''
84 '''
85 Operation.__init__(self)
85 Operation.__init__(self)
86
86
87 def run(self, svalue2, method, factor, filter, npoints=9):
87 def run(self, svalue2, method, factor, filter, npoints=9):
88 '''
88 '''
89 Inputs:
89 Inputs:
90 svalue - string to select array velocity
90 svalue - string to select array velocity
91 svalue2 - string to choose axis filtering
91 svalue2 - string to choose axis filtering
92 method - 0 for SMOOTH or 1 for MEDIAN
92 method - 0 for SMOOTH or 1 for MEDIAN
93 factor - number used to set threshold
93 factor - number used to set threshold
94 filter - 1 for data filtering using the standard deviation criteria else 0
94 filter - 1 for data filtering using the standard deviation criteria else 0
95 npoints - number of points for mask filter
95 npoints - number of points for mask filter
96 '''
96 '''
97
97
98 print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor))
98 print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor))
99
99
100
100
101 yaxis = self.dataOut.heightList
101 yaxis = self.dataOut.heightList
102 xaxis = numpy.array([[self.dataOut.utctime]])
102 xaxis = numpy.array([[self.dataOut.utctime]])
103
103
104 # Zonal
104 # Zonal
105 value_temp = self.dataOut.data_output[0]
105 value_temp = self.dataOut.data_output[0]
106
106
107 # Zonal
107 # Zonal
108 value_temp = self.dataOut.data_output[1]
108 value_temp = self.dataOut.data_output[1]
109
109
110 # Vertical
110 # Vertical
111 value_temp = numpy.transpose(self.dataOut.data_output[2])
111 value_temp = numpy.transpose(self.dataOut.data_output[2])
112
112
113 htemp = yaxis
113 htemp = yaxis
114 std = value_temp
114 std = value_temp
115 for h in range(len(htemp)):
115 for h in range(len(htemp)):
116 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
116 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
117 minvalid = npoints
117 minvalid = npoints
118
118
119 #only if valid values greater than the minimum required (10%)
119 #only if valid values greater than the minimum required (10%)
120 if nvalues_valid > minvalid:
120 if nvalues_valid > minvalid:
121
121
122 if method == 0:
122 if method == 0:
123 #SMOOTH
123 #SMOOTH
124 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
124 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
125
125
126
126
127 if method == 1:
127 if method == 1:
128 #MEDIAN
128 #MEDIAN
129 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
129 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
130
130
131 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
131 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
132
132
133 threshold = dw*factor
133 threshold = dw*factor
134 value_temp[numpy.where(w > threshold),h] = numpy.nan
134 value_temp[numpy.where(w > threshold),h] = numpy.nan
135 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
135 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
136
136
137
137
138 #At the end
138 #At the end
139 if svalue2 == 'inHeight':
139 if svalue2 == 'inHeight':
140 value_temp = numpy.transpose(value_temp)
140 value_temp = numpy.transpose(value_temp)
141 output_array[:,m] = value_temp
141 output_array[:,m] = value_temp
142
142
143 if svalue == 'zonal':
143 if svalue == 'zonal':
144 self.dataOut.data_output[0] = output_array
144 self.dataOut.data_output[0] = output_array
145
145
146 elif svalue == 'meridional':
146 elif svalue == 'meridional':
147 self.dataOut.data_output[1] = output_array
147 self.dataOut.data_output[1] = output_array
148
148
149 elif svalue == 'vertical':
149 elif svalue == 'vertical':
150 self.dataOut.data_output[2] = output_array
150 self.dataOut.data_output[2] = output_array
151
151
152 return self.dataOut.data_output
152 return self.dataOut.data_output
153
153
154
154
155 def Median(self,input,width):
155 def Median(self,input,width):
156 '''
156 '''
157 Inputs:
157 Inputs:
158 input - Velocity array
158 input - Velocity array
159 width - Number of points for mask filter
159 width - Number of points for mask filter
160
160
161 '''
161 '''
162
162
163 if numpy.mod(width,2) == 1:
163 if numpy.mod(width,2) == 1:
164 pc = int((width - 1) / 2)
164 pc = int((width - 1) / 2)
165 cont = 0
165 cont = 0
166 output = []
166 output = []
167
167
168 for i in range(len(input)):
168 for i in range(len(input)):
169 if i >= pc and i < len(input) - pc:
169 if i >= pc and i < len(input) - pc:
170 new2 = input[i-pc:i+pc+1]
170 new2 = input[i-pc:i+pc+1]
171 temp = numpy.where(numpy.isfinite(new2))
171 temp = numpy.where(numpy.isfinite(new2))
172 new = new2[temp]
172 new = new2[temp]
173 value = numpy.median(new)
173 value = numpy.median(new)
174 output.append(value)
174 output.append(value)
175
175
176 output = numpy.array(output)
176 output = numpy.array(output)
177 output = numpy.hstack((input[0:pc],output))
177 output = numpy.hstack((input[0:pc],output))
178 output = numpy.hstack((output,input[-pc:len(input)]))
178 output = numpy.hstack((output,input[-pc:len(input)]))
179
179
180 return output
180 return output
181
181
182 def Smooth(self,input,width,edge_truncate = None):
182 def Smooth(self,input,width,edge_truncate = None):
183 '''
183 '''
184 Inputs:
184 Inputs:
185 input - Velocity array
185 input - Velocity array
186 width - Number of points for mask filter
186 width - Number of points for mask filter
187 edge_truncate - 1 for truncate the convolution product else
187 edge_truncate - 1 for truncate the convolution product else
188
188
189 '''
189 '''
190
190
191 if numpy.mod(width,2) == 0:
191 if numpy.mod(width,2) == 0:
192 real_width = width + 1
192 real_width = width + 1
193 nzeros = width / 2
193 nzeros = width / 2
194 else:
194 else:
195 real_width = width
195 real_width = width
196 nzeros = (width - 1) / 2
196 nzeros = (width - 1) / 2
197
197
198 half_width = int(real_width)/2
198 half_width = int(real_width)/2
199 length = len(input)
199 length = len(input)
200
200
201 gate = numpy.ones(real_width,dtype='float')
201 gate = numpy.ones(real_width,dtype='float')
202 norm_of_gate = numpy.sum(gate)
202 norm_of_gate = numpy.sum(gate)
203
203
204 nan_process = 0
204 nan_process = 0
205 nan_id = numpy.where(numpy.isnan(input))
205 nan_id = numpy.where(numpy.isnan(input))
206 if len(nan_id[0]) > 0:
206 if len(nan_id[0]) > 0:
207 nan_process = 1
207 nan_process = 1
208 pb = numpy.zeros(len(input))
208 pb = numpy.zeros(len(input))
209 pb[nan_id] = 1.
209 pb[nan_id] = 1.
210 input[nan_id] = 0.
210 input[nan_id] = 0.
211
211
212 if edge_truncate == True:
212 if edge_truncate == True:
213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
214 elif edge_truncate == False or edge_truncate == None:
214 elif edge_truncate == False or edge_truncate == None:
215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
216 output = numpy.hstack((input[0:half_width],output))
216 output = numpy.hstack((input[0:half_width],output))
217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
218
218
219 if nan_process:
219 if nan_process:
220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
221 pb = numpy.hstack((numpy.zeros(half_width),pb))
221 pb = numpy.hstack((numpy.zeros(half_width),pb))
222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
223 output[numpy.where(pb > 0.9999)] = numpy.nan
223 output[numpy.where(pb > 0.9999)] = numpy.nan
224 input[nan_id] = numpy.nan
224 input[nan_id] = numpy.nan
225 return output
225 return output
226
226
227 def Average(self,aver=0,nhaver=1):
227 def Average(self,aver=0,nhaver=1):
228 '''
228 '''
229 Inputs:
229 Inputs:
230 aver - Indicates the time period over which is averaged or consensus data
230 aver - Indicates the time period over which is averaged or consensus data
231 nhaver - Indicates the decimation factor in heights
231 nhaver - Indicates the decimation factor in heights
232
232
233 '''
233 '''
234 nhpoints = 48
234 nhpoints = 48
235
235
236 lat_piura = -5.17
236 lat_piura = -5.17
237 lat_huancayo = -12.04
237 lat_huancayo = -12.04
238 lat_porcuya = -5.8
238 lat_porcuya = -5.8
239
239
240 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
240 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
241 hcm = 3.
241 hcm = 3.
242 if self.dataOut.year == 2003 :
242 if self.dataOut.year == 2003 :
243 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
243 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
244 nhpoints = 12
244 nhpoints = 12
245
245
246 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
246 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
247 hcm = 3.
247 hcm = 3.
248 if self.dataOut.year == 2003 :
248 if self.dataOut.year == 2003 :
249 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
249 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
250 nhpoints = 12
250 nhpoints = 12
251
251
252
252
253 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
253 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
254 hcm = 5.#2
254 hcm = 5.#2
255
255
256 pdata = 0.2
256 pdata = 0.2
257 taver = [1,2,3,4,6,8,12,24]
257 taver = [1,2,3,4,6,8,12,24]
258 t0 = 0
258 t0 = 0
259 tf = 24
259 tf = 24
260 ntime =(tf-t0)/taver[aver]
260 ntime =(tf-t0)/taver[aver]
261 ti = numpy.arange(ntime)
261 ti = numpy.arange(ntime)
262 tf = numpy.arange(ntime) + taver[aver]
262 tf = numpy.arange(ntime) + taver[aver]
263
263
264
264
265 old_height = self.dataOut.heightList
265 old_height = self.dataOut.heightList
266
266
267 if nhaver > 1:
267 if nhaver > 1:
268 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
268 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
269 deltha = 0.05*nhaver
269 deltha = 0.05*nhaver
270 minhvalid = pdata*nhaver
270 minhvalid = pdata*nhaver
271 for im in range(self.dataOut.nmodes):
271 for im in range(self.dataOut.nmodes):
272 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
272 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
273
273
274
274
275 data_fHeigths_List = []
275 data_fHeigths_List = []
276 data_fZonal_List = []
276 data_fZonal_List = []
277 data_fMeridional_List = []
277 data_fMeridional_List = []
278 data_fVertical_List = []
278 data_fVertical_List = []
279 startDTList = []
279 startDTList = []
280
280
281
281
282 for i in range(ntime):
282 for i in range(ntime):
283 height = old_height
283 height = old_height
284
284
285 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
285 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
286 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
286 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
287
287
288
288
289 limit_sec1 = time.mktime(start.timetuple())
289 limit_sec1 = time.mktime(start.timetuple())
290 limit_sec2 = time.mktime(stop.timetuple())
290 limit_sec2 = time.mktime(stop.timetuple())
291
291
292 t1 = numpy.where(self.f_timesec >= limit_sec1)
292 t1 = numpy.where(self.f_timesec >= limit_sec1)
293 t2 = numpy.where(self.f_timesec < limit_sec2)
293 t2 = numpy.where(self.f_timesec < limit_sec2)
294 time_select = []
294 time_select = []
295 for val_sec in t1[0]:
295 for val_sec in t1[0]:
296 if val_sec in t2[0]:
296 if val_sec in t2[0]:
297 time_select.append(val_sec)
297 time_select.append(val_sec)
298
298
299
299
300 time_select = numpy.array(time_select,dtype = 'int')
300 time_select = numpy.array(time_select,dtype = 'int')
301 minvalid = numpy.ceil(pdata*nhpoints)
301 minvalid = numpy.ceil(pdata*nhpoints)
302
302
303 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
303 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
304 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
304 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
305 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
305 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
306
306
307 if nhaver > 1:
307 if nhaver > 1:
308 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
308 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
309 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
309 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
310 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
310 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
311
311
312 if len(time_select) > minvalid:
312 if len(time_select) > minvalid:
313 time_average = self.f_timesec[time_select]
313 time_average = self.f_timesec[time_select]
314
314
315 for im in range(self.dataOut.nmodes):
315 for im in range(self.dataOut.nmodes):
316
316
317 for ih in range(self.dataOut.nranges):
317 for ih in range(self.dataOut.nranges):
318 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
318 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
319 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
319 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
320
320
321 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
321 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
322 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
322 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
323
323
324 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
324 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
325 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
325 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
326
326
327 if nhaver > 1:
327 if nhaver > 1:
328 for ih in range(num_hei):
328 for ih in range(num_hei):
329 hvalid = numpy.arange(nhaver) + nhaver*ih
329 hvalid = numpy.arange(nhaver) + nhaver*ih
330
330
331 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
331 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
332 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
332 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
333
333
334 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
334 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
335 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
335 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
336
336
337 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
337 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
338 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
338 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
339 if nhaver > 1:
339 if nhaver > 1:
340 zon_aver = new_zon_aver
340 zon_aver = new_zon_aver
341 mer_aver = new_mer_aver
341 mer_aver = new_mer_aver
342 ver_aver = new_ver_aver
342 ver_aver = new_ver_aver
343 height = new_height
343 height = new_height
344
344
345
345
346 tstart = time_average[0]
346 tstart = time_average[0]
347 tend = time_average[-1]
347 tend = time_average[-1]
348 startTime = time.gmtime(tstart)
348 startTime = time.gmtime(tstart)
349
349
350 year = startTime.tm_year
350 year = startTime.tm_year
351 month = startTime.tm_mon
351 month = startTime.tm_mon
352 day = startTime.tm_mday
352 day = startTime.tm_mday
353 hour = startTime.tm_hour
353 hour = startTime.tm_hour
354 minute = startTime.tm_min
354 minute = startTime.tm_min
355 second = startTime.tm_sec
355 second = startTime.tm_sec
356
356
357 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
357 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
358
358
359
359
360 o_height = numpy.array([])
360 o_height = numpy.array([])
361 o_zon_aver = numpy.array([])
361 o_zon_aver = numpy.array([])
362 o_mer_aver = numpy.array([])
362 o_mer_aver = numpy.array([])
363 o_ver_aver = numpy.array([])
363 o_ver_aver = numpy.array([])
364 if self.dataOut.nmodes > 1:
364 if self.dataOut.nmodes > 1:
365 for im in range(self.dataOut.nmodes):
365 for im in range(self.dataOut.nmodes):
366
366
367 if im == 0:
367 if im == 0:
368 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
368 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
369 else:
369 else:
370 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
370 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
371
371
372
372
373 ht = h_select[0]
373 ht = h_select[0]
374
374
375 o_height = numpy.hstack((o_height,height[im,ht]))
375 o_height = numpy.hstack((o_height,height[im,ht]))
376 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
376 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
377 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
377 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
378 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
378 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
379
379
380 data_fHeigths_List.append(o_height)
380 data_fHeigths_List.append(o_height)
381 data_fZonal_List.append(o_zon_aver)
381 data_fZonal_List.append(o_zon_aver)
382 data_fMeridional_List.append(o_mer_aver)
382 data_fMeridional_List.append(o_mer_aver)
383 data_fVertical_List.append(o_ver_aver)
383 data_fVertical_List.append(o_ver_aver)
384
384
385
385
386 else:
386 else:
387 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
387 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
388 ht = h_select[0]
388 ht = h_select[0]
389 o_height = numpy.hstack((o_height,height[im,ht]))
389 o_height = numpy.hstack((o_height,height[im,ht]))
390 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
390 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
391 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
391 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
392 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
392 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
393
393
394 data_fHeigths_List.append(o_height)
394 data_fHeigths_List.append(o_height)
395 data_fZonal_List.append(o_zon_aver)
395 data_fZonal_List.append(o_zon_aver)
396 data_fMeridional_List.append(o_mer_aver)
396 data_fMeridional_List.append(o_mer_aver)
397 data_fVertical_List.append(o_ver_aver)
397 data_fVertical_List.append(o_ver_aver)
398
398
399
399
400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401
401
402
402
@@ -1,143 +1,143
1 '''
1 '''
2 @author: Daniel Suarez
2 @author: Daniel Suarez
3 '''
3 '''
4 import numpy
4 import numpy
5 from .jroproc_base import ProcessingUnit, Operation
5 from .jroproc_base import ProcessingUnit, Operation
6 from schainpy.model.data.jroamisr import AMISR
6 from schainpy.model.data.jroamisr import AMISR
7
7
8 class AMISRProc(ProcessingUnit):
8 class AMISRProc(ProcessingUnit):
9 def __init__(self, **kwargs):
9 def __init__(self, **kwargs):
10 ProcessingUnit.__init__(self, **kwargs)
10 ProcessingUnit.__init__(self, **kwargs)
11 self.objectDict = {}
11 self.objectDict = {}
12 self.dataOut = AMISR()
12 self.dataOut = AMISR()
13
13
14 def run(self):
14 def run(self):
15 if self.dataIn.type == 'AMISR':
15 if self.dataIn.type == 'AMISR':
16 self.dataOut.copy(self.dataIn)
16 self.dataOut.copy(self.dataIn)
17
17
18
18
19 class PrintInfo(Operation):
19 class PrintInfoAMISR(Operation):
20 def __init__(self, **kwargs):
20 def __init__(self, **kwargs):
21 Operation.__init__(self, **kwargs)
21 Operation.__init__(self, **kwargs)
22 self.__isPrinted = False
22 self.__isPrinted = False
23
23
24 def run(self, dataOut):
24 def run(self, dataOut):
25
25
26 if not self.__isPrinted:
26 if not self.__isPrinted:
27 print('Number of Records by File: %d'%dataOut.nRecords)
27 print('Number of Records by File: %d'%dataOut.nRecords)
28 print('Number of Pulses: %d'%dataOut.nProfiles)
28 print('Number of Pulses: %d'%dataOut.nProfiles)
29 print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
29 print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
30 print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
30 print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
31 print('Ipp Seconds: %f'%dataOut.ippSeconds)
31 print('Ipp Seconds: %f'%dataOut.ippSeconds)
32 print('Number of Beams: %d'%dataOut.nBeams)
32 print('Number of Beams: %d'%dataOut.nBeams)
33 print('BeamCodes:')
33 print('BeamCodes:')
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
35 for b in beamStrList:
35 for b in beamStrList:
36 print(b)
36 print(b)
37 self.__isPrinted = True
37 self.__isPrinted = True
38
38
39 return
39 return
40
40
41
41
42 class BeamSelector(Operation):
42 class BeamSelector(Operation):
43 profileIndex = None
43 profileIndex = None
44 nProfiles = None
44 nProfiles = None
45
45
46 def __init__(self, **kwargs):
46 def __init__(self, **kwargs):
47 Operation.__init__(self, **kwargs)
47 Operation.__init__(self, **kwargs)
48 self.profileIndex = 0
48 self.profileIndex = 0
49 self.__isConfig = False
49 self.__isConfig = False
50
50
51 def incIndex(self):
51 def incIndex(self):
52 self.profileIndex += 1
52 self.profileIndex += 1
53
53
54 if self.profileIndex >= self.nProfiles:
54 if self.profileIndex >= self.nProfiles:
55 self.profileIndex = 0
55 self.profileIndex = 0
56
56
57 def isProfileInRange(self, minIndex, maxIndex):
57 def isProfileInRange(self, minIndex, maxIndex):
58
58
59 if self.profileIndex < minIndex:
59 if self.profileIndex < minIndex:
60 return False
60 return False
61
61
62 if self.profileIndex > maxIndex:
62 if self.profileIndex > maxIndex:
63 return False
63 return False
64
64
65 return True
65 return True
66
66
67 def isProfileInList(self, profileList):
67 def isProfileInList(self, profileList):
68
68
69 if self.profileIndex not in profileList:
69 if self.profileIndex not in profileList:
70 return False
70 return False
71
71
72 return True
72 return True
73
73
74 def run(self, dataOut, beam=None):
74 def run(self, dataOut, beam=None):
75
75
76 dataOut.flagNoData = True
76 dataOut.flagNoData = True
77
77
78 if not(self.__isConfig):
78 if not(self.__isConfig):
79
79
80 self.nProfiles = dataOut.nProfiles
80 self.nProfiles = dataOut.nProfiles
81 self.profileIndex = dataOut.profileIndex
81 self.profileIndex = dataOut.profileIndex
82 self.__isConfig = True
82 self.__isConfig = True
83
83
84 if beam != None:
84 if beam != None:
85 if self.isProfileInList(dataOut.beamRangeDict[beam]):
85 if self.isProfileInList(dataOut.beamRangeDict[beam]):
86 beamInfo = dataOut.beamCodeDict[beam]
86 beamInfo = dataOut.beamCodeDict[beam]
87 dataOut.azimuth = beamInfo[1]
87 dataOut.azimuth = beamInfo[1]
88 dataOut.zenith = beamInfo[2]
88 dataOut.zenith = beamInfo[2]
89 dataOut.gain = beamInfo[3]
89 dataOut.gain = beamInfo[3]
90 dataOut.flagNoData = False
90 dataOut.flagNoData = False
91
91
92 self.incIndex()
92 self.incIndex()
93 return 1
93 return 1
94
94
95 else:
95 else:
96 raise ValueError("BeamSelector needs beam value")
96 raise ValueError("BeamSelector needs beam value")
97
97
98 return 0
98 return 0
99
99
100 class ProfileToChannels(Operation):
100 class ProfileToChannels(Operation):
101
101
102 def __init__(self, **kwargs):
102 def __init__(self, **kwargs):
103 Operation.__init__(self, **kwargs)
103 Operation.__init__(self, **kwargs)
104 self.__isConfig = False
104 self.__isConfig = False
105 self.__counter_chan = 0
105 self.__counter_chan = 0
106 self.buffer = None
106 self.buffer = None
107
107
108 def isProfileInList(self, profileList):
108 def isProfileInList(self, profileList):
109
109
110 if self.profileIndex not in profileList:
110 if self.profileIndex not in profileList:
111 return False
111 return False
112
112
113 return True
113 return True
114
114
115 def run(self, dataOut):
115 def run(self, dataOut):
116
116
117 dataOut.flagNoData = True
117 dataOut.flagNoData = True
118
118
119 if not(self.__isConfig):
119 if not(self.__isConfig):
120 nchannels = len(list(dataOut.beamRangeDict.keys()))
120 nchannels = len(list(dataOut.beamRangeDict.keys()))
121 nsamples = dataOut.nHeights
121 nsamples = dataOut.nHeights
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
124 dataOut.beam.azimuthList = [dataOut.beamCodeDict[x][1] for x in range(nchannels)]
124 dataOut.beam.azimuthList = [dataOut.beamCodeDict[x][1] for x in range(nchannels)]
125 dataOut.beam.zenithList = [dataOut.beamCodeDict[x][2] for x in range(nchannels)]
125 dataOut.beam.zenithList = [dataOut.beamCodeDict[x][2] for x in range(nchannels)]
126 self.__isConfig = True
126 self.__isConfig = True
127
127
128 for i in range(self.buffer.shape[0]):
128 for i in range(self.buffer.shape[0]):
129 if dataOut.profileIndex in dataOut.beamRangeDict[i]:
129 if dataOut.profileIndex in dataOut.beamRangeDict[i]:
130 self.buffer[i,:] = dataOut.data
130 self.buffer[i,:] = dataOut.data
131 break
131 break
132
132
133
133
134 self.__counter_chan += 1
134 self.__counter_chan += 1
135
135
136 if self.__counter_chan >= self.buffer.shape[0]:
136 if self.__counter_chan >= self.buffer.shape[0]:
137 self.__counter_chan = 0
137 self.__counter_chan = 0
138 dataOut.data = self.buffer.copy()
138 dataOut.data = self.buffer.copy()
139 dataOut.channelList = list(range(self.buffer.shape[0]))
139 dataOut.channelList = list(range(self.buffer.shape[0]))
140 self.__isConfig = False
140 self.__isConfig = False
141 dataOut.flagNoData = False
141 dataOut.flagNoData = False
142 pass
142 pass
143 No newline at end of file
143
@@ -1,429 +1,207
1 '''
1 '''
2 Updated for multiprocessing
2 Base clases to create Processing units and operations, the MPDecorator
3 Author : Sergio Cortez
3 must be used in plotting and writing operations to allow to run as an
4 Jan 2018
4 external process.
5 Abstract:
6 Base class for processing units and operations. A decorator provides multiprocessing features and interconnect the processes created.
7 The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated.
8 The decorator handle also the methods inside the processing unit to be called from the main script (not as operations) (OPERATION -> type ='self').
9
10 Based on:
11 $Author: murco $
12 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
13 '''
5 '''
14
6
15 import os
16 import sys
17 import inspect
7 import inspect
18 import zmq
8 import zmq
19 import time
9 import time
20 import pickle
10 import pickle
21 import traceback
11 import traceback
22 try:
12 try:
23 from queue import Queue
13 from queue import Queue
24 except:
14 except:
25 from Queue import Queue
15 from Queue import Queue
26 from threading import Thread
16 from threading import Thread
27 from multiprocessing import Process
17 from multiprocessing import Process, Queue
28
29 from schainpy.utils import log
18 from schainpy.utils import log
30
19
31
20
32 class ProcessingUnit(object):
21 class ProcessingUnit(object):
22 '''
23 Base class to create Signal Chain Units
24 '''
33
25
34 """
35 Update - Jan 2018 - MULTIPROCESSING
36 All the "call" methods present in the previous base were removed.
37 The majority of operations are independant processes, thus
38 the decorator is in charge of communicate the operation processes
39 with the proccessing unit via IPC.
40
41 The constructor does not receive any argument. The remaining methods
42 are related with the operations to execute.
43
44
45 """
46 proc_type = 'processing'
26 proc_type = 'processing'
47 __attrs__ = []
48
27
49 def __init__(self):
28 def __init__(self):
50
29
51 self.dataIn = None
30 self.dataIn = None
52 self.dataOut = None
31 self.dataOut = None
53 self.isConfig = False
32 self.isConfig = False
54 self.operations = []
33 self.operations = []
55 self.plots = []
34
35 def setInput(self, unit):
36
37 self.dataIn = unit.dataOut
56
38
57 def getAllowedArgs(self):
39 def getAllowedArgs(self):
58 if hasattr(self, '__attrs__'):
40 if hasattr(self, '__attrs__'):
59 return self.__attrs__
41 return self.__attrs__
60 else:
42 else:
61 return inspect.getargspec(self.run).args
43 return inspect.getargspec(self.run).args
62
44
63 def addOperation(self, conf, operation):
45 def addOperation(self, conf, operation):
64 """
46 '''
65 This method is used in the controller, and update the dictionary containing the operations to execute. The dict
47 '''
66 posses the id of the operation process (IPC purposes)
67
68 Agrega un objeto del tipo "Operation" (opObj) a la lista de objetos "self.objectList" y retorna el
69 identificador asociado a este objeto.
70
71 Input:
72
73 object : objeto de la clase "Operation"
74
75 Return:
76
77 objId : identificador del objeto, necesario para comunicar con master(procUnit)
78 """
79
80 self.operations.append(
81 (operation, conf.type, conf.id, conf.getKwargs()))
82
48
83 if 'plot' in self.name.lower():
49 self.operations.append((operation, conf.type, conf.getKwargs()))
84 self.plots.append(operation.CODE)
85
50
86 def getOperationObj(self, objId):
51 def getOperationObj(self, objId):
87
52
88 if objId not in list(self.operations.keys()):
53 if objId not in list(self.operations.keys()):
89 return None
54 return None
90
55
91 return self.operations[objId]
56 return self.operations[objId]
92
57
93 def operation(self, **kwargs):
58 def call(self, **kwargs):
94 """
59 '''
95 Operacion directa sobre la data (dataOut.data). Es necesario actualizar los valores de los
60 '''
96 atributos del objeto dataOut
97
61
98 Input:
62 try:
63 if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
64 return self.dataIn.isReady()
65 elif self.dataIn is None or not self.dataIn.error:
66 self.run(**kwargs)
67 elif self.dataIn.error:
68 self.dataOut.error = self.dataIn.error
69 self.dataOut.flagNoData = True
70 except:
71 err = traceback.format_exc()
72 if 'SchainWarning' in err:
73 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
74 elif 'SchainError' in err:
75 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
76 else:
77 log.error(err, self.name)
78 self.dataOut.error = True
99
79
100 **kwargs : Diccionario de argumentos de la funcion a ejecutar
80 for op, optype, opkwargs in self.operations:
101 """
81 if optype == 'other' and not self.dataOut.flagNoData:
82 self.dataOut = op.run(self.dataOut, **opkwargs)
83 elif optype == 'external' and not self.dataOut.flagNoData:
84 op.queue.put(self.dataOut)
85 elif optype == 'external' and self.dataOut.error:
86 op.queue.put(self.dataOut)
102
87
103 raise NotImplementedError
88 return 'Error' if self.dataOut.error else self.dataOut.isReady()
104
89
105 def setup(self):
90 def setup(self):
106
91
107 raise NotImplementedError
92 raise NotImplementedError
108
93
109 def run(self):
94 def run(self):
110
95
111 raise NotImplementedError
96 raise NotImplementedError
112
97
113 def close(self):
98 def close(self):
114
99
115 return
100 return
116
101
117
102
118 class Operation(object):
103 class Operation(object):
119
104
120 """
105 '''
121 Update - Jan 2018 - MULTIPROCESSING
106 '''
122
123 Most of the methods remained the same. The decorator parse the arguments and executed the run() method for each process.
124 The constructor doe snot receive any argument, neither the baseclass.
125
126
127 Clase base para definir las operaciones adicionales que se pueden agregar a la clase ProcessingUnit
128 y necesiten acumular informacion previa de los datos a procesar. De preferencia usar un buffer de
129 acumulacion dentro de esta clase
130
131 Ejemplo: Integraciones coherentes, necesita la informacion previa de los n perfiles anteriores (bufffer)
132
107
133 """
134 proc_type = 'operation'
108 proc_type = 'operation'
135 __attrs__ = []
136
109
137 def __init__(self):
110 def __init__(self):
138
111
139 self.id = None
112 self.id = None
140 self.isConfig = False
113 self.isConfig = False
141
114
142 if not hasattr(self, 'name'):
115 if not hasattr(self, 'name'):
143 self.name = self.__class__.__name__
116 self.name = self.__class__.__name__
144
117
145 def getAllowedArgs(self):
118 def getAllowedArgs(self):
146 if hasattr(self, '__attrs__'):
119 if hasattr(self, '__attrs__'):
147 return self.__attrs__
120 return self.__attrs__
148 else:
121 else:
149 return inspect.getargspec(self.run).args
122 return inspect.getargspec(self.run).args
150
123
151 def setup(self):
124 def setup(self):
152
125
153 self.isConfig = True
126 self.isConfig = True
154
127
155 raise NotImplementedError
128 raise NotImplementedError
156
129
157 def run(self, dataIn, **kwargs):
130 def run(self, dataIn, **kwargs):
158 """
131 """
159 Realiza las operaciones necesarias sobre la dataIn.data y actualiza los
132 Realiza las operaciones necesarias sobre la dataIn.data y actualiza los
160 atributos del objeto dataIn.
133 atributos del objeto dataIn.
161
134
162 Input:
135 Input:
163
136
164 dataIn : objeto del tipo JROData
137 dataIn : objeto del tipo JROData
165
138
166 Return:
139 Return:
167
140
168 None
141 None
169
142
170 Affected:
143 Affected:
171 __buffer : buffer de recepcion de datos.
144 __buffer : buffer de recepcion de datos.
172
145
173 """
146 """
174 if not self.isConfig:
147 if not self.isConfig:
175 self.setup(**kwargs)
148 self.setup(**kwargs)
176
149
177 raise NotImplementedError
150 raise NotImplementedError
178
151
179 def close(self):
152 def close(self):
180
153
181 return
154 return
182
155
183 class InputQueue(Thread):
184
185 '''
186 Class to hold input data for Proccessing Units and external Operations,
187 '''
188
189 def __init__(self, project_id, inputId, lock=None):
190
191 Thread.__init__(self)
192 self.queue = Queue()
193 self.project_id = project_id
194 self.inputId = inputId
195 self.lock = lock
196 self.islocked = False
197 self.size = 0
198
199 def run(self):
200
201 c = zmq.Context()
202 self.receiver = c.socket(zmq.SUB)
203 self.receiver.connect(
204 'ipc:///tmp/schain/{}_pub'.format(self.project_id))
205 self.receiver.setsockopt(zmq.SUBSCRIBE, self.inputId.encode())
206
207 while True:
208 obj = self.receiver.recv_multipart()[1]
209 self.size += sys.getsizeof(obj)
210 self.queue.put(obj)
211
212 def get(self):
213
214 if not self.islocked and self.size/1000000 > 512:
215 self.lock.n.value += 1
216 self.islocked = True
217 self.lock.clear()
218 elif self.islocked and self.size/1000000 <= 512:
219 self.islocked = False
220 self.lock.n.value -= 1
221 if self.lock.n.value == 0:
222 self.lock.set()
223
224 obj = self.queue.get()
225 self.size -= sys.getsizeof(obj)
226 return pickle.loads(obj)
227
228
156
229 def MPDecorator(BaseClass):
157 def MPDecorator(BaseClass):
230 """
158 """
231 Multiprocessing class decorator
159 Multiprocessing class decorator
232
160
233 This function add multiprocessing features to a BaseClass. Also, it handle
161 This function add multiprocessing features to a BaseClass.
234 the communication beetween processes (readers, procUnits and operations).
235 """
162 """
236
163
237 class MPClass(BaseClass, Process):
164 class MPClass(BaseClass, Process):
238
165
239 def __init__(self, *args, **kwargs):
166 def __init__(self, *args, **kwargs):
240 super(MPClass, self).__init__()
167 super(MPClass, self).__init__()
241 Process.__init__(self)
168 Process.__init__(self)
242 self.operationKwargs = {}
169
243 self.args = args
170 self.args = args
244 self.kwargs = kwargs
171 self.kwargs = kwargs
245 self.sender = None
246 self.receiver = None
247 self.i = 0
248 self.t = time.time()
172 self.t = time.time()
173 self.op_type = 'external'
249 self.name = BaseClass.__name__
174 self.name = BaseClass.__name__
250 self.__doc__ = BaseClass.__doc__
175 self.__doc__ = BaseClass.__doc__
251
176
252 if 'plot' in self.name.lower() and not self.name.endswith('_'):
177 if 'plot' in self.name.lower() and not self.name.endswith('_'):
253 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
178 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
254
179
255 self.start_time = time.time()
180 self.start_time = time.time()
256 self.id = args[0]
257 self.inputId = args[1]
258 self.project_id = args[2]
259 self.err_queue = args[3]
181 self.err_queue = args[3]
260 self.lock = args[4]
182 self.queue = Queue(maxsize=1)
261 self.typeProc = args[5]
183 self.myrun = BaseClass.run
262 self.err_queue.put('#_start_#')
263 if self.inputId is not None:
264 self.queue = InputQueue(self.project_id, self.inputId, self.lock)
265
266 def subscribe(self):
267 '''
268 Start the zmq socket receiver and subcribe to input ID.
269 '''
270
271 self.queue.start()
272
184
273 def listen(self):
185 def run(self):
274 '''
275 This function waits for objects
276 '''
277
278 return self.queue.get()
279
280 def set_publisher(self):
281 '''
282 This function create a zmq socket for publishing objects.
283 '''
284
285 time.sleep(0.5)
286
287 c = zmq.Context()
288 self.sender = c.socket(zmq.PUB)
289 self.sender.connect(
290 'ipc:///tmp/schain/{}_sub'.format(self.project_id))
291
292 def publish(self, data, id):
293 '''
294 This function publish an object, to an specific topic.
295 It blocks publishing when receiver queue is full to avoid data loss
296 '''
297
298 if self.inputId is None:
299 self.lock.wait()
300 self.sender.send_multipart([str(id).encode(), pickle.dumps(data)])
301
302 def runReader(self):
303 '''
304 Run fuction for read units
305 '''
306 while True:
307
308 try:
309 BaseClass.run(self, **self.kwargs)
310 except:
311 err = traceback.format_exc()
312 if 'No more files' in err:
313 log.warning('No more files to read', self.name)
314 else:
315 self.err_queue.put('{}|{}'.format(self.name, err))
316 self.dataOut.error = True
317
318 for op, optype, opId, kwargs in self.operations:
319 if optype == 'self' and not self.dataOut.flagNoData:
320 op(**kwargs)
321 elif optype == 'other' and not self.dataOut.flagNoData:
322 self.dataOut = op.run(self.dataOut, **self.kwargs)
323 elif optype == 'external':
324 self.publish(self.dataOut, opId)
325
326 if self.dataOut.flagNoData and not self.dataOut.error:
327 continue
328
329 self.publish(self.dataOut, self.id)
330
331 if self.dataOut.error:
332 break
333
334 time.sleep(0.5)
335
336 def runProc(self):
337 '''
338 Run function for proccessing units
339 '''
340
341 while True:
342 self.dataIn = self.listen()
343
344 if self.dataIn.flagNoData and self.dataIn.error is None:
345 continue
346 elif not self.dataIn.error:
347 try:
348 BaseClass.run(self, **self.kwargs)
349 except:
350 self.err_queue.put('{}|{}'.format(self.name, traceback.format_exc()))
351 self.dataOut.error = True
352 elif self.dataIn.error:
353 self.dataOut.error = self.dataIn.error
354 self.dataOut.flagNoData = True
355
356 for op, optype, opId, kwargs in self.operations:
357 if optype == 'self' and not self.dataOut.flagNoData:
358 op(**kwargs)
359 elif optype == 'other' and not self.dataOut.flagNoData:
360 self.dataOut = op.run(self.dataOut, **kwargs)
361 elif optype == 'external' and not self.dataOut.flagNoData:
362 self.publish(self.dataOut, opId)
363
364 self.publish(self.dataOut, self.id)
365 for op, optype, opId, kwargs in self.operations:
366 if optype == 'external' and self.dataOut.error:
367 self.publish(self.dataOut, opId)
368
369 if self.dataOut.error:
370 break
371
372 time.sleep(0.5)
373
374 def runOp(self):
375 '''
376 Run function for external operations (this operations just receive data
377 ex: plots, writers, publishers)
378 '''
379
186
380 while True:
187 while True:
381
188
382 dataOut = self.listen()
189 dataOut = self.queue.get()
383
190
384 if not dataOut.error:
191 if not dataOut.error:
385 try:
192 try:
386 BaseClass.run(self, dataOut, **self.kwargs)
193 BaseClass.run(self, dataOut, **self.kwargs)
387 except:
194 except:
388 self.err_queue.put('{}|{}'.format(self.name, traceback.format_exc()))
195 err = traceback.format_exc()
389 dataOut.error = True
196 log.error(err.split('\n')[-2], self.name)
390 else:
197 else:
391 break
198 break
392
199
393 def run(self):
394 if self.typeProc is "ProcUnit":
395
396 if self.inputId is not None:
397 self.subscribe()
398
399 self.set_publisher()
400
401 if 'Reader' not in BaseClass.__name__:
402 self.runProc()
403 else:
404 self.runReader()
405
406 elif self.typeProc is "Operation":
407
408 self.subscribe()
409 self.runOp()
410
411 else:
412 raise ValueError("Unknown type")
413
414 self.close()
200 self.close()
415
201
416 def close(self):
202 def close(self):
417
203
418 BaseClass.close(self)
204 BaseClass.close(self)
419 self.err_queue.put('#_end_#')
420
421 if self.sender:
422 self.sender.close()
423
424 if self.receiver:
425 self.receiver.close()
426
427 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
205 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
428
206
429 return MPClass
207 return MPClass
@@ -1,178 +1,178
1 import numpy
1 import numpy
2
2
3 from .jroproc_base import ProcessingUnit, Operation
3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import Correlation, hildebrand_sekhon
4 from schainpy.model.data.jrodata import Correlation
5
5
6 class CorrelationProc(ProcessingUnit):
6 class CorrelationProc(ProcessingUnit):
7
7
8 pairsList = None
8 pairsList = None
9
9
10 data_cf = None
10 data_cf = None
11
11
12 def __init__(self, **kwargs):
12 def __init__(self, **kwargs):
13
13
14 ProcessingUnit.__init__(self, **kwargs)
14 ProcessingUnit.__init__(self, **kwargs)
15
15
16 self.objectDict = {}
16 self.objectDict = {}
17 self.buffer = None
17 self.buffer = None
18 self.firstdatatime = None
18 self.firstdatatime = None
19 self.profIndex = 0
19 self.profIndex = 0
20 self.dataOut = Correlation()
20 self.dataOut = Correlation()
21
21
22 def __updateObjFromVoltage(self):
22 def __updateObjFromVoltage(self):
23
23
24 self.dataOut.timeZone = self.dataIn.timeZone
24 self.dataOut.timeZone = self.dataIn.timeZone
25 self.dataOut.dstFlag = self.dataIn.dstFlag
25 self.dataOut.dstFlag = self.dataIn.dstFlag
26 self.dataOut.errorCount = self.dataIn.errorCount
26 self.dataOut.errorCount = self.dataIn.errorCount
27 self.dataOut.useLocalTime = self.dataIn.useLocalTime
27 self.dataOut.useLocalTime = self.dataIn.useLocalTime
28
28
29 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
29 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
30 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
30 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
31 self.dataOut.channelList = self.dataIn.channelList
31 self.dataOut.channelList = self.dataIn.channelList
32 self.dataOut.heightList = self.dataIn.heightList
32 self.dataOut.heightList = self.dataIn.heightList
33 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
34 # self.dataOut.nHeights = self.dataIn.nHeights
34 # self.dataOut.nHeights = self.dataIn.nHeights
35 # self.dataOut.nChannels = self.dataIn.nChannels
35 # self.dataOut.nChannels = self.dataIn.nChannels
36 self.dataOut.nBaud = self.dataIn.nBaud
36 self.dataOut.nBaud = self.dataIn.nBaud
37 self.dataOut.nCode = self.dataIn.nCode
37 self.dataOut.nCode = self.dataIn.nCode
38 self.dataOut.code = self.dataIn.code
38 self.dataOut.code = self.dataIn.code
39 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
39 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
40 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
40 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
41 self.dataOut.utctime = self.firstdatatime
41 self.dataOut.utctime = self.firstdatatime
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
44 self.dataOut.nCohInt = self.dataIn.nCohInt
44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 # self.dataOut.nIncohInt = 1
45 # self.dataOut.nIncohInt = 1
46 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 self.dataOut.ippSeconds = self.dataIn.ippSeconds
47 self.dataOut.nProfiles = self.dataIn.nProfiles
47 self.dataOut.nProfiles = self.dataIn.nProfiles
48 self.dataOut.utctime = self.dataIn.utctime
48 self.dataOut.utctime = self.dataIn.utctime
49 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
49 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
50
50
51 # self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nPoints
51 # self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nPoints
52
52
53
53
54 def removeDC(self, jspectra):
54 def removeDC(self, jspectra):
55
55
56 nChannel = jspectra.shape[0]
56 nChannel = jspectra.shape[0]
57
57
58 for i in range(nChannel):
58 for i in range(nChannel):
59 jspectra_tmp = jspectra[i,:,:]
59 jspectra_tmp = jspectra[i,:,:]
60 jspectra_DC = numpy.mean(jspectra_tmp,axis = 0)
60 jspectra_DC = numpy.mean(jspectra_tmp,axis = 0)
61
61
62 jspectra_tmp = jspectra_tmp - jspectra_DC
62 jspectra_tmp = jspectra_tmp - jspectra_DC
63 jspectra[i,:,:] = jspectra_tmp
63 jspectra[i,:,:] = jspectra_tmp
64
64
65 return jspectra
65 return jspectra
66
66
67
67
68 def removeNoise(self, mode = 2):
68 def removeNoise(self, mode = 2):
69 indR = numpy.where(self.dataOut.lagR == 0)[0][0]
69 indR = numpy.where(self.dataOut.lagR == 0)[0][0]
70 indT = numpy.where(self.dataOut.lagT == 0)[0][0]
70 indT = numpy.where(self.dataOut.lagT == 0)[0][0]
71
71
72 jspectra = self.dataOut.data_corr[:,:,indR,:]
72 jspectra = self.dataOut.data_corr[:,:,indR,:]
73
73
74 num_chan = jspectra.shape[0]
74 num_chan = jspectra.shape[0]
75 num_hei = jspectra.shape[2]
75 num_hei = jspectra.shape[2]
76
76
77 freq_dc = indT
77 freq_dc = indT
78 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
78 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
79
79
80 NPot = self.dataOut.getNoise(mode)
80 NPot = self.dataOut.getNoise(mode)
81 jspectra[:,freq_dc,:] = jspectra[:,freq_dc,:] - NPot
81 jspectra[:,freq_dc,:] = jspectra[:,freq_dc,:] - NPot
82 SPot = jspectra[:,freq_dc,:]
82 SPot = jspectra[:,freq_dc,:]
83 pairsAutoCorr = self.dataOut.getPairsAutoCorr()
83 pairsAutoCorr = self.dataOut.getPairsAutoCorr()
84 # self.dataOut.signalPotency = SPot
84 # self.dataOut.signalPotency = SPot
85 self.dataOut.noise = NPot
85 self.dataOut.noise = NPot
86 self.dataOut.SNR = (SPot/NPot)[pairsAutoCorr]
86 self.dataOut.SNR = (SPot/NPot)[pairsAutoCorr]
87 self.dataOut.data_corr[:,:,indR,:] = jspectra
87 self.dataOut.data_corr[:,:,indR,:] = jspectra
88
88
89 return 1
89 return 1
90
90
91 def run(self, lags=None, mode = 'time', pairsList=None, fullBuffer=False, nAvg = 1, removeDC = False, splitCF=False):
91 def run(self, lags=None, mode = 'time', pairsList=None, fullBuffer=False, nAvg = 1, removeDC = False, splitCF=False):
92
92
93 self.dataOut.flagNoData = True
93 self.dataOut.flagNoData = True
94
94
95 if self.dataIn.type == "Correlation":
95 if self.dataIn.type == "Correlation":
96
96
97 self.dataOut.copy(self.dataIn)
97 self.dataOut.copy(self.dataIn)
98
98
99 return
99 return
100
100
101 if self.dataIn.type == "Voltage":
101 if self.dataIn.type == "Voltage":
102
102
103 nChannels = self.dataIn.nChannels
103 nChannels = self.dataIn.nChannels
104 nProfiles = self.dataIn.nProfiles
104 nProfiles = self.dataIn.nProfiles
105 nHeights = self.dataIn.nHeights
105 nHeights = self.dataIn.nHeights
106 data_pre = self.dataIn.data
106 data_pre = self.dataIn.data
107
107
108 #--------------- Remover DC ------------
108 #--------------- Remover DC ------------
109 if removeDC:
109 if removeDC:
110 data_pre = self.removeDC(data_pre)
110 data_pre = self.removeDC(data_pre)
111
111
112 #---------------------------------------------
112 #---------------------------------------------
113 # pairsList = list(ccfList)
113 # pairsList = list(ccfList)
114 # for i in acfList:
114 # for i in acfList:
115 # pairsList.append((i,i))
115 # pairsList.append((i,i))
116 #
116 #
117 # ccf_pairs = numpy.arange(len(ccfList))
117 # ccf_pairs = numpy.arange(len(ccfList))
118 # acf_pairs = numpy.arange(len(ccfList),len(pairsList))
118 # acf_pairs = numpy.arange(len(ccfList),len(pairsList))
119 self.__updateObjFromVoltage()
119 self.__updateObjFromVoltage()
120 #----------------------------------------------------------------------
120 #----------------------------------------------------------------------
121 #Creating temporal buffers
121 #Creating temporal buffers
122 if fullBuffer:
122 if fullBuffer:
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype = 'complex')*numpy.nan
123 tmp = numpy.zeros((len(pairsList), len(lags), nProfiles, nHeights), dtype = 'complex')*numpy.nan
124 elif mode == 'time':
124 elif mode == 'time':
125 if lags == None:
125 if lags == None:
126 lags = numpy.arange(-nProfiles+1, nProfiles)
126 lags = numpy.arange(-nProfiles+1, nProfiles)
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights),dtype='complex')
127 tmp = numpy.zeros((len(pairsList), len(lags), nHeights),dtype='complex')
128 elif mode == 'height':
128 elif mode == 'height':
129 if lags == None:
129 if lags == None:
130 lags = numpy.arange(-nHeights+1, nHeights)
130 lags = numpy.arange(-nHeights+1, nHeights)
131 tmp = numpy.zeros(len(pairsList), (len(lags), nProfiles),dtype='complex')
131 tmp = numpy.zeros(len(pairsList), (len(lags), nProfiles),dtype='complex')
132
132
133 #For loop
133 #For loop
134 for l in range(len(pairsList)):
134 for l in range(len(pairsList)):
135
135
136 ch0 = pairsList[l][0]
136 ch0 = pairsList[l][0]
137 ch1 = pairsList[l][1]
137 ch1 = pairsList[l][1]
138
138
139 for i in range(len(lags)):
139 for i in range(len(lags)):
140 idx = lags[i]
140 idx = lags[i]
141
141
142 if idx >= 0:
142 if idx >= 0:
143 if mode == 'time':
143 if mode == 'time':
144 ccf0 = data_pre[ch0,:nProfiles-idx,:]*numpy.conj(data_pre[ch1,idx:,:]) #time
144 ccf0 = data_pre[ch0,:nProfiles-idx,:]*numpy.conj(data_pre[ch1,idx:,:]) #time
145 else:
145 else:
146 ccf0 = data_pre[ch0,:,nHeights-idx]*numpy.conj(data_pre[ch1,:,idx:]) #heights
146 ccf0 = data_pre[ch0,:,nHeights-idx]*numpy.conj(data_pre[ch1,:,idx:]) #heights
147 else:
147 else:
148 if mode == 'time':
148 if mode == 'time':
149 ccf0 = data_pre[ch0,-idx:,:]*numpy.conj(data_pre[ch1,:nProfiles+idx,:]) #time
149 ccf0 = data_pre[ch0,-idx:,:]*numpy.conj(data_pre[ch1,:nProfiles+idx,:]) #time
150 else:
150 else:
151 ccf0 = data_pre[ch0,:,-idx:]*numpy.conj(data_pre[ch1,:,:nHeights+idx]) #heights
151 ccf0 = data_pre[ch0,:,-idx:]*numpy.conj(data_pre[ch1,:,:nHeights+idx]) #heights
152
152
153 if fullBuffer:
153 if fullBuffer:
154 tmp[l,i,:ccf0.shape[0],:] = ccf0
154 tmp[l,i,:ccf0.shape[0],:] = ccf0
155 else:
155 else:
156 tmp[l,i,:] = numpy.sum(ccf0, axis=0)
156 tmp[l,i,:] = numpy.sum(ccf0, axis=0)
157
157
158 #-----------------------------------------------------------------
158 #-----------------------------------------------------------------
159 if fullBuffer:
159 if fullBuffer:
160 tmp = numpy.sum(numpy.reshape(tmp,(tmp.shape[0],tmp.shape[1],tmp.shape[2]/nAvg,nAvg,tmp.shape[3])),axis=3)
160 tmp = numpy.sum(numpy.reshape(tmp,(tmp.shape[0],tmp.shape[1],tmp.shape[2]/nAvg,nAvg,tmp.shape[3])),axis=3)
161 self.dataOut.nAvg = nAvg
161 self.dataOut.nAvg = nAvg
162
162
163 self.dataOut.data_cf = tmp
163 self.dataOut.data_cf = tmp
164 self.dataOut.mode = mode
164 self.dataOut.mode = mode
165 self.dataOut.nLags = len(lags)
165 self.dataOut.nLags = len(lags)
166 self.dataOut.pairsList = pairsList
166 self.dataOut.pairsList = pairsList
167 self.dataOut.nPairs = len(pairsList)
167 self.dataOut.nPairs = len(pairsList)
168
168
169 #Se Calcula los factores de Normalizacion
169 #Se Calcula los factores de Normalizacion
170 if mode == 'time':
170 if mode == 'time':
171 delta = self.dataIn.ippSeconds*self.dataIn.nCohInt
171 delta = self.dataIn.ippSeconds*self.dataIn.nCohInt
172 else:
172 else:
173 delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
173 delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
174 self.dataOut.lagRange = numpy.array(lags)*delta
174 self.dataOut.lagRange = numpy.array(lags)*delta
175 # self.dataOut.nCohInt = self.dataIn.nCohInt*nAvg
175 # self.dataOut.nCohInt = self.dataIn.nCohInt*nAvg
176 self.dataOut.flagNoData = False
176 self.dataOut.flagNoData = False
177 # a = self.dataOut.normFactor
177 # a = self.dataOut.normFactor
178 return
178 return
@@ -1,350 +1,350
1 import numpy
1 import numpy
2
2
3 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
3 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
4 from schainpy.model.data.jrodata import SpectraHeis
4 from schainpy.model.data.jrodata import SpectraHeis
5 from schainpy.utils import log
5 from schainpy.utils import log
6
6
7
7
class SpectraHeisProc(ProcessingUnit):
    """Produce heights-FFT spectra (SpectraHeis) from incoming data.

    Three input types are accepted:
      * ``Voltage``     — metadata is propagated and the power spectrum is
                          computed with an FFT along the height axis;
      * ``Fits``        — data and metadata are copied straight through;
      * ``SpectraHeis`` — plain pass-through copy.
    Any other input type raises ``ValueError``.
    """

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = SpectraHeis()

    def __updateObjFromVoltage(self):
        """Propagate metadata from the Voltage input into the output object."""
        src = self.dataIn
        dst = self.dataOut

        # Attributes transferred verbatim, input -> output.
        for attr in ('timeZone', 'dstFlag', 'errorCount', 'useLocalTime',
                     'channelList', 'heightList', 'nBaud', 'nCode', 'code',
                     'flagDiscontinuousBlock', 'utctime', 'flagDecodeData',
                     'flagDeflipData', 'nCohInt', 'windowOfFilter'):
            setattr(dst, attr, getattr(src, attr))

        # Header objects are duplicated so the output never aliases the input.
        dst.radarControllerHeaderObj = src.radarControllerHeaderObj.copy()
        dst.systemHeaderObj = src.systemHeaderObj.copy()

        # Spectra are stored as interleaved real/imag float32 pairs.
        dst.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
        dst.ippFactor = 1
        dst.noise_estimation = None
        dst.nIncohInt = 1
        # The FFT runs along heights, so there is one FFT point per height.
        dst.nFFTPoints = src.nHeights

    def __updateObjFromFits(self):
        """Copy data and metadata straight from a Fits input."""
        src = self.dataIn
        dst = self.dataOut

        dst.utctime = src.utctime
        dst.channelList = src.channelList
        dst.heightList = src.heightList
        dst.data_spc = src.data
        dst.ippSeconds = src.ippSeconds
        dst.nCohInt = src.nCohInt
        dst.nIncohInt = src.nIncohInt
        dst.timeZone = src.timeZone
        dst.useLocalTime = True

    def __getFft(self):
        """Compute the shifted power spectrum along the height axis."""
        volt_fft = numpy.fft.fft(self.dataIn.data, axis=1)
        volt_fft = numpy.fft.fftshift(volt_fft, axes=(1,))
        # Power spectrum normalised by the number of FFT points.
        self.dataOut.data_spc = numpy.abs(volt_fft * numpy.conjugate(volt_fft)) / (self.dataOut.nFFTPoints)

    def run(self):
        """Process one input object.

        ``dataOut.flagNoData`` stays True on the pass-through path only
        if the copied object carried it; it is cleared explicitly on the
        Fits and Voltage paths once valid spectra are in place.
        """
        self.dataOut.flagNoData = True

        in_type = self.dataIn.type

        if in_type == "Fits":
            self.__updateObjFromFits()
            self.dataOut.flagNoData = False
            return

        if in_type == "SpectraHeis":
            self.dataOut.copy(self.dataIn)
            return

        if in_type == "Voltage":
            self.__updateObjFromVoltage()
            self.__getFft()
            self.dataOut.flagNoData = False
            return

        raise ValueError("The type object %s is not valid"%(self.dataIn.type))

    def selectChannels(self, channelList):
        """Select channels by their channel numbers (not indexes)."""
        channelIndexList = [self.dataOut.channelList.index(channel)
                            for channel in channelList]

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """Keep only the channels whose indexes appear in *channelIndexList*.

        Input:
            channelIndexList : plain list of channel indexes, e.g. [2,3,7]

        Affected:
            self.dataOut.data_spc
            self.dataOut.channelList

        Raises ValueError if any index is not a valid channel index.
        Returns 1 on success (historical convention of this codebase).
        """
        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                print(channelIndexList)
                raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)

        self.dataOut.data_spc = self.dataOut.data_spc[channelIndexList,:]
        self.dataOut.channelList = [self.dataOut.channelList[idx]
                                    for idx in channelIndexList]

        return 1
150
150
151
151
class IncohInt4SpectraHeis(Operation):
    """Incoherent integration of SpectraHeis data.

    Accumulates ``data_spc`` blocks either until *n* profiles have been
    added or until *timeInterval* seconds have elapsed, optionally with
    overlapping (sliding-window) integration.
    """

    isConfig = False

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None

    def __init__(self):

        Operation.__init__(self)

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

        n : Number of incoherent integrations. Takes precedence over
            timeInterval when both are given.
        timeInterval : Integration time in seconds; used only when n is None.
        overlapping : When True, integrate over a sliding window of the last
                      n profiles instead of disjoint groups.

        Raises ValueError if neither n nor timeInterval is specified.
        """
        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval
            self.n = 9999  # effectively unbounded; elapsed time decides the flush
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0  # integer zero: first += allocates a fresh array

        self.__profIndex = 0

    def putData(self, data):
        """
        Add a profile to the __buffer and increase the __profIndex by one.
        """
        if not self.__withOverapping:
            # Plain accumulation. No defensive copy is needed: `0 + data`
            # already allocates a new array, and later in-place adds only
            # read from *data*.
            self.__buffer += data
            self.__profIndex += 1
            return

        # Overlapping (sliding window): stack profiles along a new axis.
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # An empty buffer is seeded with the first profile.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # Window not full yet: keep stacking.
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Window full: drop the oldest profile, append the newest.
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n - 1] = data
        self.__profIndex = self.n
        return

    def pushData(self):
        """
        Return the integrated data and the number of profiles summed.

        Affected:

        self.__buffer
        self.__profIndex
        """
        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            # Reset for the next disjoint integration group.
            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Overlapping: sum the sliding window without clearing it.
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        """Integrate by a fixed profile count; return data when ready, else None."""
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:
            avgdata, _ = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """Integrate until the configured time interval has elapsed."""
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            # Record how many profiles actually went into this interval.
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """Feed one block; return (avgdata, avgdatatime) or (None, None)."""
        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        # BUGFIX: deltatime must be measured against the PREVIOUS call's
        # timestamp, so compute it before __lastdatatime is overwritten.
        # The old code updated __lastdatatime first, which made deltatime
        # always zero and froze __initime in overlapping mode.
        deltatime = datatime - self.__lastdatatime
        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, overlapping=False, **kwargs):
        """Operation entry point: integrate dataOut.data_spc in place."""
        if not self.isConfig:
            self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping)
            self.isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data_spc = avgdata
            dataOut.nIncohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False

        return dataOut
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now