##// END OF EJS Templates
Merge branch 'v3.0-WR' of http://intranet.igp.gob.pe:8082/schain into v3.0-WR
Juan C. Espinoza -
r1539:03a9c71b14ee merge
parent child
Show More
@@ -0,0 +1,418
# SOPHY PROC script
import os, sys, json, argparse
import datetime
import time

# Root folder holding one sub-folder per experiment.
# Fix: the three earlier assignments were dead code (each immediately
# overwritten by the next); keep them as commented-out alternatives so only
# one PATH is live, matching the style used elsewhere in this project.
#PATH = '/DATA_RM/DATA'
#PATH = '/media/jespinoza/Elements'
#PATH = '/media/jespinoza/data/SOPHY'
PATH = '/home/soporte/Documents/EVENTO/'

# NOTE: the recording (save) configuration lives in PARAM.
# Per-variable plot/recording setup: color-scale limits, colormap, display
# label, HDF5 group name (wrname), colorbar units and channel index.
PARAM = {
    'S': {'zmin': -45, 'zmax': -25, 'colormap': 'jet', 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
    'SNR': {'zmin': -40, 'zmax': -20, 'colormap': 'jet', 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
    'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
    'R': {'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '', 'ch':0},
    'P': {'zmin': -180,'zmax': 180,'colormap': 'RdBu_r', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'degrees', 'ch':0},
    'D': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dBz','ch':0},
    'Z': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'Reflectivity ', 'wrname':'reflectivity', 'cb_label': 'dBz','ch':0},
    'W': {'zmin': 0, 'zmax': 15, 'colormap': 'sophy_w','label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
    }

# Attribute names copied into the HDF5 output as metadata by HDFWriter.
META = ['heightList', 'data_azi', 'data_ele', 'mode_op', 'latitude', 'longitude', 'altitude', 'heading', 'radar_name',
        'institution', 'contact', 'h0', 'range_unit', 'prf', 'prf_unit', 'variable', 'variable_unit', 'n_pulses',
        'pulse1_range', 'pulse1_width', 'pulse2_width', 'pulse1_repetitions', 'pulse2_repetitions', 'pulse_width_unit',
        'snr_threshold','dataPP_NOISE']
28
def max_index(r, sample_rate, ipp):
    """Return the height index for range *r* (km) plus a 1.2 km guard band."""
    # Samples accumulated per unit range; hoisted so it is computed once.
    scale = sample_rate * ipp * 1e6
    return int(scale * r / 60) + int(scale * 1.2 / 60)
32
def main(args):
    """Build and run the schainpy processing chain for one SOPHy experiment.

    Reads ``experiment.conf`` from the experiment folder, wires a
    DigitalRFReader into voltage/parameter processing units, per-variable
    PPI plots and (optionally) HDF5 writers, then starts the project.
    Two layouts exist: a single transmit pulse, or two pulses processed in
    parallel chains and merged.

    Parameters
    ----------
    args : argparse.Namespace
        Command-line options parsed in ``__main__``.
    """

    experiment = args.experiment
    # NOTE(review): file handle is never closed; a `with` block would be safer.
    fp = open(os.path.join(PATH, experiment, 'experiment.conf'))
    conf = json.loads(fp.read())

    ipp_km = conf['usrp_tx']['ipp']
    ipp = ipp_km * 2 /300000   # inter-pulse period in seconds (two-way at c)
    sample_rate = conf['usrp_rx']['sample_rate']
    axis = ['0' if x=='elevation' else '1' for x in conf['pedestal']['axis']] # azimuth -> '1', elevation -> '0'
    speed_axis = conf['pedestal']['speed']
    steps = conf['pedestal']['table']
    # NOTE(review): `axis` and `steps` are computed but never used below.
    time_offset = args.time_offset
    parameters = args.parameters
    # Experiment folders are named 'NAME@YYYY-MM-DDTHH-MM-SS'.
    start_date = experiment.split('@')[1].split('T')[0].replace('-', '/')
    end_date = start_date
    if args.start_time:
        start_time = args.start_time
    else:
        start_time = experiment.split('@')[1].split('T')[1].replace('-', ':')
    end_time = '23:59:59'
    N = int(1/(speed_axis[0]*ipp))  # profiles per block: 1 degree of resolution
    path = os.path.join(PATH, experiment, 'rawdata')
    path_ped = os.path.join(PATH, experiment, 'position')
    if args.label:
        label = '-{}'.format(args.label)
    else:
        label = ''
    path_plots = os.path.join(PATH, experiment, 'plots{}'.format(label))
    path_save = os.path.join(PATH, experiment, 'param{}'.format(label))
    RMIX = 1.62   # km; range boundary assigned to pulse 1 -- TODO confirm meaning
    H0 = -1.68    # km; range (height) correction applied via setH0
    MASK = 0.3    # SNR mask/threshold forwarded to plots and writers

    from schainpy.controller import Project

    project = Project()
    project.setup(id='1', name='Sophy', description='sophy proc')

    reader = project.addReadUnit(datatype='DigitalRFReader',
                                 path=path,
                                 startDate=start_date,
                                 endDate=end_date,
                                 startTime=start_time,
                                 endTime=end_time,
                                 delay=30,
                                 online=args.online,
                                 walk=1,
                                 ippKm = ipp_km,
                                 getByBlock = 1,
                                 nProfileBlocks = N,
                                 )

    if not conf['usrp_tx']['enable_2']: # One pulse
        n_pulses = 1
        pulse_1_width = conf['usrp_tx']['pulse_1']
        pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
        pulse_2_width = 0
        pulse_2_repetitions = 0

        voltage = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        if conf['usrp_tx']['code_type_1'] != 'None':
            # Phase codes arrive as comma-separated digit strings.
            codes = [ c.strip() for c in conf['usrp_tx']['code_1'].split(',')]
            code = []
            for c in codes:
                code.append([int(x) for x in c])
            op = voltage.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')

            op = voltage.addOperation(name='CohInt', optype='other') # integrate at least 2 profiles (complementary code)
            op.addParameter(name='n', value=len(code), format='int')
            ncode = len(code)
        else:
            ncode = 1
            code = ['0']

        op = voltage.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0)

        if args.range > 0:
            op = voltage.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value='0', format='int')
            op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp), format='int')

        op = voltage.addOperation(name='PulsePair_vRF', optype='other')
        # NOTE(review): int(N)/ncode is a float under Python 3 -- presumably // intended.
        op.addParameter(name='n', value=int(N)/ncode, format='int')

        proc = project.addProcUnit(datatype='ParametersProc', inputId=voltage.getId())

        opObj10 = proc.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt',value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        for param in parameters:
            op = proc.addOperation(name='Block360')
            op.addParameter(name='runNextOp', value=True)

            op= proc.addOperation(name='WeatherParamsPlot')
            if args.save: op.addParameter(name='save', value=path_plots, format='str')
            op.addParameter(name='save_period', value=-1)
            op.addParameter(name='show', value=args.show)
            op.addParameter(name='channels', value='1,')
            op.addParameter(name='zmin', value=PARAM[param]['zmin'])
            op.addParameter(name='zmax', value=PARAM[param]['zmax'])
            op.addParameter(name='attr_data', value=param, format='str')
            op.addParameter(name='labels', value=[PARAM[param]['label']])
            op.addParameter(name='save_code', value=param)
            op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
            op.addParameter(name='colormap', value=PARAM[param]['colormap'])
            op.addParameter(name='bgcolor', value='black')
            if MASK: op.addParameter(name='mask', value=MASK, format='float')
            if args.server:
                op.addParameter(name='server', value='0.0.0.0:4444')
                op.addParameter(name='exp_code', value='400')

            # Map internal attribute names to the HDF5 layout written on disk.
            desc = {
                'Data': {
                    param: PARAM[param]['wrname'],
                    'utctime': 'time'
                },
                'Metadata': {
                    'heightList': 'range',
                    'data_azi': 'azimuth',
                    'data_ele': 'elevation',
                    'mode_op': 'scan_type',
                    'h0': 'range_correction',
                }
            }

            if args.save:
                # NOTE(review): `merge` is only defined in the two-pulse branch
                # below, so saving single-pulse data raises NameError here --
                # looks like `proc.addOperation` was intended; confirm and fix.
                writer = merge.addOperation(name='HDFWriter')
                writer.addParameter(name='path', value=path_save, format='str')
                writer.addParameter(name='Reset', value=True)
                writer.addParameter(name='setType', value='weather')
                writer.addParameter(name='description', value=json.dumps(desc))
                writer.addParameter(name='blocksPerFile', value='1',format='int')
                writer.addParameter(name='metadataList', value=','.join(META))
                writer.addParameter(name='dataList', value='data_param,utctime')
                writer.addParameter(name='weather_var', value=param)
                writer.addParameter(name='mask', value=MASK, format='float')
                # meta
                writer.addParameter(name='latitude', value='-12.040436')
                writer.addParameter(name='longitude', value='-75.295893')
                writer.addParameter(name='altitude', value='3379.2147')
                writer.addParameter(name='heading', value='0')
                writer.addParameter(name='radar_name', value='SOPHy')
                writer.addParameter(name='institution', value='IGP')
                writer.addParameter(name='contact', value='dscipion@igp.gob.pe')
                writer.addParameter(name='created_by', value='Signal Chain (https://pypi.org/project/schainpy/)')
                writer.addParameter(name='range_unit', value='km')
                writer.addParameter(name='prf', value=1/ipp)
                writer.addParameter(name='prf_unit', value='hertz')
                writer.addParameter(name='variable', value=PARAM[param]['label'])
                writer.addParameter(name='variable_unit', value=PARAM[param]['cb_label'])
                writer.addParameter(name='n_pulses', value=n_pulses)
                writer.addParameter(name='pulse1_range', value=RMIX)
                writer.addParameter(name='pulse1_width', value=pulse_1_width)
                writer.addParameter(name='pulse2_width', value=pulse_2_width)
                writer.addParameter(name='pulse1_repetitions', value=pulse_1_repetitions)
                writer.addParameter(name='pulse2_repetitions', value=pulse_2_repetitions)
                writer.addParameter(name='pulse_width_unit', value='microseconds')
                writer.addParameter(name='snr_threshold', value=MASK)


    else: #Two pulses
        # NOTE(review): n_pulses stays 1 here although two pulses are used -- confirm.
        n_pulses = 1
        pulse_1_width = conf['usrp_tx']['pulse_1']
        pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
        pulse_2_width = conf['usrp_tx']['pulse_2']
        pulse_2_repetitions = conf['usrp_tx']['repetitions_2']

        # Chain 1: pulse-1 profiles, heights below the RMIX index boundary.
        voltage1 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        op = voltage1.addOperation(name='ProfileSelector')
        op.addParameter(name='profileRangeList', value='0,{}'.format(conf['usrp_tx']['repetitions_1']-1))

        if conf['usrp_tx']['code_type_1'] != 'None':
            codes = [ c.strip() for c in conf['usrp_tx']['code_1'].split(',')]
            code = []
            for c in codes:
                code.append([int(x) for x in c])
            op = voltage1.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')
        else:
            code = ['0']

        op = voltage1.addOperation(name='CohInt', optype='other') # integrate at least 2 profiles (complementary code)
        op.addParameter(name='n', value=2, format='int')

        if args.range > 0:
            op = voltage1.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value='0', format='int')
            op.addParameter(name='maxIndex', value=max_index(RMIX, sample_rate, ipp), format='int')

        op = voltage1.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0, format='float')

        op = voltage1.addOperation(name='PulsePair_vRF', optype='other')
        op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_1'])/2, format='int')

        proc1 = project.addProcUnit(datatype='ParametersProc', inputId=voltage1.getId())
        proc1.addParameter(name='runNextUnit', value=True)

        opObj10 = proc1.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt',value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc1.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        op = proc1.addOperation(name='Block360')
        op.addParameter(name='attr_data', value='data_param')
        op.addParameter(name='runNextOp', value=True)


        # Chain 2: pulse-2 profiles, heights above the RMIX index boundary.
        voltage2 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        op = voltage2.addOperation(name='ProfileSelector')
        op.addParameter(name='profileRangeList', value='{},{}'.format(conf['usrp_tx']['repetitions_1'], conf['usrp_tx']['repetitions_1']+conf['usrp_tx']['repetitions_2']-1))

        # NOTE(review): truthiness test here vs. != 'None' for pulse 1 -- a JSON
        # value of "None" would still enable decoding for pulse 2; confirm intent.
        if conf['usrp_tx']['code_type_2']:
            codes = [ c.strip() for c in conf['usrp_tx']['code_2'].split(',')]
            code = []
            for c in codes:
                code.append([int(x) for x in c])
            op = voltage2.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')

            op = voltage2.addOperation(name='CohInt', optype='other') # integrate at least 2 profiles (complementary code)
            op.addParameter(name='n', value=len(code), format='int')
            ncode = len(code)
        else:
            ncode = 1

        if args.range > 0:
            op = voltage2.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value=max_index(RMIX, sample_rate, ipp), format='int')
            op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp), format='int')

        op = voltage2.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0, format='float')

        op = voltage2.addOperation(name='PulsePair_vRF', optype='other')
        op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_2'])/ncode, format='int')

        proc2 = project.addProcUnit(datatype='ParametersProc', inputId=voltage2.getId())
        proc2.addParameter(name='runNextUnit', value=True)

        opObj10 = proc2.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt',value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc2.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        op = proc2.addOperation(name='Block360')
        op.addParameter(name='attr_data', value='data_param')
        op.addParameter(name='runNextOp', value=True)

        # Merge the two parameter streams into a single data_param output.
        merge = project.addProcUnit(datatype='MergeProc', inputId=[proc1.getId(), proc2.getId()])
        merge.addParameter(name='attr_data', value='data_param')
        merge.addParameter(name='mode', value='7') #RM


        for param in parameters:

            if args.plot:
                op= merge.addOperation(name='WeatherParamsPlot')
                if args.save:
                    op.addParameter(name='save', value=path_plots, format='str')
                op.addParameter(name='save_period', value=-1)
                op.addParameter(name='show', value=args.show)
                op.addParameter(name='channels', value='0,')
                op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
                op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
                op.addParameter(name='attr_data', value=param, format='str')
                op.addParameter(name='labels', value=[PARAM[param]['label']])
                op.addParameter(name='save_code', value=param)
                op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
                op.addParameter(name='colormap', value=PARAM[param]['colormap'])
                op.addParameter(name='bgcolor', value='black')
                if MASK: op.addParameter(name='mask', value=MASK, format='float')
                if args.server:
                    op.addParameter(name='server', value='0.0.0.0:4444')
                    op.addParameter(name='exp_code', value='400')

            # Dual-pol layout: each variable stores H and V channels.
            desc = {
                'Data': {
                    'data_param': {PARAM[param]['wrname']: ['H', 'V']},
                    'utctime': 'time'
                },
                'Metadata': {
                    'heightList': 'range',
                    'data_azi': 'azimuth',
                    'data_ele': 'elevation',
                    'mode_op': 'scan_type',
                    'h0': 'range_correction',
                    'dataPP_NOISE': 'noise',
                }
            }

            if args.save:
                writer = merge.addOperation(name='HDFWriter')
                writer.addParameter(name='path', value=path_save, format='str')
                writer.addParameter(name='Reset', value=True)
                writer.addParameter(name='setType', value='weather')
                writer.addParameter(name='description', value=json.dumps(desc))
                writer.addParameter(name='blocksPerFile', value='1',format='int')
                writer.addParameter(name='metadataList', value=','.join(META))
                writer.addParameter(name='dataList', value='data_param,utctime')
                writer.addParameter(name='weather_var', value=param)
                writer.addParameter(name='mask', value=MASK, format='float')
                # meta
                writer.addParameter(name='latitude', value='-12.040436')
                writer.addParameter(name='longitude', value='-75.295893')
                writer.addParameter(name='altitude', value='3379.2147')
                writer.addParameter(name='heading', value='0')
                writer.addParameter(name='radar_name', value='SOPHy')
                writer.addParameter(name='institution', value='IGP')
                writer.addParameter(name='contact', value='dscipion@igp.gob.pe')
                writer.addParameter(name='created_by', value='Signal Chain (https://pypi.org/project/schainpy/)')
                writer.addParameter(name='range_unit', value='km')
                writer.addParameter(name='prf', value=1/ipp)
                writer.addParameter(name='prf_unit', value='hertz')
                writer.addParameter(name='variable', value=PARAM[param]['label'])
                writer.addParameter(name='variable_unit', value=PARAM[param]['cb_label'])
                writer.addParameter(name='n_pulses', value=n_pulses)
                writer.addParameter(name='pulse1_range', value=RMIX)
                writer.addParameter(name='pulse1_width', value=pulse_1_width)
                writer.addParameter(name='pulse2_width', value=pulse_2_width)
                writer.addParameter(name='pulse1_repetitions', value=pulse_1_repetitions)
                writer.addParameter(name='pulse2_repetitions', value=pulse_2_repetitions)
                writer.addParameter(name='pulse_width_unit', value='microseconds')
                writer.addParameter(name='snr_threshold', value=MASK)


    project.start()
389
if __name__ == '__main__':

    # Command-line interface for the SOPHy processing chain.
    parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
    parser.add_argument('experiment',
                        help='Experiment name')
    parser.add_argument('--parameters', nargs='*', default=['S'],
                        help='Variables to process: P, Z, V')
    # NOTE(review): no type= given, so a user-supplied value arrives as str
    # while the default is int 0 -- confirm downstream handles both.
    parser.add_argument('--time_offset', default=0,
                        help='Fix time offset')
    parser.add_argument('--range', default=0, type=float,
                        help='Max range to plot')
    parser.add_argument('--save', action='store_true',
                        help='Create output files')
    parser.add_argument('--plot', action='store_true',
                        help='Create plot files')
    parser.add_argument('--show', action='store_true',
                        help='Show matplotlib plot.')
    parser.add_argument('--online', action='store_true',
                        help='Set online mode.')
    parser.add_argument('--server', action='store_true',
                        help='Send to realtime')
    parser.add_argument('--start_time', default='',
                        help='Set start time.')
    parser.add_argument('--label', default='',
                        help='Label for plot & param folder')

    args = parser.parse_args()

    main(args)
@@ -0,0 +1,117
import os,sys,json
import datetime
import time
import argparse

from schainpy.controller import Project
'''
NOTE:
Test script.
- Read unit 'HDFReader'.
- Processing unit 'ParametersProc'.
'''
# Folder with previously written SOPHy parameter files; alternative
# locations kept commented out for quick switching.
PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/paramC0N36.0/2022-06-09T18-00-00/"
#PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/param/2022-06-09T18-00-00/"

#PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/paramC0N36.0/2022-06-09T19-00-00/"
#PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-05-31T12-00-17/paramC0N36.0/2022-05-31T16-00-00/"
path = PATH
# Per-variable plotting setup: color-scale limits, colormap, display label,
# HDF5 group name (wrname), colorbar units and channel index.
PARAM = {
    'S': {'zmin': -45, 'zmax': -25, 'colormap': 'jet', 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
    'SNR': {'zmin': -40, 'zmax': -20, 'colormap': 'jet', 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
    'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
    'R': {'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '*', 'ch':0},
    # Fix: cb_label was the mojibake string 'ΒΊ' (a mis-decoded degree sign);
    # use 'degrees' for consistency with the main processing script's PARAM.
    'P': {'zmin': -180,'zmax': 180,'colormap': 'RdBu_r', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'degrees', 'ch':0},
    'D': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dBz','ch':0},
    'Z': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'Reflectivity ', 'wrname':'reflectivity', 'cb_label': 'dBz','ch':0},
    'W': {'zmin': 0, 'zmax': 15, 'colormap': 'sophy_w','label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
    }
29
def main(args):
    """Read saved SOPHy parameter HDF5 files and (optionally) plot them.

    For each requested variable a standalone schainpy Project is built and
    started: an HDFReader read unit filtered by elevation angle + variable
    code, a ParametersProc unit and, with --plot, a WeatherParamsPlot op.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed options: parameters, grado, save, plot, show, server.
    """
    #filefmt="******%Y%m%d*%H%M%S*******"
    #filefmt="SOPHY_20220609_184620_E8.0_Z"
    parameters = args.parameters
    grado = args.grado
    MASK = None  # no SNR mask when re-plotting saved data

    # Bug fix: `path_plots` was referenced below (whenever --save was given)
    # without ever being assigned, raising NameError. Save plots alongside
    # the data folder being read.
    path_plots = os.path.join(PATH, 'plots')

    for param in parameters:
        filefmt ="******%Y%m%d*%H%M%S*******"
        # File-name selector, e.g. '_E2.0_Z' for elevation 2.0 / reflectivity.
        # (NOTE: the name shadows the `filter` builtin inside this loop; kept
        # because HDFReader expects a parameter named `filter`.)
        filter= "_E"+str(grado)+".0_"+param
        variable = 'Data/'+PARAM[param]['wrname']+'/H'
        # Map HDF5 dataset paths to the attribute names schainpy expects.
        desc = {
            'Data': {
                'data_param': [variable],
                'utctime' : 'Data/time'
            },
            'Metadata': {
                'heightList': 'Metadata/range',
                'data_azi' : 'Metadata/azimuth',
                'data_ele' : 'Metadata/elevation',
                'mode_op' : 'Metadata/scan_type',
                'h0' : 'Metadata/range_correction',
            }
        }

        project = Project()

        project.setup(id='10',name='Test Simulator',description=desc)

        readUnitConfObj = project.addReadUnit(datatype='HDFReader',
                                              path=path,
                                              startDate="2022/01/01",
                                              endDate= "2022/12/01",
                                              startTime='00:00:00',
                                              endTime='23:59:59',
                                              delay=0,
                                              #set=0,
                                              online=0,
                                              walk=0,
                                              filefmt=filefmt,
                                              filter=filter,
                                              dparam= 1,
                                              description= json.dumps(desc))

        proc1 = project.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())

        if args.plot:
            print("plotea")
            op= proc1.addOperation(name='WeatherParamsPlot')
            if args.save:
                op.addParameter(name='save', value=path_plots, format='str')
            op.addParameter(name='save_period', value=-1)
            op.addParameter(name='show', value=args.show)
            op.addParameter(name='channels', value='0,')
            op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
            op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
            op.addParameter(name='attr_data', value=param, format='str')
            op.addParameter(name='labels', value=[PARAM[param]['label']])
            op.addParameter(name='save_code', value=param)
            op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
            op.addParameter(name='colormap', value=PARAM[param]['colormap'])
            op.addParameter(name='bgcolor', value='black')
            if MASK: op.addParameter(name='mask', value=MASK, format='float')
            if args.server:
                op.addParameter(name='server', value='0.0.0.0:4444')
                op.addParameter(name='exp_code', value='400')
        # One project per variable: start (and block on) it before the next.
        project.start()
97
if __name__ == '__main__':

    # Command-line interface; a subset of the main processing script's options.
    parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
    parser.add_argument('--parameters', nargs='*', default=['S'],
                        help='Variables to process: P, Z, V ,W')
    # NOTE(review): no type= given, so a user-supplied angle arrives as str
    # while the default is int 2; str(grado) in main() handles both.
    parser.add_argument('--grado', default=2,
                        help='Angle in Elev to plot')
    parser.add_argument('--save', default=0,
                        help='Save plot')
    # NOTE(review): --range is parsed but never used by this script's main().
    parser.add_argument('--range', default=0, type=float,
                        help='Max range to plot')
    parser.add_argument('--plot', action='store_true',
                        help='Create plot files')
    parser.add_argument('--show', action='store_true',
                        help='Show matplotlib plot.')
    parser.add_argument('--server', action='store_true',
                        help='Send to realtime')
    args = parser.parse_args()

    main(args)
@@ -1,685 +1,691
1 import os
1 import os
2 import datetime
2 import datetime
3 import warnings
3 import warnings
4 import numpy
4 import numpy
5 from mpl_toolkits.axisartist.grid_finder import FixedLocator, DictFormatter
5 from mpl_toolkits.axisartist.grid_finder import FixedLocator, DictFormatter
6
6
7 from schainpy.model.graphics.jroplot_base import Plot, plt
7 from schainpy.model.graphics.jroplot_base import Plot, plt
8 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
8 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
9 from schainpy.utils import log
9 from schainpy.utils import log
10
10
11
11
12 EARTH_RADIUS = 6.3710e3
12 EARTH_RADIUS = 6.3710e3
13
13
14
14
def antenna_to_cartesian(ranges, azimuths, elevations):
    """
    Return Cartesian coordinates from antenna coordinates.

    Parameters
    ----------
    ranges : array
        Distances to the center of the radar gates (bins) in kilometers.
    azimuths : array
        Azimuth angle of the radar in degrees.
    elevations : array
        Elevation angle of the radar in degrees.

    Returns
    -------
    x, y, z : array
        Cartesian coordinates in meters from the radar.

    Notes
    -----
    Uses equations 2.28(b) and 2.28(c) of Doviak and Zrnic (Doppler Radar
    and Weather Observations, 2nd ed., 1993, p. 21) under the standard
    4/3-Earth-radius atmosphere model:

        z = sqrt(r^2 + R^2 + 2 r R sin(theta_e)) - R
        s = R * arcsin(r cos(theta_e) / (R + z))
        x = s sin(theta_a),  y = s cos(theta_a)

    where r is the gate distance, theta_a/theta_e the azimuth/elevation
    angles, s the arc length and R the effective Earth radius.
    """
    elev = numpy.deg2rad(elevations)       # elevation angle in radians
    azim = numpy.deg2rad(azimuths)         # azimuth angle in radians
    R_eff = 6371.0 * 1000.0 * 4.0 / 3.0    # effective Earth radius, meters
    r = ranges * 1000.0                    # gate distances in meters

    z = (r ** 2 + R_eff ** 2 + 2.0 * r * R_eff * numpy.sin(elev)) ** 0.5 - R_eff
    arc = R_eff * numpy.arcsin(r * numpy.cos(elev) / (R_eff + z))  # arc length, m
    return arc * numpy.sin(azim), arc * numpy.cos(azim), z
70
70
def cartesian_to_geographic_aeqd(x, y, lon_0, lat_0, R=EARTH_RADIUS):
    """
    Azimuthal equidistant Cartesian to geographic coordinate transform.

    Inverse azimuthal-equidistant map projection (Snyder, Map
    Projections--A Working Manual, USGS Professional Paper 1395, 1987,
    pp. 191-202):

        lat = arcsin(cos(c) sin(lat_0) + y sin(c) cos(lat_0) / rho)
        lon = lon_0 + arctan2(x sin(c),
                              rho cos(lat_0) cos(c) - y sin(lat_0) sin(c))
        rho = sqrt(x^2 + y^2),  c = rho / R

    Parameters
    ----------
    x, y : array-like
        Cartesian coordinates in the same units as R, typically meters.
    lon_0, lat_0 : float
        Longitude and latitude, in degrees, of the center of the projection.
    R : float, optional
        Earth radius in the same units as x and y (defaults to meters).

    Returns
    -------
    lon, lat : array
        Longitude and latitude of the points, in degrees; longitudes are
        normalized to [-180, 180].
    """
    x = numpy.atleast_1d(numpy.asarray(x))
    y = numpy.atleast_1d(numpy.asarray(y))

    lat0_rad = numpy.deg2rad(lat_0)
    lon0_rad = numpy.deg2rad(lon_0)

    rho = numpy.sqrt(x * x + y * y)   # distance from the projection center
    c = rho / R                       # angular distance on the sphere

    with warnings.catch_warnings():
        # rho == 0 divides by zero here; those points are patched just below,
        # so the RuntimeWarning can be silenced.
        warnings.simplefilter("ignore", RuntimeWarning)
        sin_lat = (numpy.cos(c) * numpy.sin(lat0_rad) +
                   y * numpy.sin(c) * numpy.cos(lat0_rad) / rho)
        lat_deg = numpy.rad2deg(numpy.arcsin(sin_lat))
        # the projection center itself maps back to (lat_0, lon_0)
        lat_deg[rho == 0] = lat_0

        num = x * numpy.sin(c)
        den = (rho * numpy.cos(lat0_rad) * numpy.cos(c) -
               y * numpy.sin(lat0_rad) * numpy.sin(c))
        lon_deg = numpy.rad2deg(lon0_rad + numpy.arctan2(num, den))
        # normalize longitudes to the [-180, 180] interval
        lon_deg[lon_deg > 180] -= 360.
        lon_deg[lon_deg < -180] += 360.

    return lon_deg, lat_deg
147
147
def antenna_to_geographic(ranges, azimuths, elevations, site):
    """
    Convert antenna coordinates (range, azimuth, elevation) to geographic
    longitude/latitude centered on the radar location.

    Parameters
    ----------
    ranges, azimuths, elevations : array-like
        Antenna coordinates of each gate.
    site : tuple
        (longitude, latitude) of the radar in degrees.

    Returns
    -------
    lon, lat : array
        Geographic coordinates in degrees.
    """
    gates = numpy.array(ranges)
    azi = numpy.array(azimuths)
    ele = numpy.array(elevations)
    cart_x, cart_y, _ = antenna_to_cartesian(gates, azi, ele)
    # R=6370997 m: mean earth radius used by the AEQD projection helper
    lon, lat = cartesian_to_geographic_aeqd(cart_x, cart_y, site[0], site[1], R=6370997.)
    return lon, lat
154
154
def ll2xy(lat1, lon1, lat2, lon2):
    """
    Project point (lat2, lon2) onto a local flat x/y plane (km) centered
    at (lat1, lon1), using the haversine distance and the initial bearing.

    Returns
    -------
    x, y : float or array
        Eastward and northward displacement in kilometers.
    """
    deg2rad = 0.017453292519943295  # pi / 180
    dlat = (lat2 - lat1) * deg2rad
    dlon = (lon2 - lon1) * deg2rad
    phi1 = lat1 * deg2rad
    phi2 = lat2 * deg2rad
    # Haversine great-circle distance; 12742 km is the Earth's diameter
    hav = 0.5 - numpy.cos(dlat) / 2 \
        + numpy.cos(phi1) * numpy.cos(phi2) * (1 - numpy.cos(dlon)) / 2
    dist = 12742 * numpy.arcsin(numpy.sqrt(hav))
    # Initial bearing from point 1 to point 2 (clockwise from north)
    bearing = numpy.arctan2(
        numpy.sin(dlon) * numpy.cos(phi2),
        numpy.cos(phi1) * numpy.sin(phi2)
        - numpy.sin(phi1) * numpy.cos(phi2) * numpy.cos(dlon))
    # Rotate so 0 rad points east (math convention) before decomposing
    angle = numpy.pi / 2 - bearing
    return dist * numpy.cos(angle), dist * numpy.sin(angle)
165
165
166
166
def km2deg(km):
    """
    Convert a distance in kilometers to the equivalent arc in degrees on a
    sphere of radius EARTH_RADIUS (module-level constant, in km).
    """
    return numpy.degrees(km / EARTH_RADIUS)
173
173
174
174
175
175
class SpectralMomentsPlot(SpectraPlot):
    '''
    Plot for Spectral Moments.

    Reuses SpectraPlot behavior entirely; only the data key differs.
    '''
    # Key used to look this plot's data up in the shared data pool
    CODE = 'spc_moments'
    # colormap = 'jet'
    # plot_type = 'pcolor'
183
183
class DobleGaussianPlot(SpectraPlot):
    '''
    Plot for Double Gaussian fit results.

    Reuses SpectraPlot behavior entirely; only the data key differs.
    '''
    # Key used to look this plot's data up in the shared data pool
    CODE = 'gaussian_fit'
    # colormap = 'jet'
    # plot_type = 'pcolor'
191
191
class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
    '''
    Plot SpectraCut with Double Gaussian Fit.

    Reuses SpectraCutPlot behavior entirely; only the data key differs.
    '''
    # Key used to look this plot's data up in the shared data pool
    CODE = 'cut_gaussian_fit'
197
197
class SnrPlot(RTIPlot):
    '''
    RTI plot of signal-to-noise ratio, displayed in dB.
    '''

    CODE = 'snr'
    colormap = 'jet'

    def update(self, dataOut):
        """Extract SNR from *dataOut*, converting the linear ratio to dB."""
        snr_db = 10 * numpy.log10(dataOut.data_snr)
        return {'snr': snr_db}, {}
213
213
class DopplerPlot(RTIPlot):
    '''
    RTI plot of Doppler data (1st moment).
    '''

    CODE = 'dop'
    colormap = 'jet'

    def update(self, dataOut):
        """Extract Doppler data from *dataOut*, dB-scaled as in the original code."""
        # NOTE(review): 10*log10 of a first-moment (velocity) field yields
        # NaN for negative values — confirm this scaling is intended.
        dop_db = 10 * numpy.log10(dataOut.data_dop)
        return {'dop': dop_db}, {}
229
229
class PowerPlot(RTIPlot):
    '''
    RTI plot of total power (0th moment), displayed in dB.
    '''

    CODE = 'pow'
    colormap = 'jet'

    def update(self, dataOut):
        """Extract power from *dataOut*, normalized by normFactor and in dB."""
        normalized = dataOut.data_pow / dataOut.normFactor
        return {'pow': 10 * numpy.log10(normalized)}, {}
243
243
class SpectralWidthPlot(RTIPlot):
    '''
    RTI plot of spectral width (2nd moment).
    '''

    CODE = 'width'
    colormap = 'jet'

    def update(self, dataOut):
        """Extract spectral width from *dataOut*, passed through unchanged."""
        return {'width': dataOut.data_width}, {}
259
259
class SkyMapPlot(Plot):
    '''
    Polar sky-map plot of meteor detection events.
    '''

    CODE = 'param'

    def setup(self):
        # Single square polar panel covering the full angular range
        self.ncols = 1
        self.nrows = 1
        self.width = 7.2
        self.height = 7.2
        self.nplots = 1
        self.xlabel = 'Zonal Zenith Angle (deg)'
        self.ylabel = 'Meridional Zenith Angle (deg)'
        self.polar = True
        self.ymin = -180
        self.ymax = 180
        self.colorbar = False

    def plot(self):
        # Stack all buffered parameter arrays; each row is one detection
        arrayParameters = numpy.concatenate(self.data['param'])
        # Last column is an error flag: keep only error-free events
        error = arrayParameters[:, -1]
        indValid = numpy.where(error == 0)[0]
        finalMeteor = arrayParameters[indValid, :]
        # Column 3 holds azimuth (deg), column 4 zenith angle (deg)
        finalAzimuth = finalMeteor[:, 3]
        finalZenith = finalMeteor[:, 4]

        # The polar axes expect the angular coordinate in radians
        x = finalAzimuth * numpy.pi / 180
        y = finalZenith

        ax = self.axes[0]

        if ax.firsttime:
            # First draw: create the scatter artist and keep a handle on it
            ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
        else:
            # Subsequent draws: just refresh the existing artist's data
            ax.plot.set_data(x, y)

        dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
        dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
        title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
                                                                                    dt2,
                                                                                    len(x))
        self.titles[0] = title
306
306
307
307
class GenericRTIPlot(Plot):
    '''
    Generic RTI (range-time-intensity) plot for any data_xxxx attribute,
    one panel per parameter row.
    '''

    CODE = 'param'
    colormap = 'viridis'
    plot_type = 'pcolorbuffer'

    def setup(self):
        # Time on the x-axis; one row/panel per parameter in the stack
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = self.data.shape('param')[0]
        self.nplots = self.nrows
        self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})

        if not self.xlabel:
            self.xlabel = 'Time'

        self.ylabel = 'Range [km]'
        if not self.titles:
            self.titles = ['Param {}'.format(x) for x in range(self.nrows)]

    def update(self, dataOut):
        # Stack the requested attributes (self.attr_data) along axis 0 so
        # each one becomes a separate panel of the RTI plot
        data = {
            'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
        }

        meta = {}

        return data, meta

    def plot(self):
        # self.data.normalize_heights()
        self.x = self.data.times
        self.y = self.data.yrange
        self.z = self.data['param']
        # Convert to dB; non-positive values become NaN/-inf and are masked next
        self.z = 10*numpy.log10(self.z)
        self.z = numpy.ma.masked_invalid(self.z)

        if self.decimation is None:
            x, y, z = self.fill_gaps(self.x, self.y, self.z)
        else:
            x, y, z = self.fill_gaps(*self.decimate())

        for n, ax in enumerate(self.axes):

            # Auto-scale color limits per panel unless fixed by the caller
            self.zmax = self.zmax if self.zmax is not None else numpy.max(
                self.z[n])
            self.zmin = self.zmin if self.zmin is not None else numpy.min(
                self.z[n])

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]

                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                # Drop the previous mesh before drawing the refreshed buffer.
                # NOTE(review): Axes.collections.remove was removed in
                # Matplotlib >= 3.7 — confirm the pinned matplotlib version.
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=self.cmaps[n]
                                       )
379
379
380
380
class PolarMapPlot(Plot):
    '''
    Geo-referenced plot for weather radar data, one panel per channel.

    Mode 'E' (elevation scan / PPI) draws on longitude/latitude axes with
    district names, basin outlines and range rings overlaid; any other mode
    draws a plain range/height panel.
    '''

    CODE = 'param'
    colormap = 'seismic'

    def setup(self):
        """Configure panel layout and axis limits from the data metadata."""
        self.ncols = 1
        self.nrows = 1
        self.width = 9
        self.height = 8
        self.mode = self.data.meta['mode']
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))
        if self.mode == 'E':
            # Elevation scan: axes are geographic coordinates
            self.xlabel = 'Longitude'
            self.ylabel = 'Latitude'
        else:
            # Azimuth scan: axes are range vs height
            self.xlabel = 'Range (km)'
            self.ylabel = 'Height (km)'
        self.bgcolor = 'white'
        self.cb_labels = self.data.meta['units']
        self.lat = self.data.meta['latitude']
        self.lon = self.data.meta['longitude']
        # Convert the km-based axis limits to degrees around the radar site
        self.xmin, self.xmax = float(
            km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
        self.ymin, self.ymax = float(
            km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
        # self.polar = True

    def plot(self):
        """Draw each channel's mesh, plus geographic overlays in mode 'E'."""
        for n, ax in enumerate(self.axes):
            data = self.data['param'][self.channels[n]]

            zeniths = numpy.linspace(
                0, self.data.meta['max_range'], data.shape[1])
            if self.mode == 'E':
                # Project the polar grid onto lon/lat around the radar,
                # foreshortened by the cosine of the elevation angle
                azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
                    theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
                x = km2deg(x) + self.lon
                y = km2deg(y) + self.lat
            else:
                # Plain polar-to-cartesian grid in km
                azimuths = numpy.radians(self.data.yrange)
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta), r*numpy.sin(theta)
                self.y = zeniths

            if ax.firsttime:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])
            else:
                if self.zlimits is not None:
                    self.zmin, self.zmax = self.zlimits[n]
                # Remove the previous mesh before redrawing
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(  # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
                    x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                    vmin=self.zmin,
                    vmax=self.zmax,
                    cmap=self.cmaps[n])

            if self.mode == 'A':
                continue

            # plot district names
            # Fix: open files via context managers (the originals were never
            # closed) and do not call .decode() on str — text-mode reads
            # already yield str under Python 3.
            with open('/data/workspace/schain_scripts/distrito.csv') as f:
                for line in f:
                    label, lon, lat = [s.strip() for s in line.split(',') if s]
                    lat = float(lat)
                    lon = float(lon)
                    # ax.plot(lon, lat, '.b', ms=2)
                    ax.text(lon, lat, label, ha='center',
                            va='bottom', size='8', color='black')

            # plot limites
            limites = []
            tmp = []
            with open('/data/workspace/schain_scripts/lima.csv') as f:
                for line in f:
                    # Lines containing '#' separate one polygon from the next
                    if '#' in line:
                        if tmp:
                            limites.append(tmp)
                        tmp = []
                        continue
                    values = line.strip().split(',')
                    tmp.append((float(values[0]), float(values[1])))
            for points in limites:
                ax.add_patch(
                    Polygon(points, ec='k', fc='none', ls='--', lw=0.5))

            # plot Cuencas
            for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
                with open('/data/workspace/schain_scripts/{}.csv'.format(cuenca)) as f:
                    values = [line.strip().split(',') for line in f]
                points = [(float(s[0]), float(s[1])) for s in values]
                ax.add_patch(Polygon(points, ec='b', fc='none'))

            # plot grid: range rings every 15 km, labeled along the 60-deg radial
            for r in (15, 30, 45, 60):
                ax.add_artist(plt.Circle((self.lon, self.lat),
                                         km2deg(r), color='0.6', fill=False, lw=0.2))
                ax.text(
                    self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
                    self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
                    '{}km'.format(r),
                    ha='center', va='bottom', size='8', color='0.6', weight='heavy')

        if self.mode == 'E':
            title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
            label = 'E{:02d}'.format(int(self.data.meta['elevation']))
        else:
            title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
            label = 'A{:02d}'.format(int(self.data.meta['azimuth']))

        self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
        self.titles = ['{} {}'.format(
            self.data.parameters[x], title) for x in self.channels]
511
511
class WeatherParamsPlot(Plot):
    '''
    Polar PPI/RHI scatter-map plot of weather radar parameters with
    geographic (lon/lat) metadata attached for each gate.
    '''
    #CODE = 'RHI'
    #plot_name = 'RHI'
    plot_type = 'scattermap'
    buffering = False

    def setup(self):
        # Single polar panel; one plot per selected channel
        self.ncols = 1
        self.nrows = 1
        self.nplots= 1
        self.ylabel= 'Range [km]'
        self.xlabel= 'Range [km]'
        self.polar = True
        self.grid = True
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))

        self.colorbar=True
        self.width =8
        self.height =8
        self.ini =0
        self.len_azi =0
        self.buffer_ini = None
        self.buffer_ele = None
        self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
        self.flag =0
        self.indicador= 0
        self.last_data_ele = None
        self.val_mean = None

    def update(self, dataOut):
        # Map the variable code in self.attr_data[0] to its row index
        # inside dataOut.data_param
        vars = {
            'S' : 0,
            'V' : 1,
            'W' : 2,
            'SNR' : 3,
            'Z' : 4,
            'D' : 5,
            'P' : 6,
            'R' : 7,
        }

        data = {}
        meta = {}

        # Spectral data carries a normalization factor; time-domain does not
        if hasattr(dataOut, 'nFFTPoints'):
            factor = dataOut.normFactor
        else:
            factor = 1

        if hasattr(dataOut, 'dparam'):
            # Pre-selected single-parameter data: use it as-is
            tmp = getattr(dataOut, 'data_param')
        else:

            if 'S' in self.attr_data[0]:
                # Power: normalize and convert to dB
                tmp = 10*numpy.log10(10.0*getattr(dataOut, 'data_param')[:,0,:]/(factor))
            else:
                tmp = getattr(dataOut, 'data_param')[:,vars[self.attr_data[0]],:]

        if self.mask:
            # Mask gates whose SNR (row 3) falls below the threshold
            mask = dataOut.data_param[:,3,:] < self.mask
            tmp = numpy.ma.masked_array(tmp, mask=mask)

        r = dataOut.heightList
        delta_height = r[1]-r[0]
        # Keep only non-negative ranges and rebuild a uniform range axis
        valid = numpy.where(r>=0)[0]
        data['r'] = numpy.arange(len(valid))*delta_height

        try:
            data['data'] = tmp[self.channels[0]][:,valid]
        except:
            data['data'] = tmp[0][:,valid]

        # Switch plot code/title to match the current scan mode
        if dataOut.mode_op == 'PPI':
            self.CODE = 'PPI'
            self.title = self.CODE
        elif dataOut.mode_op == 'RHI':
            self.CODE = 'RHI'
            self.title = self.CODE

        data['azi'] = dataOut.data_azi
        data['ele'] = dataOut.data_ele
        data['mode_op'] = dataOut.mode_op
        self.mode = dataOut.mode_op
        # Flatten to per-gate vectors so each sample gets a lon/lat pair
        var = data['data'].flatten()
        r = numpy.tile(data['r'], data['data'].shape[0])
        az = numpy.repeat(data['azi'], data['data'].shape[1])
        el = numpy.repeat(data['ele'], data['data'].shape[1])

        # lla = georef.spherical_to_proj(r, data['azi'], data['ele'], (-75.295893, -12.040436, 3379.2147))

        # Hard-coded radar site (lon, lat) — presumably the SOPHY radar
        # location; TODO confirm and consider taking it from metadata
        latlon = antenna_to_geographic(r, az, el, (-75.295893, -12.040436))

        if self.mask:
            # Keep only unmasked gates in both the data and its coordinates
            meta['lat'] = latlon[1][var.mask==False]
            meta['lon'] = latlon[0][var.mask==False]
            data['var'] = numpy.array([var[var.mask==False]])
        else:
            meta['lat'] = latlon[1]
            meta['lon'] = latlon[0]
            data['var'] = numpy.array([var])

        return data, meta

    def plot(self):
        data = self.data[-1]
        z = data['data']
        r = data['r']
        self.titles = []

        self.ymax = self.ymax if self.ymax else numpy.nanmax(r)
        self.ymin = self.ymin if self.ymin else numpy.nanmin(r)
        self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
        self.zmin = self.zmin if self.zmin is not None else numpy.nanmin(z)

        # mode_op may arrive as bytes (e.g. read back from HDF5)
        if isinstance(data['mode_op'], bytes):
            data['mode_op'] = data['mode_op'].decode()

        if data['mode_op'] == 'RHI':
            # Reset angular limits if the previous frame was the other mode
            try:
                if self.data['mode_op'][-2] == 'PPI':
                    self.ang_min = None
                    self.ang_max = None
            except:
                pass
            self.ang_min = self.ang_min if self.ang_min else 0
            self.ang_max = self.ang_max if self.ang_max else 90
            r, theta = numpy.meshgrid(r, numpy.radians(data['ele']) )
        elif data['mode_op'] == 'PPI':
            try:
                if self.data['mode_op'][-2] == 'RHI':
                    self.ang_min = None
                    self.ang_max = None
            except:
                pass
            self.ang_min = self.ang_min if self.ang_min else 0
            self.ang_max = self.ang_max if self.ang_max else 360
            r, theta = numpy.meshgrid(r, numpy.radians(data['azi']) )

        self.clear_figures()

        for i,ax in enumerate(self.axes):

            if ax.firsttime:
                ax.set_xlim(numpy.radians(self.ang_min),numpy.radians(self.ang_max))
                ax.plt = ax.pcolormesh(theta, r, z, cmap=self.colormap, vmin=self.zmin, vmax=self.zmax)
                if data['mode_op'] == 'PPI':
                    # Compass convention: clockwise, 0 deg at the top
                    ax.set_theta_direction(-1)
                    ax.set_theta_offset(numpy.pi/2)

            else:
                ax.set_xlim(numpy.radians(self.ang_min),numpy.radians(self.ang_max))
                ax.plt = ax.pcolormesh(theta, r, z, cmap=self.colormap, vmin=self.zmin, vmax=self.zmax)
                if data['mode_op'] == 'PPI':
                    ax.set_theta_direction(-1)
                    ax.set_theta_offset(numpy.pi/2)

            ax.grid(True)
            if data['mode_op'] == 'RHI':
                # Title shows the central quartile mean of the fixed angle
                len_aux = int(data['azi'].shape[0]/4)
                mean = numpy.mean(data['azi'][len_aux:-len_aux])
                if len(self.channels) !=1:
                    self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[x], str(round(mean,1)), x) for x in range(self.nrows)]
                else:
                    self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[0], str(round(mean,1)), self.channels[0])]
            elif data['mode_op'] == 'PPI':
                len_aux = int(data['ele'].shape[0]/4)
                mean = numpy.mean(data['ele'][len_aux:-len_aux])
                if len(self.channels) !=1:
                    self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[x], str(round(mean,1)), x) for x in range(self.nrows)]
                else:
                    self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[0], str(round(mean,1)), self.channels[0])]
        self.mode_value = round(mean,1)
@@ -1,1609 +1,1611
1 """
1 """
2 Created on Jul 2, 2014
2 Created on Jul 2, 2014
3
3
4 @author: roj-idl71
4 @author: roj-idl71
5 """
5 """
6 import os
6 import os
7 import sys
7 import sys
8 import glob
8 import glob
9 import time
9 import time
10 import numpy
10 import numpy
11 import fnmatch
11 import fnmatch
12 import inspect
12 import inspect
13 import time
13 import time
14 import datetime
14 import datetime
15 import zmq
15 import zmq
16
16
17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 from schainpy.utils import log
20 from schainpy.utils import log
21 import schainpy.admin
21 import schainpy.admin
22
22
# Interpret timestamps found in data files as local time (vs. UTC).
LOCALTIME = True

# Character width consumed by each strftime-style directive; used by
# parse_format() to slice fixed-width date fields out of file/folder names.
DT_DIRECTIVES = {
    '%Y': 4,
    '%y': 2,
    '%m': 2,
    '%d': 2,
    '%j': 3,
    '%H': 2,
    '%M': 2,
    '%S': 2,
    '%f': 6,
}
35
35
36
36
def isNumber(cad):
    """Return True when *cad* can be converted to a float, False otherwise.

    Input:
        cad: string (or any value accepted by float()) to test.

    Return:
        True  : the value is numeric
        False : the conversion failed
    """
    try:
        float(cad)
        return True
    except (ValueError, TypeError):
        # Narrowed from a bare `except`: only conversion failures mean
        # "not a number"; other exceptions should propagate.
        return False
55
55
56
56
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """Return 1 when the file's first basic-header timestamp lies inside
    [startUTSeconds, endUTSeconds), else 0.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : range start, seconds since 01/01/1970
        endUTSeconds   : range end, seconds since 01/01/1970

    Return:
        1 if the file's header time is inside the range, 0 otherwise
        (also 0 when the file cannot be opened or its header is invalid).
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    # Close the handle even if the header read raises (the original leaked
    # fp on an exception inside read()).
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # half-open interval: start inclusive, end exclusive
    if not (startUTSeconds <= basicHeaderObj.utc < endUTSeconds):
        return 0

    return 1
97
97
98
98
def isTimeInRange(thisTime, startTime, endTime):
    """Return 1 when *thisTime* falls inside the window [startTime, endTime].

    When endTime < startTime the window wraps past midnight, so times on
    either side of the day boundary still match.
    """
    if endTime >= startTime:
        # plain same-day window
        return 1 if startTime <= thisTime <= endTime else 0
    # overnight window: excluded only when strictly between the two bounds
    if endTime < thisTime < startTime:
        return 0
    return 1
108
108
109
109
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """Check whether a Jicamarca data file overlaps the given time window.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r)
        startDate : range start as datetime.date
        endDate   : range end as datetime.date
        startTime : window start as datetime.time
        endTime   : window end as datetime.time (endTime < startTime means
                    the window wraps past midnight)

    Return:
        The datetime of the file's first block when the file overlaps the
        window, otherwise None (also None when the file cannot be opened
        or any header is invalid).
    """
    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    # Close fp on every exit path: the original leaked the handle on each
    # early return after a failed header read.
    try:
        firstBasicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()
        lastBasicHeaderObj = BasicHeader(LOCALTIME)

        if not firstBasicHeaderObj.read(fp):
            print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
            return None

        if not systemHeaderObj.read(fp):
            return None

        if not radarControllerHeaderObj.read(fp):
            return None

        if not processingHeaderObj.read(fp):
            return None

        filesize = os.path.getsize(filename)

        # one data block plus its 24-byte basic header
        offset = processingHeaderObj.blockSize + 24

        if filesize <= offset:
            print("[Reading] %s: This file has not enough data" % filename)
            return None

        # jump to the basic header of the last block
        fp.seek(-offset, 2)
        lastBasicHeaderObj.read(fp)
    finally:
        fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    #              o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    # -----------o----------------------------o-----------
    #         startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None
        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day
    # <<<<<<<<<<<o                           o>>>>>>>>>>>
    # -----------o----------------------------o-----------
    #          endTime                     startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
211
211
212
212
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """Return 1 when *folder* (named "?YYYYDDD") dates within the range.

    Inputs:
        folder    : full path of the directory; its basename must follow
                    the "?YYYYDDD" convention (YYYY year, DDD day of year)
        startDate : range start as datetime.date (optional)
        endDate   : range end as datetime.date (optional)

    Return:
        1 when the folder date falls inside [startDate, endDate] (or when
        no range is given), 0 otherwise or when the name is malformed.
    """
    name = os.path.basename(folder)

    if not isRadarFolder(name):
        print("The folder %s has not the rigth format" % folder)
        return 0

    if startDate and endDate:
        folderDate = getDateFromRadarFolder(name)
        if not (startDate <= folderDate <= endDate):
            return 0

    return 1
252
252
253
253
def isFileInDateRange(filename, startDate=None, endDate=None):
    """Return 1 when *filename* (named "?YYYYDDDsss") dates within the range.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r); the
                    basename must follow "?YYYYDDDsss" (YYYY year, DDD day
                    of year, sss set)
        startDate : range start as datetime.date (optional)
        endDate   : range end as datetime.date (optional)

    Return:
        1 when the file date falls inside [startDate, endDate] (or when no
        range is given), 0 otherwise or when the name is malformed.
    """
    name = os.path.basename(filename)

    if not isRadarFile(name):
        print("The filename %s has not the rigth format" % filename)
        return 0

    if startDate and endDate:
        fileDate = getDateFromRadarFile(name)
        if not (startDate <= fileDate <= endDate):
            return 0

    return 1
295
295
296
296
def getFileFromSet(path, ext, set):
    """Return the filename in *path* matching the given *set* number.

    The year/doy used in the match come from the last directory entry that
    parses as "?YYYYDDD..." (directory order is OS-dependent). When no file
    matches the requested set, falls back to the last valid file in
    case-insensitive sort order; returns None when the directory contains
    no parseable file at all.

    Inputs:
        path : directory to scan
        ext  : required file extension (case-insensitive)
        set  : set number to match (note: shadows the builtin; kept for
               backward compatibility with existing callers)
    """
    validFilelist = []
    fileList = os.listdir(path)

    # filename layout:  0 1234 567 89A BCDE
    #                   H YYYY DDD SSS .ext
    year = doy = None
    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except ValueError:
            continue

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    if year is None:
        # No entry parsed at all: the original raised NameError here because
        # year/doy were never assigned before the fnmatch call below.
        return None

    myfile = fnmatch.filter(
        validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
331
331
332
332
def getlastFileFromPath(path, ext):
    """Return the last file in *path* that follows "PYYYYDDDSSS.ext".

    Entries are filtered to those whose name carries a numeric year (chars
    1-4) and day-of-year (chars 5-7) and whose extension matches *ext*
    case-insensitively; the lexicographically last survivor (case-folded)
    is returned, or None when nothing qualifies.
    """
    candidates = []

    # filename layout:  0 1234 567 89A BCDE
    #                   H YYYY DDD SSS .ext
    for name in os.listdir(path):

        yearField = name[1:5]
        if not isNumber(yearField):
            continue

        doyField = name[5:8]
        if not isNumber(doyField):
            continue

        # Conversions kept from the original: they raise (as the original
        # did) on float-like fields such as "12.5" that pass isNumber().
        int(yearField)
        int(doyField)

        if os.path.splitext(name)[-1].lower() != ext.lower():
            continue

        candidates.append(name)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
374
374
375
375
def isRadarFolder(folder):
    """Return 1 when *folder* is named like a radar folder ("?YYYYDDD"), else 0."""
    try:
        int(folder[1:5])  # year
        int(folder[5:8])  # day of year
    except (ValueError, TypeError):
        # narrowed from a bare `except`: only parse failures mean "not a
        # radar folder"
        return 0

    return 1
384
384
385
385
def isRadarFile(file):
    """Return 1 when *file* is named like "?YYYYDDDSSS" (+ext), else 0."""
    try:
        int(file[1:5])   # year
        int(file[5:8])   # day of year
        int(file[8:11])  # set number
    except (ValueError, TypeError):
        # narrowed from a bare `except`: only parse failures mean "not a
        # radar file"
        return 0

    return 1
395
395
396
396
def getDateFromRadarFile(file):
    """Parse a "?YYYYDDDSSS" filename into a datetime.date.

    Return:
        datetime.date built from the year and day-of-year fields, or None
        when the name does not follow the convention.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number: validated but not used
    except (ValueError, TypeError):
        # narrowed from a bare `except`: only parse failures yield None
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
407
407
408
408
def getDateFromRadarFolder(folder):
    """Parse a "?YYYYDDD" folder name into a datetime.date.

    Return:
        datetime.date built from the year and day-of-year fields, or None
        when the name does not follow the convention.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except (ValueError, TypeError):
        # narrowed from a bare `except`: only parse failures yield None
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
418
418
def parse_format(s, fmt):
    """Substitute each strftime directive in *fmt* with the slice of *s*
    found at the directive's position, using the field widths declared in
    DT_DIRECTIVES.

    Example: parse_format('2021305', '%Y%j') -> '2021305'.
    """
    remaining = fmt.count('%')
    for _ in range(remaining):
        pos = fmt.index('%')
        directive = fmt[pos:pos + 2]
        width = DT_DIRECTIVES[directive]
        # replace the directive in place with the matching slice of s
        fmt = fmt.replace(directive, s[pos:pos + width])
    return fmt
426
426
class Reader(object):
    """Base class for data-file readers: file search, open and bookkeeping
    state shared by the concrete reader implementations."""

    # physics / format constants
    c = 3E8                # speed of light [m/s]
    basicHeaderSize = 24
    versionFile = 1103

    # configuration flags and state
    isConfig = False
    dtype = None
    pathList = []          # NOTE(review): mutable class attributes are shared
    filenameList = []      # across instances unless rebound per instance
    datetimeList = []
    filename = None
    ext = None
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None
    firstHeaderSize = 0
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False

    # search window
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""

    # online-mode parameters
    online = False
    delay = 60
    nTries = 3             # quantity tries
    nFiles = 3             # number of files for searching
    walk = True
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open
    open_mode = 'rb'
    filter = None          # optional filename-stem suffix filter (find_files)
484
485
485 def run(self):
486 def run(self):
486
487
487 raise NotImplementedError
488 raise NotImplementedError
488
489
489 def getAllowedArgs(self):
490 def getAllowedArgs(self):
490 if hasattr(self, '__attrs__'):
491 if hasattr(self, '__attrs__'):
491 return self.__attrs__
492 return self.__attrs__
492 else:
493 else:
493 return inspect.getargspec(self.run).args
494 return inspect.getargspec(self.run).args
494
495
495 def set_kwargs(self, **kwargs):
496 def set_kwargs(self, **kwargs):
496
497
497 for key, value in kwargs.items():
498 for key, value in kwargs.items():
498 setattr(self, key, value)
499 setattr(self, key, value)
499
500
500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501
502
502 folders = [x for f in path.split(',')
503 folders = [x for f in path.split(',')
503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 folders.sort()
505 folders.sort()
505
506
506 if last:
507 if last:
507 folders = [folders[-1]]
508 folders = [folders[-1]]
508
509
509 for folder in folders:
510 for folder in folders:
510 try:
511 try:
511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 if dt >= startDate and dt <= endDate:
513 if dt >= startDate and dt <= endDate:
513 yield os.path.join(path, folder)
514 yield os.path.join(path, folder)
514 else:
515 else:
515 log.log('Skiping folder {}'.format(folder), self.name)
516 log.log('Skiping folder {}'.format(folder), self.name)
516 except Exception as e:
517 except Exception as e:
517 log.log('Skiping folder {}'.format(folder), self.name)
518 log.log('Skiping folder {}'.format(folder), self.name)
518 continue
519 continue
519 return
520 return
520
521
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 expLabel='', last=False):
523 expLabel='', filter=None,last=False):
523
524
524 for path in folders:
525 for path in folders:
525 files = glob.glob1(path, '*{}'.format(ext))
526 files = glob.glob1(path, '*{}'.format(ext))
526 files.sort()
527 files.sort()
528 if filter is not None:
529 files= [ file for file in files if os.path.splitext(file)[0][-len(filter):] == filter]
527 if last:
530 if last:
528 if files:
531 if files:
529 fo = files[-1]
532 fo = files[-1]
530 try:
533 try:
531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
534 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 yield os.path.join(path, expLabel, fo)
535 yield os.path.join(path, expLabel, fo)
533 except Exception as e:
536 except Exception as e:
534 pass
537 pass
535 return
538 return
536 else:
539 else:
537 return
540 return
538
541
539 for fo in files:
542 for fo in files:
540 try:
543 try:
541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
544 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 if dt >= startDate and dt <= endDate:
545 if dt >= startDate and dt <= endDate:
543 yield os.path.join(path, expLabel, fo)
546 yield os.path.join(path, expLabel, fo)
544 else:
547 else:
545 log.log('Skiping file {}'.format(fo), self.name)
548 log.log('Skiping file {}'.format(fo), self.name)
546 except Exception as e:
549 except Exception as e:
547 log.log('Skiping file {}'.format(fo), self.name)
550 log.log('Skiping file {}'.format(fo), self.name)
548 continue
551 continue
549
552
550 def searchFilesOffLine(self, path, startDate, endDate,
553 def searchFilesOffLine(self, path, startDate, endDate,
551 expLabel, ext, walk,
554 expLabel, ext, walk,
552 filefmt, folderfmt):
555 filefmt, folderfmt,filter):
553 """Search files in offline mode for the given arguments
556 """Search files in offline mode for the given arguments
554
557
555 Return:
558 Return:
556 Generator of files
559 Generator of files
557 """
560 """
558
561
559 if walk:
562 if walk:
560 folders = self.find_folders(
563 folders = self.find_folders(
561 path, startDate, endDate, folderfmt)
564 path, startDate, endDate, folderfmt)
562 else:
565 else:
563 folders = path.split(',')
566 folders = path.split(',')
564
567
565 return self.find_files(
568 return self.find_files(
566 folders, ext, filefmt, startDate, endDate, expLabel)
569 folders, ext, filefmt, startDate, endDate, expLabel,filter)
567
570
568 def searchFilesOnLine(self, path, startDate, endDate,
571 def searchFilesOnLine(self, path, startDate, endDate,
569 expLabel, ext, walk,
572 expLabel, ext, walk,
570 filefmt, folderfmt):
573 filefmt, folderfmt,filter):
571 """Search for the last file of the last folder
574 """Search for the last file of the last folder
572
575
573 Arguments:
576 Arguments:
574 path : carpeta donde estan contenidos los files que contiene data
577 path : carpeta donde estan contenidos los files que contiene data
575 expLabel : Nombre del subexperimento (subfolder)
578 expLabel : Nombre del subexperimento (subfolder)
576 ext : extension de los files
579 ext : extension de los files
577 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
580 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
578
581
579 Return:
582 Return:
580 generator with the full path of last filename
583 generator with the full path of last filename
581 """
584 """
582
585
583 if walk:
586 if walk:
584 folders = self.find_folders(
587 folders = self.find_folders(
585 path, startDate, endDate, folderfmt, last=True)
588 path, startDate, endDate, folderfmt, last=True)
586 else:
589 else:
587 folders = path.split(',')
590 folders = path.split(',')
588
591
589 return self.find_files(
592 return self.find_files(folders, ext, filefmt, startDate, endDate, expLabel, filter,last=True)
590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591
593
    def setNextFile(self):
        """Advance to the next data file: open it and parse its first header.

        Loops until a file passing `verifyFile` is found; raises
        SchainError (online) or SchainWarning (offline) when no more files
        are available.
        """

        while True:
            # release the previous file handle before opening a new one
            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):
                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    if self.fileIndex == -1:
                        # never found a single file in the given path
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            # keep looping until a file passes verification
            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        # reset per-file block counter
        self.nReadBlocks = 0
620
622
    def setNextFileOnline(self):
        """Check for the next file to be read in online mode.

        Retries `checkForRealPath` up to nTries times per candidate (waiting
        `delay` seconds between tries) over up to nFiles+1 candidates,
        switching to the next day on the last candidate.

        Set:
            self.filename
            self.fp
            self.fileSize

        Return:
            1 when a file was opened, 0 otherwise.
        """
        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                # after the first miss, keep re-checking the same file
                nextFile = False
                continue

            if fullfilename is not None:
                break

            # candidate exhausted: try the next file with a single attempt
            self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                # last candidate of the day: roll over to the next day and
                # restore the full retry budget
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            # close any previously open handle before switching files
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)
            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0
671
673
672 def setNextFileOffline(self):
674 def setNextFileOffline(self):
673 """Open the next file to be readed in offline mode"""
675 """Open the next file to be readed in offline mode"""
674
676
675 try:
677 try:
676 filename = next(self.filenameList)
678 filename = next(self.filenameList)
677 self.fileIndex +=1
679 self.fileIndex +=1
678 except StopIteration:
680 except StopIteration:
679 self.flagNoMoreFiles = 1
681 self.flagNoMoreFiles = 1
680 return 0
682 return 0
681
683
682 self.filename = filename
684 self.filename = filename
683 self.fileSize = os.path.getsize(filename)
685 self.fileSize = os.path.getsize(filename)
684 self.fp = self.open_file(filename, self.open_mode)
686 self.fp = self.open_file(filename, self.open_mode)
685 self.flagIsNewFile = 1
687 self.flagIsNewFile = 1
686
688
687 return 1
689 return 1
688
690
689 @staticmethod
691 @staticmethod
690 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
692 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 """Check if the given datetime is in range"""
693 """Check if the given datetime is in range"""
692
694
693 if startDate <= dt.date() <= endDate:
695 if startDate <= dt.date() <= endDate:
694 if startTime <= dt.time() <= endTime:
696 if startTime <= dt.time() <= endTime:
695 return True
697 return True
696 return False
698 return False
697
699
698 def verifyFile(self, filename):
700 def verifyFile(self, filename):
699 """Check for a valid file
701 """Check for a valid file
700
702
701 Arguments:
703 Arguments:
702 filename -- full path filename
704 filename -- full path filename
703
705
704 Return:
706 Return:
705 boolean
707 boolean
706 """
708 """
707
709
708 return True
710 return True
709
711
    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be read exists and return its path.

        Builds candidate names of the form [xYYYYDDD/]yYYYYDDDSSS.ext for
        every combination of folder prefix (none/'d'/'D') and file prefix,
        returning the first one found on disk.

        Arguments:
            nextFile -- bool, advance the set (file) counter before searching
            nextDay -- bool, reset the set counter and advance the day of year

        Return:
            (fullfilename, filename) when found, (None, filename) otherwise.
        """
        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r": # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata": # spectra
            prefixFileList = ['p', 'P']
        elif self.ext.lower() == ".hdf5": # HDF5
            prefixFileList = ['D', 'P'] # HDF5
        # NOTE(review): any other extension leaves prefixFileList undefined and
        # raises NameError in the loop below -- confirm only these extensions
        # can reach this method.

        # scan every possible prefix combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the directory name xYYYYDDD (x='d' or x='D')
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList: # both capitalisations of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename
        #raise NotImplementedError
748
750
749 def readFirstHeader(self):
751 def readFirstHeader(self):
750 """Parse the file header"""
752 """Parse the file header"""
751
753
752 pass
754 pass
753
755
    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait until the current file grows enough to contain the next block.

        Arguments:
            pointer_location -- byte offset where the next block should start
            blocksize -- bytes required; defaults to
                         ``processingHeaderObj.blockSize``

        Return:
            1 when enough data is available, 0 after ``nTries`` attempts.
        """

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            # Reopen the file each try so the on-disk size is refreshed.
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0
782
784
class JRODataReader(Reader):
    """Base reader for JRO binary data files.

    Holds per-file bookkeeping defaults and the filename/folder glob formats
    used when scanning for data files.
    """

    utc = 0
    nReadBlocks = 0          # blocks read so far from the current file
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24     # on-disk size of a basic header, in bytes
    __isFirstTimeOnline = 1
    filefmt = "*%Y%j***"     # year + day-of-year + set glob for files
    folderfmt = "*%Y%j"      # year + day-of-year glob for day folders
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
794
796
795 def getDtypeWidth(self):
797 def getDtypeWidth(self):
796
798
797 dtype_index = get_dtype_index(self.dtype)
799 dtype_index = get_dtype_index(self.dtype)
798 dtype_width = get_dtype_width(dtype_index)
800 dtype_width = get_dtype_width(dtype_index)
799
801
800 return dtype_width
802 return dtype_width
801
803
    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be read exists.

        Example:
            the correct file name is .../.../D2009307/P2009307367.ext

            so the function tries the following combinations
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
            the last combination being, in this case, identical to the
            wanted file

        Return:
            str -- fullpath of the file
        """


        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r": # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata": # spectra
            prefixFileList = ['p', 'P']
        # NOTE(review): other extensions leave prefixFileList undefined
        # (NameError below) -- confirm only .r/.pdata reach this reader.

        # scan every possible prefix combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the directory name xYYYYDDD (x='d' or x='D')
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList: # both capitalisations of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename
855
857
    def __waitNewBlock(self):
        """Wait for a new data block to appear in the current file.

        Return 1 when a new block was found, 0 otherwise.
        Always returns 0 when reading offline.
        """
        if not self.online:
            return 0

        # All blocks declared for this file were already read.
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # Reopen to refresh the file size/contents on disk.
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            # File already reached its header-declared size: no more data
            # will be appended to it.
            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0
893
895
    def __setNewBlock(self):
        """Position the reader at the next block, switching files if needed.

        Return:
            1 when a block header was read (or a fresh file is already open),
            0 when there is no open file or no next file.
        """

        if self.fp == None:
            return 0

        # A newly opened file already has its first header parsed.
        if self.flagIsNewFile:
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            # Real-time mode: jump straight to the newest file.
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # Enough bytes left in the current file: read the next basic header.
        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        # Online mode may still grow the file; wait for it.
        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        # Flag a time gap between consecutive blocks across files.
        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1
934
936
    def readNextBlock(self):
        """Read the next data block, skipping blocks outside the datetime range.

        Return:
            1 on success, 0 when a block could not be read.
        """

        while True:
            # NOTE(review): when __setNewBlock() keeps returning 0 this loops
            # forever; in practice setNextFile() raises when files run out --
            # confirm that is the intended exit path.
            if not(self.__setNewBlock()):
                continue

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            # Skip blocks whose timestamp is outside the requested range.
            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        return 1
959
961
    def readFirstHeader(self):
        """Read all file-level headers; derive dtype and expected file size."""

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)
        self.firstHeaderSize = self.basicHeaderObj.size

        # Decode the sample data type index from the processing-flags bitmask.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
                                   PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
        else:
            raise ValueError('Data type was not defined')

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # Expected on-disk size: all data blocks + the first (full) header +
        # one basic header per block after the first.
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
            self.firstHeaderSize + self.basicHeaderSize * \
            (self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
993
995
    def verifyFile(self, filename):
        """Validate a data file by parsing its headers.

        Offline mode additionally checks that the first or last block's
        timestamp falls inside the requested datetime range.

        Arguments:
            filename -- full path filename

        Return:
            boolean -- True when the file looks readable (and in range).
        """

        flag = True

        try:
            fp = open(filename, 'rb')
        except IOError:
            log.error("File {} can't be opened".format(filename), self.name)
            return False

        if self.online and self.waitDataBlock(0):
            pass

        basicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        if not(basicHeaderObj.read(fp)):
            flag = False
        if not(systemHeaderObj.read(fp)):
            flag = False
        if not(radarControllerHeaderObj.read(fp)):
            flag = False
        if not(processingHeaderObj.read(fp)):
            flag = False
        if not self.online:
            dt1 = basicHeaderObj.datatime
            # Seek to the basic header (24 bytes) preceding the last block.
            pos = self.fileSize-processingHeaderObj.blockSize-24
            if pos<0:
                flag = False
                log.error('Invalid size for file: {}'.format(self.filename), self.name)
            else:
                fp.seek(pos)
                if not(basicHeaderObj.read(fp)):
                    flag = False
                dt2 = basicHeaderObj.datatime
                # Reject only when both the first and last block timestamps
                # fall outside the requested range.
                if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                    self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                    flag = False

        fp.close()
        return flag
1037
1039
    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """List the dates (and optionally folders) that contain data files.

        Arguments:
            path -- one or more base paths, comma separated
            startDate / endDate -- date range filter (None = unfiltered)
            expLabel -- experiment sub-folder inside each day folder
            ext -- data file extension, e.g. '.r'
            walk -- True to descend into xYYYYDDD day folders, False to scan
                    the given path(s) directly
            include_path -- also return the matching folder for each date

        Return:
            dateList, or (dateList, pathList) when include_path is True.

        Raises:
            schainpy.admin.SchainError -- no files at all, or none in range.
        """

        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*" + ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    # Record each date/path pair only once.
                    if thisDate in dateList or single_path in pathList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                # Collect day folders (xYYYYDDD) that fall in the date range.
                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*" + ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
        else:
            if not dateList:
                raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))

        if include_path:
            return dateList, pathList

        return dateList
1140
1142
1141 def setup(self, **kwargs):
1143 def setup(self, **kwargs):
1142
1144
1143 self.set_kwargs(**kwargs)
1145 self.set_kwargs(**kwargs)
1144 if not self.ext.startswith('.'):
1146 if not self.ext.startswith('.'):
1145 self.ext = '.{}'.format(self.ext)
1147 self.ext = '.{}'.format(self.ext)
1146
1148
1147 if self.server is not None:
1149 if self.server is not None:
1148 if 'tcp://' in self.server:
1150 if 'tcp://' in self.server:
1149 address = server
1151 address = server
1150 else:
1152 else:
1151 address = 'ipc:///tmp/%s' % self.server
1153 address = 'ipc:///tmp/%s' % self.server
1152 self.server = address
1154 self.server = address
1153 self.context = zmq.Context()
1155 self.context = zmq.Context()
1154 self.receiver = self.context.socket(zmq.PULL)
1156 self.receiver = self.context.socket(zmq.PULL)
1155 self.receiver.connect(self.server)
1157 self.receiver.connect(self.server)
1156 time.sleep(0.5)
1158 time.sleep(0.5)
1157 print('[Starting] ReceiverData from {}'.format(self.server))
1159 print('[Starting] ReceiverData from {}'.format(self.server))
1158 else:
1160 else:
1159 self.server = None
1161 self.server = None
1160 if self.path == None:
1162 if self.path == None:
1161 raise ValueError("[Reading] The path is not valid")
1163 raise ValueError("[Reading] The path is not valid")
1162
1164
1163 if self.online:
1165 if self.online:
1164 log.log("[Reading] Searching files in online mode...", self.name)
1166 log.log("[Reading] Searching files in online mode...", self.name)
1165
1167
1166 for nTries in range(self.nTries):
1168 for nTries in range(self.nTries):
1167 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1169 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1168 self.endDate, self.expLabel, self.ext, self.walk,
1170 self.endDate, self.expLabel, self.ext, self.walk,
1169 self.filefmt, self.folderfmt)
1171 self.filefmt, self.folderfmt)
1170
1172
1171 try:
1173 try:
1172 fullpath = next(fullpath)
1174 fullpath = next(fullpath)
1173 except:
1175 except:
1174 fullpath = None
1176 fullpath = None
1175
1177
1176 if fullpath:
1178 if fullpath:
1177 break
1179 break
1178
1180
1179 log.warning(
1181 log.warning(
1180 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1182 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1181 self.delay, self.path, nTries + 1),
1183 self.delay, self.path, nTries + 1),
1182 self.name)
1184 self.name)
1183 time.sleep(self.delay)
1185 time.sleep(self.delay)
1184
1186
1185 if not(fullpath):
1187 if not(fullpath):
1186 raise schainpy.admin.SchainError(
1188 raise schainpy.admin.SchainError(
1187 'There isn\'t any valid file in {}'.format(self.path))
1189 'There isn\'t any valid file in {}'.format(self.path))
1188
1190
1189 pathname, filename = os.path.split(fullpath)
1191 pathname, filename = os.path.split(fullpath)
1190 self.year = int(filename[1:5])
1192 self.year = int(filename[1:5])
1191 self.doy = int(filename[5:8])
1193 self.doy = int(filename[5:8])
1192 self.set = int(filename[8:11]) - 1
1194 self.set = int(filename[8:11]) - 1
1193 else:
1195 else:
1194 log.log("Searching files in {}".format(self.path), self.name)
1196 log.log("Searching files in {}".format(self.path), self.name)
1195 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1197 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1196 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1198 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1197
1199
1198 self.setNextFile()
1200 self.setNextFile()
1199
1201
1200 return
1202 return
1201
1203
    def getBasicHeader(self):
        """Copy per-block metadata from the basic header into dataOut."""

        # Block time plus the offset of the current profile within the block.
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
            1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        # Effective IPP shrinks when several transmitters interleave pulses.
        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1218
1220
1219 def getFirstHeader(self):
1221 def getFirstHeader(self):
1220
1222
1221 raise NotImplementedError
1223 raise NotImplementedError
1222
1224
1223 def getData(self):
1225 def getData(self):
1224
1226
1225 raise NotImplementedError
1227 raise NotImplementedError
1226
1228
1227 def hasNotDataInBuffer(self):
1229 def hasNotDataInBuffer(self):
1228
1230
1229 raise NotImplementedError
1231 raise NotImplementedError
1230
1232
1231 def readBlock(self):
1233 def readBlock(self):
1232
1234
1233 raise NotImplementedError
1235 raise NotImplementedError
1234
1236
1235 def isEndProcess(self):
1237 def isEndProcess(self):
1236
1238
1237 return self.flagNoMoreFiles
1239 return self.flagNoMoreFiles
1238
1240
1239 def printReadBlocks(self):
1241 def printReadBlocks(self):
1240
1242
1241 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1243 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1242
1244
1243 def printTotalBlocks(self):
1245 def printTotalBlocks(self):
1244
1246
1245 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1247 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1246
1248
1247 def run(self, **kwargs):
1249 def run(self, **kwargs):
1248 """
1250 """
1249
1251
1250 Arguments:
1252 Arguments:
1251 path :
1253 path :
1252 startDate :
1254 startDate :
1253 endDate :
1255 endDate :
1254 startTime :
1256 startTime :
1255 endTime :
1257 endTime :
1256 set :
1258 set :
1257 expLabel :
1259 expLabel :
1258 ext :
1260 ext :
1259 online :
1261 online :
1260 delay :
1262 delay :
1261 walk :
1263 walk :
1262 getblock :
1264 getblock :
1263 nTxs :
1265 nTxs :
1264 realtime :
1266 realtime :
1265 blocksize :
1267 blocksize :
1266 blocktime :
1268 blocktime :
1267 skip :
1269 skip :
1268 cursor :
1270 cursor :
1269 warnings :
1271 warnings :
1270 server :
1272 server :
1271 verbose :
1273 verbose :
1272 format :
1274 format :
1273 oneDDict :
1275 oneDDict :
1274 twoDDict :
1276 twoDDict :
1275 independentParam :
1277 independentParam :
1276 """
1278 """
1277
1279
1278 if not(self.isConfig):
1280 if not(self.isConfig):
1279 self.setup(**kwargs)
1281 self.setup(**kwargs)
1280 self.isConfig = True
1282 self.isConfig = True
1281 if self.server is None:
1283 if self.server is None:
1282 self.getData()
1284 self.getData()
1283 else:
1285 else:
1284 self.getFromServer()
1286 self.getFromServer()
1285
1287
1286
1288
1287 class JRODataWriter(Reader):
1289 class JRODataWriter(Reader):
1288
1290
1289 """
1291 """
1290 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1292 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1291 de los datos siempre se realiza por bloques.
1293 de los datos siempre se realiza por bloques.
1292 """
1294 """
1293
1295
1294 setFile = None
1296 setFile = None
1295 profilesPerBlock = None
1297 profilesPerBlock = None
1296 blocksPerFile = None
1298 blocksPerFile = None
1297 nWriteBlocks = 0
1299 nWriteBlocks = 0
1298 fileDate = None
1300 fileDate = None
1299
1301
1300 def __init__(self, dataOut=None):
1302 def __init__(self, dataOut=None):
1301 raise NotImplementedError
1303 raise NotImplementedError
1302
1304
1303 def hasAllDataInBuffer(self):
1305 def hasAllDataInBuffer(self):
1304 raise NotImplementedError
1306 raise NotImplementedError
1305
1307
1306 def setBlockDimension(self):
1308 def setBlockDimension(self):
1307 raise NotImplementedError
1309 raise NotImplementedError
1308
1310
1309 def writeBlock(self):
1311 def writeBlock(self):
1310 raise NotImplementedError
1312 raise NotImplementedError
1311
1313
1312 def putData(self):
1314 def putData(self):
1313 raise NotImplementedError
1315 raise NotImplementedError
1314
1316
1315 def getDtypeWidth(self):
1317 def getDtypeWidth(self):
1316
1318
1317 dtype_index = get_dtype_index(self.dtype)
1319 dtype_index = get_dtype_index(self.dtype)
1318 dtype_width = get_dtype_width(dtype_index)
1320 dtype_width = get_dtype_width(dtype_index)
1319
1321
1320 return dtype_width
1322 return dtype_width
1321
1323
1322 def getProcessFlags(self):
1324 def getProcessFlags(self):
1323
1325
1324 processFlags = 0
1326 processFlags = 0
1325
1327
1326 dtype_index = get_dtype_index(self.dtype)
1328 dtype_index = get_dtype_index(self.dtype)
1327 procflag_dtype = get_procflag_dtype(dtype_index)
1329 procflag_dtype = get_procflag_dtype(dtype_index)
1328
1330
1329 processFlags += procflag_dtype
1331 processFlags += procflag_dtype
1330
1332
1331 if self.dataOut.flagDecodeData:
1333 if self.dataOut.flagDecodeData:
1332 processFlags += PROCFLAG.DECODE_DATA
1334 processFlags += PROCFLAG.DECODE_DATA
1333
1335
1334 if self.dataOut.flagDeflipData:
1336 if self.dataOut.flagDeflipData:
1335 processFlags += PROCFLAG.DEFLIP_DATA
1337 processFlags += PROCFLAG.DEFLIP_DATA
1336
1338
1337 if self.dataOut.code is not None:
1339 if self.dataOut.code is not None:
1338 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1340 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1339
1341
1340 if self.dataOut.nCohInt > 1:
1342 if self.dataOut.nCohInt > 1:
1341 processFlags += PROCFLAG.COHERENT_INTEGRATION
1343 processFlags += PROCFLAG.COHERENT_INTEGRATION
1342
1344
1343 if self.dataOut.type == "Spectra":
1345 if self.dataOut.type == "Spectra":
1344 if self.dataOut.nIncohInt > 1:
1346 if self.dataOut.nIncohInt > 1:
1345 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1347 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1346
1348
1347 if self.dataOut.data_dc is not None:
1349 if self.dataOut.data_dc is not None:
1348 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1350 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1349
1351
1350 if self.dataOut.flagShiftFFT:
1352 if self.dataOut.flagShiftFFT:
1351 processFlags += PROCFLAG.SHIFT_FFT_DATA
1353 processFlags += PROCFLAG.SHIFT_FFT_DATA
1352
1354
1353 return processFlags
1355 return processFlags
1354
1356
1355 def setBasicHeader(self):
1357 def setBasicHeader(self):
1356
1358
1357 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1359 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1358 self.basicHeaderObj.version = self.versionFile
1360 self.basicHeaderObj.version = self.versionFile
1359 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1361 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1360 utc = numpy.floor(self.dataOut.utctime)
1362 utc = numpy.floor(self.dataOut.utctime)
1361 milisecond = (self.dataOut.utctime - utc) * 1000.0
1363 milisecond = (self.dataOut.utctime - utc) * 1000.0
1362 self.basicHeaderObj.utc = utc
1364 self.basicHeaderObj.utc = utc
1363 self.basicHeaderObj.miliSecond = milisecond
1365 self.basicHeaderObj.miliSecond = milisecond
1364 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1366 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1365 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1367 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1366 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1368 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1367
1369
1368 def setFirstHeader(self):
1370 def setFirstHeader(self):
1369 """
1371 """
1370 Obtiene una copia del First Header
1372 Obtiene una copia del First Header
1371
1373
1372 Affected:
1374 Affected:
1373
1375
1374 self.basicHeaderObj
1376 self.basicHeaderObj
1375 self.systemHeaderObj
1377 self.systemHeaderObj
1376 self.radarControllerHeaderObj
1378 self.radarControllerHeaderObj
1377 self.processingHeaderObj self.
1379 self.processingHeaderObj self.
1378
1380
1379 Return:
1381 Return:
1380 None
1382 None
1381 """
1383 """
1382
1384
1383 raise NotImplementedError
1385 raise NotImplementedError
1384
1386
1385 def __writeFirstHeader(self):
1387 def __writeFirstHeader(self):
1386 """
1388 """
1387 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1389 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1388
1390
1389 Affected:
1391 Affected:
1390 __dataType
1392 __dataType
1391
1393
1392 Return:
1394 Return:
1393 None
1395 None
1394 """
1396 """
1395
1397
1396 # CALCULAR PARAMETROS
1398 # CALCULAR PARAMETROS
1397
1399
1398 sizeLongHeader = self.systemHeaderObj.size + \
1400 sizeLongHeader = self.systemHeaderObj.size + \
1399 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1401 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1400 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1402 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1401
1403
1402 self.basicHeaderObj.write(self.fp)
1404 self.basicHeaderObj.write(self.fp)
1403 self.systemHeaderObj.write(self.fp)
1405 self.systemHeaderObj.write(self.fp)
1404 self.radarControllerHeaderObj.write(self.fp)
1406 self.radarControllerHeaderObj.write(self.fp)
1405 self.processingHeaderObj.write(self.fp)
1407 self.processingHeaderObj.write(self.fp)
1406
1408
1407 def __setNewBlock(self):
1409 def __setNewBlock(self):
1408 """
1410 """
1409 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1411 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1410
1412
1411 Return:
1413 Return:
1412 0 : si no pudo escribir nada
1414 0 : si no pudo escribir nada
1413 1 : Si escribio el Basic el First Header
1415 1 : Si escribio el Basic el First Header
1414 """
1416 """
1415 if self.fp == None:
1417 if self.fp == None:
1416 self.setNextFile()
1418 self.setNextFile()
1417
1419
1418 if self.flagIsNewFile:
1420 if self.flagIsNewFile:
1419 return 1
1421 return 1
1420
1422
1421 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1423 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1422 self.basicHeaderObj.write(self.fp)
1424 self.basicHeaderObj.write(self.fp)
1423 return 1
1425 return 1
1424
1426
1425 if not(self.setNextFile()):
1427 if not(self.setNextFile()):
1426 return 0
1428 return 0
1427
1429
1428 return 1
1430 return 1
1429
1431
1430 def writeNextBlock(self):
1432 def writeNextBlock(self):
1431 """
1433 """
1432 Selecciona el bloque siguiente de datos y los escribe en un file
1434 Selecciona el bloque siguiente de datos y los escribe en un file
1433
1435
1434 Return:
1436 Return:
1435 0 : Si no hizo pudo escribir el bloque de datos
1437 0 : Si no hizo pudo escribir el bloque de datos
1436 1 : Si no pudo escribir el bloque de datos
1438 1 : Si no pudo escribir el bloque de datos
1437 """
1439 """
1438 if not(self.__setNewBlock()):
1440 if not(self.__setNewBlock()):
1439 return 0
1441 return 0
1440
1442
1441 self.writeBlock()
1443 self.writeBlock()
1442
1444
1443 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1445 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1444 self.processingHeaderObj.dataBlocksPerFile))
1446 self.processingHeaderObj.dataBlocksPerFile))
1445
1447
1446 return 1
1448 return 1
1447
1449
1448 def setNextFile(self):
1450 def setNextFile(self):
1449 """Determina el siguiente file que sera escrito
1451 """Determina el siguiente file que sera escrito
1450
1452
1451 Affected:
1453 Affected:
1452 self.filename
1454 self.filename
1453 self.subfolder
1455 self.subfolder
1454 self.fp
1456 self.fp
1455 self.setFile
1457 self.setFile
1456 self.flagIsNewFile
1458 self.flagIsNewFile
1457
1459
1458 Return:
1460 Return:
1459 0 : Si el archivo no puede ser escrito
1461 0 : Si el archivo no puede ser escrito
1460 1 : Si el archivo esta listo para ser escrito
1462 1 : Si el archivo esta listo para ser escrito
1461 """
1463 """
1462 ext = self.ext
1464 ext = self.ext
1463 path = self.path
1465 path = self.path
1464
1466
1465 if self.fp != None:
1467 if self.fp != None:
1466 self.fp.close()
1468 self.fp.close()
1467
1469
1468 if not os.path.exists(path):
1470 if not os.path.exists(path):
1469 os.mkdir(path)
1471 os.mkdir(path)
1470
1472
1471 timeTuple = time.localtime(self.dataOut.utctime)
1473 timeTuple = time.localtime(self.dataOut.utctime)
1472 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1474 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1473
1475
1474 fullpath = os.path.join(path, subfolder)
1476 fullpath = os.path.join(path, subfolder)
1475 setFile = self.setFile
1477 setFile = self.setFile
1476
1478
1477 if not(os.path.exists(fullpath)):
1479 if not(os.path.exists(fullpath)):
1478 os.mkdir(fullpath)
1480 os.mkdir(fullpath)
1479 setFile = -1 # inicializo mi contador de seteo
1481 setFile = -1 # inicializo mi contador de seteo
1480 else:
1482 else:
1481 filesList = os.listdir(fullpath)
1483 filesList = os.listdir(fullpath)
1482 if len(filesList) > 0:
1484 if len(filesList) > 0:
1483 filesList = sorted(filesList, key=str.lower)
1485 filesList = sorted(filesList, key=str.lower)
1484 filen = filesList[-1]
1486 filen = filesList[-1]
1485 # el filename debera tener el siguiente formato
1487 # el filename debera tener el siguiente formato
1486 # 0 1234 567 89A BCDE (hex)
1488 # 0 1234 567 89A BCDE (hex)
1487 # x YYYY DDD SSS .ext
1489 # x YYYY DDD SSS .ext
1488 if isNumber(filen[8:11]):
1490 if isNumber(filen[8:11]):
1489 # inicializo mi contador de seteo al seteo del ultimo file
1491 # inicializo mi contador de seteo al seteo del ultimo file
1490 setFile = int(filen[8:11])
1492 setFile = int(filen[8:11])
1491 else:
1493 else:
1492 setFile = -1
1494 setFile = -1
1493 else:
1495 else:
1494 setFile = -1 # inicializo mi contador de seteo
1496 setFile = -1 # inicializo mi contador de seteo
1495
1497
1496 setFile += 1
1498 setFile += 1
1497
1499
1498 # If this is a new day it resets some values
1500 # If this is a new day it resets some values
1499 if self.dataOut.datatime.date() > self.fileDate:
1501 if self.dataOut.datatime.date() > self.fileDate:
1500 setFile = 0
1502 setFile = 0
1501 self.nTotalBlocks = 0
1503 self.nTotalBlocks = 0
1502
1504
1503 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1505 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1504 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1506 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1505
1507
1506 filename = os.path.join(path, subfolder, filen)
1508 filename = os.path.join(path, subfolder, filen)
1507
1509
1508 fp = open(filename, 'wb')
1510 fp = open(filename, 'wb')
1509
1511
1510 self.blockIndex = 0
1512 self.blockIndex = 0
1511 self.filename = filename
1513 self.filename = filename
1512 self.subfolder = subfolder
1514 self.subfolder = subfolder
1513 self.fp = fp
1515 self.fp = fp
1514 self.setFile = setFile
1516 self.setFile = setFile
1515 self.flagIsNewFile = 1
1517 self.flagIsNewFile = 1
1516 self.fileDate = self.dataOut.datatime.date()
1518 self.fileDate = self.dataOut.datatime.date()
1517 self.setFirstHeader()
1519 self.setFirstHeader()
1518
1520
1519 print('[Writing] Opening file: %s' % self.filename)
1521 print('[Writing] Opening file: %s' % self.filename)
1520
1522
1521 self.__writeFirstHeader()
1523 self.__writeFirstHeader()
1522
1524
1523 return 1
1525 return 1
1524
1526
1525 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1527 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1526 """
1528 """
1527 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1529 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1528
1530
1529 Inputs:
1531 Inputs:
1530 path : directory where data will be saved
1532 path : directory where data will be saved
1531 profilesPerBlock : number of profiles per block
1533 profilesPerBlock : number of profiles per block
1532 set : initial file set
1534 set : initial file set
1533 datatype : An integer number that defines data type:
1535 datatype : An integer number that defines data type:
1534 0 : int8 (1 byte)
1536 0 : int8 (1 byte)
1535 1 : int16 (2 bytes)
1537 1 : int16 (2 bytes)
1536 2 : int32 (4 bytes)
1538 2 : int32 (4 bytes)
1537 3 : int64 (8 bytes)
1539 3 : int64 (8 bytes)
1538 4 : float32 (4 bytes)
1540 4 : float32 (4 bytes)
1539 5 : double64 (8 bytes)
1541 5 : double64 (8 bytes)
1540
1542
1541 Return:
1543 Return:
1542 0 : Si no realizo un buen seteo
1544 0 : Si no realizo un buen seteo
1543 1 : Si realizo un buen seteo
1545 1 : Si realizo un buen seteo
1544 """
1546 """
1545
1547
1546 if ext == None:
1548 if ext == None:
1547 ext = self.ext
1549 ext = self.ext
1548
1550
1549 self.ext = ext.lower()
1551 self.ext = ext.lower()
1550
1552
1551 self.path = path
1553 self.path = path
1552
1554
1553 if set is None:
1555 if set is None:
1554 self.setFile = -1
1556 self.setFile = -1
1555 else:
1557 else:
1556 self.setFile = set - 1
1558 self.setFile = set - 1
1557
1559
1558 self.blocksPerFile = blocksPerFile
1560 self.blocksPerFile = blocksPerFile
1559 self.profilesPerBlock = profilesPerBlock
1561 self.profilesPerBlock = profilesPerBlock
1560 self.dataOut = dataOut
1562 self.dataOut = dataOut
1561 self.fileDate = self.dataOut.datatime.date()
1563 self.fileDate = self.dataOut.datatime.date()
1562 self.dtype = self.dataOut.dtype
1564 self.dtype = self.dataOut.dtype
1563
1565
1564 if datatype is not None:
1566 if datatype is not None:
1565 self.dtype = get_numpy_dtype(datatype)
1567 self.dtype = get_numpy_dtype(datatype)
1566
1568
1567 if not(self.setNextFile()):
1569 if not(self.setNextFile()):
1568 print("[Writing] There isn't a next file")
1570 print("[Writing] There isn't a next file")
1569 return 0
1571 return 0
1570
1572
1571 self.setBlockDimension()
1573 self.setBlockDimension()
1572
1574
1573 return 1
1575 return 1
1574
1576
1575 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1577 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1576
1578
1577 if not(self.isConfig):
1579 if not(self.isConfig):
1578
1580
1579 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1581 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1580 set=set, ext=ext, datatype=datatype, **kwargs)
1582 set=set, ext=ext, datatype=datatype, **kwargs)
1581 self.isConfig = True
1583 self.isConfig = True
1582
1584
1583 self.dataOut = dataOut
1585 self.dataOut = dataOut
1584 self.putData()
1586 self.putData()
1585 return self.dataOut
1587 return self.dataOut
1586
1588
1587 @MPDecorator
1589 @MPDecorator
1588 class printInfo(Operation):
1590 class printInfo(Operation):
1589
1591
1590 def __init__(self):
1592 def __init__(self):
1591
1593
1592 Operation.__init__(self)
1594 Operation.__init__(self)
1593 self.__printInfo = True
1595 self.__printInfo = True
1594
1596
1595 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1597 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1596 if self.__printInfo == False:
1598 if self.__printInfo == False:
1597 return
1599 return
1598
1600
1599 for header in headers:
1601 for header in headers:
1600 if hasattr(dataOut, header):
1602 if hasattr(dataOut, header):
1601 obj = getattr(dataOut, header)
1603 obj = getattr(dataOut, header)
1602 if hasattr(obj, 'printInfo'):
1604 if hasattr(obj, 'printInfo'):
1603 obj.printInfo()
1605 obj.printInfo()
1604 else:
1606 else:
1605 print(obj)
1607 print(obj)
1606 else:
1608 else:
1607 log.warning('Header {} Not found in object'.format(header))
1609 log.warning('Header {} Not found in object'.format(header))
1608
1610
1609 self.__printInfo = False
1611 self.__printInfo = False
@@ -1,722 +1,733
1 from email.utils import localtime
1 from email.utils import localtime
2 import os
2 import os
3 import time
3 import time
4 import datetime
4 import datetime
5
5
6 import numpy
6 import numpy
7 import h5py
7 import h5py
8
8
9 import schainpy.admin
9 import schainpy.admin
10 from schainpy.model.data.jrodata import *
10 from schainpy.model.data.jrodata import *
11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
12 from schainpy.model.io.jroIO_base import *
12 from schainpy.model.io.jroIO_base import *
13 from schainpy.utils import log
13 from schainpy.utils import log
14
14
15
15
16 class HDFReader(Reader, ProcessingUnit):
16 class HDFReader(Reader, ProcessingUnit):
17 """Processing unit to read HDF5 format files
17 """Processing unit to read HDF5 format files
18
18
19 This unit reads HDF5 files created with `HDFWriter` operation contains
19 This unit reads HDF5 files created with `HDFWriter` operation contains
20 by default two groups Data and Metadata all variables would be saved as `dataOut`
20 by default two groups Data and Metadata all variables would be saved as `dataOut`
21 attributes.
21 attributes.
22 It is possible to read any HDF5 file by given the structure in the `description`
22 It is possible to read any HDF5 file by given the structure in the `description`
23 parameter, also you can add extra values to metadata with the parameter `extras`.
23 parameter, also you can add extra values to metadata with the parameter `extras`.
24
24
25 Parameters:
25 Parameters:
26 -----------
26 -----------
27 path : str
27 path : str
28 Path where files are located.
28 Path where files are located.
29 startDate : date
29 startDate : date
30 Start date of the files
30 Start date of the files
31 endDate : list
31 endDate : list
32 End date of the files
32 End date of the files
33 startTime : time
33 startTime : time
34 Start time of the files
34 Start time of the files
35 endTime : time
35 endTime : time
36 End time of the files
36 End time of the files
37 description : dict, optional
37 description : dict, optional
38 Dictionary with the description of the HDF5 file
38 Dictionary with the description of the HDF5 file
39 extras : dict, optional
39 extras : dict, optional
40 Dictionary with extra metadata to be be added to `dataOut`
40 Dictionary with extra metadata to be be added to `dataOut`
41
41
42 Examples
42 Examples
43 --------
43 --------
44
44
45 desc = {
45 desc = {
46 'Data': {
46 'Data': {
47 'data_output': ['u', 'v', 'w'],
47 'data_output': ['u', 'v', 'w'],
48 'utctime': 'timestamps',
48 'utctime': 'timestamps',
49 } ,
49 } ,
50 'Metadata': {
50 'Metadata': {
51 'heightList': 'heights'
51 'heightList': 'heights'
52 }
52 }
53 }
53 }
54
54
55 desc = {
55 desc = {
56 'Data': {
56 'Data': {
57 'data_output': 'winds',
57 'data_output': 'winds',
58 'utctime': 'timestamps'
58 'utctime': 'timestamps'
59 },
59 },
60 'Metadata': {
60 'Metadata': {
61 'heightList': 'heights'
61 'heightList': 'heights'
62 }
62 }
63 }
63 }
64
64
65 extras = {
65 extras = {
66 'timeZone': 300
66 'timeZone': 300
67 }
67 }
68
68
69 reader = project.addReadUnit(
69 reader = project.addReadUnit(
70 name='HDFReader',
70 name='HDFReader',
71 path='/path/to/files',
71 path='/path/to/files',
72 startDate='2019/01/01',
72 startDate='2019/01/01',
73 endDate='2019/01/31',
73 endDate='2019/01/31',
74 startTime='00:00:00',
74 startTime='00:00:00',
75 endTime='23:59:59',
75 endTime='23:59:59',
76 # description=json.dumps(desc),
76 # description=json.dumps(desc),
77 # extras=json.dumps(extras),
77 # extras=json.dumps(extras),
78 )
78 )
79
79
80 """
80 """
81
81
82 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
83
83
84 def __init__(self):
84 def __init__(self):
85 ProcessingUnit.__init__(self)
85 ProcessingUnit.__init__(self)
86 self.dataOut = Parameters()
86 self.dataOut = Parameters()
87 self.ext = ".hdf5"
87 self.ext = ".hdf5"
88 self.optchar = "D"
88 self.optchar = "D"
89 self.meta = {}
89 self.meta = {}
90 self.data = {}
90 self.data = {}
91 self.open_file = h5py.File
91 self.open_file = h5py.File
92 self.open_mode = 'r'
92 self.open_mode = 'r'
93 self.description = {}
93 self.description = {}
94 self.extras = {}
94 self.extras = {}
95 self.filefmt = "*%Y%j***"
95 self.filefmt = "*%Y%j***"
96 self.folderfmt = "*%Y%j"
96 self.folderfmt = "*%Y%j"
97 self.utcoffset = 0
97 self.utcoffset = 0
98 self.filter = None
99 self.dparam = None
98
100
99 def setup(self, **kwargs):
101 def setup(self, **kwargs):
100
102
101 self.set_kwargs(**kwargs)
103 self.set_kwargs(**kwargs)
102 if not self.ext.startswith('.'):
104 if not self.ext.startswith('.'):
103 self.ext = '.{}'.format(self.ext)
105 self.ext = '.{}'.format(self.ext)
104
106
105 if self.online:
107 if self.online:
106 log.log("Searching files in online mode...", self.name)
108 log.log("Searching files in online mode...", self.name)
107
109
108 for nTries in range(self.nTries):
110 for nTries in range(self.nTries):
109 fullpath = self.searchFilesOnLine(self.path, self.startDate,
111 fullpath = self.searchFilesOnLine(self.path, self.startDate,
110 self.endDate, self.expLabel, self.ext, self.walk,
112 self.endDate, self.expLabel, self.ext, self.walk,
111 self.filefmt, self.folderfmt)
113 self.filefmt, self.folderfmt,self.filter)
112 try:
114 try:
113 fullpath = next(fullpath)
115 fullpath = next(fullpath)
114 except:
116 except:
115 fullpath = None
117 fullpath = None
116
118
117 if fullpath:
119 if fullpath:
118 break
120 break
119
121
120 log.warning(
122 log.warning(
121 'Waiting {} sec for a valid file in {}: try {} ...'.format(
123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
122 self.delay, self.path, nTries + 1),
124 self.delay, self.path, nTries + 1),
123 self.name)
125 self.name)
124 time.sleep(self.delay)
126 time.sleep(self.delay)
125
127
126 if not(fullpath):
128 if not(fullpath):
127 raise schainpy.admin.SchainError(
129 raise schainpy.admin.SchainError(
128 'There isn\'t any valid file in {}'.format(self.path))
130 'There isn\'t any valid file in {}'.format(self.path))
129
131
130 pathname, filename = os.path.split(fullpath)
132 pathname, filename = os.path.split(fullpath)
131 self.year = int(filename[1:5])
133 self.year = int(filename[1:5])
132 self.doy = int(filename[5:8])
134 self.doy = int(filename[5:8])
133 self.set = int(filename[8:11]) - 1
135 self.set = int(filename[8:11]) - 1
134 else:
136 else:
135 log.log("Searching files in {}".format(self.path), self.name)
137 log.log("Searching files in {}".format(self.path), self.name)
136 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
137 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt,self.filter)
138
140
139 self.setNextFile()
141 self.setNextFile()
140
142
141 return
143 return
142
144
143 def readFirstHeader(self):
145 def readFirstHeader(self):
144 '''Read metadata and data'''
146 '''Read metadata and data'''
145
147
146 self.__readMetadata()
148 self.__readMetadata()
147 self.__readData()
149 self.__readData()
148 self.__setBlockList()
150 self.__setBlockList()
149
151
150 if 'type' in self.meta:
152 if 'type' in self.meta:
151 self.dataOut = eval(self.meta['type'])()
153 self.dataOut = eval(self.meta['type'])()
152
154
155 if self.dparam:
156 setattr(self.dataOut, "dparam", 1)
157
153 for attr in self.meta:
158 for attr in self.meta:
154 setattr(self.dataOut, attr, self.meta[attr])
159 setattr(self.dataOut, attr, self.meta[attr])
155
160
156 self.blockIndex = 0
161 self.blockIndex = 0
157
162
158 return
163 return
159
164
160 def __setBlockList(self):
165 def __setBlockList(self):
161 '''
166 '''
162 Selects the data within the times defined
167 Selects the data within the times defined
163
168
164 self.fp
169 self.fp
165 self.startTime
170 self.startTime
166 self.endTime
171 self.endTime
167 self.blockList
172 self.blockList
168 self.blocksPerFile
173 self.blocksPerFile
169
174
170 '''
175 '''
171
176
172 startTime = self.startTime
177 startTime = self.startTime
173 endTime = self.endTime
178 endTime = self.endTime
174 thisUtcTime = self.data['utctime'] + self.utcoffset
179 thisUtcTime = self.data['utctime'] + self.utcoffset
175 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
180 try:
181 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
182 except:
183 self.interval = 0
176 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
184 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
177
185
178 thisDate = thisDatetime.date()
186 thisDate = thisDatetime.date()
179 thisTime = thisDatetime.time()
187 thisTime = thisDatetime.time()
180
188
181 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
189 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
190 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
183
191
184 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
192 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
185
193
186 self.blockList = ind
194 self.blockList = ind
187 self.blocksPerFile = len(ind)
195 self.blocksPerFile = len(ind)
188 return
196 return
189
197
190 def __readMetadata(self):
198 def __readMetadata(self):
191 '''
199 '''
192 Reads Metadata
200 Reads Metadata
193 '''
201 '''
194
202
195 meta = {}
203 meta = {}
196
204
197 if self.description:
205 if self.description:
198 for key, value in self.description['Metadata'].items():
206 for key, value in self.description['Metadata'].items():
199 meta[key] = self.fp[value][()]
207 meta[key] = self.fp[value][()]
200 else:
208 else:
201 grp = self.fp['Metadata']
209 grp = self.fp['Metadata']
202 for name in grp:
210 for name in grp:
203 meta[name] = grp[name][()]
211 meta[name] = grp[name][()]
204
212
205 if self.extras:
213 if self.extras:
206 for key, value in self.extras.items():
214 for key, value in self.extras.items():
207 meta[key] = value
215 meta[key] = value
208 self.meta = meta
216 self.meta = meta
209
217
210 return
218 return
211
219
    def __readData(self):
        """Read the data arrays from the file into ``self.data``.

        With a custom ``description``, each entry of ``description['Data']``
        maps an output key to either a dataset path (str), a group path
        (str, one dataset per channel) or a list of dataset paths (one per
        channel). Without a description, every member of the default 'Data'
        group is read.
        """

        data = {}

        if self.description:
            for key, value in self.description['Data'].items():
                if isinstance(value, str):
                    if isinstance(self.fp[value], h5py.Dataset):
                        # Single dataset: read it whole
                        data[key] = self.fp[value][()]
                    elif isinstance(self.fp[value], h5py.Group):
                        # Group of per-channel datasets: stack them
                        array = []
                        for ch in self.fp[value]:
                            array.append(self.fp[value][ch][()])
                        data[key] = numpy.array(array)
                elif isinstance(value, list):
                    # Explicit list of dataset paths, one per channel
                    array = []
                    for ch in value:
                        array.append(self.fp[ch][()])
                    data[key] = numpy.array(array)
        else:
            grp = self.fp['Data']
            for name in grp:
                if isinstance(grp[name], h5py.Dataset):
                    array = grp[name][()]
                elif isinstance(grp[name], h5py.Group):
                    array = []
                    for ch in grp[name]:
                        array.append(grp[name][ch][()])
                    array = numpy.array(array)
                else:
                    # NOTE(review): on an unknown member type `array` keeps its
                    # previous value (or is unbound on the first iteration)
                    log.warning('Unknown type: {}'.format(name))

                # NOTE(review): this branch only runs when self.description is
                # falsy, so the rename lookup below normally never matches
                if name in self.description:
                    key = self.description[name]
                else:
                    key = name
                data[key] = array

        self.data = data
        return
252
260
253 def getData(self):
261 def getData(self):
254
262
255 for attr in self.data:
263 for attr in self.data:
256 if self.data[attr].ndim == 1:
264 if self.data[attr].ndim == 1:
257 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
265 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
258 else:
266 else:
259 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
267 if self.dparam:
268 setattr(self.dataOut, attr, self.data[attr])
269 else:
270 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
260
271
261 self.dataOut.flagNoData = False
272 self.dataOut.flagNoData = False
262 self.blockIndex += 1
273 self.blockIndex += 1
263
274
264 log.log("Block No. {}/{} -> {}".format(
275 log.log("Block No. {}/{} -> {}".format(
265 self.blockIndex,
276 self.blockIndex,
266 self.blocksPerFile,
277 self.blocksPerFile,
267 self.dataOut.datatime.ctime()), self.name)
278 self.dataOut.datatime.ctime()), self.name)
268
279
269 return
280 return
270
281
271 def run(self, **kwargs):
282 def run(self, **kwargs):
272
283
273 if not(self.isConfig):
284 if not(self.isConfig):
274 self.setup(**kwargs)
285 self.setup(**kwargs)
275 self.isConfig = True
286 self.isConfig = True
276
287
277 if self.blockIndex == self.blocksPerFile:
288 if self.blockIndex == self.blocksPerFile:
278 self.setNextFile()
289 self.setNextFile()
279
290
280 self.getData()
291 self.getData()
281
292
282 return
293 return
283
294
@MPDecorator
class HDFWriter(Operation):
    """Operation to write HDF5 files.

    The HDF5 file contains by default two groups Data and Metadata where
    you can save any `dataOut` attribute specified by `dataList` and `metadataList`
    parameters, data attributes are normaly time dependent where the metadata
    are not.
    It is possible to customize the structure of the HDF5 file with the
    optional description parameter see the examples.

    Parameters:
    -----------
    path : str
        Path where files will be saved.
    blocksPerFile : int
        Number of blocks per file
    metadataList : list
        List of the dataOut attributes that will be saved as metadata
    dataList : list
        List of the dataOut attributes that will be saved as data
    setType : bool
        If True the name of the files corresponds to the timestamp of the data
    description : dict, optional
        Dictionary with the desired description of the HDF5 file

    Examples
    --------

    desc = {
        'data_output': {'winds': ['z', 'w', 'v']},
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'data_output': ['z', 'w', 'v'],
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    writer = proc_unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value='/path/to/file')
    writer.addParameter(name='blocksPerFile', value='32')
    writer.addParameter(name='metadataList', value='heightList,timeZone')
    writer.addParameter(name='dataList',value='data_output,utctime')
    # writer.addParameter(name='description',value=json.dumps(desc))

    """

    ext = ".hdf5"
    optchar = "D"
    filename = None
    path = None
    setFile = None
    fp = None
    firsttime = True
    # Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    # Data Arrays
    dataList = None
    metadataList = None
    currentDay = None
    lastTime = None
    last_Azipos = None
    last_Elepos = None
    mode = None
    #-----------------------
    Typename = None
    mask = False

    def __init__(self):

        Operation.__init__(self)
        return

    def set_kwargs(self, **kwargs):
        # Attach every keyword argument as an attribute of this operation
        for key, value in kwargs.items():
            setattr(self, key, value)

    def set_kwargs_obj(self, obj, **kwargs):
        # Attach every keyword argument as an attribute of `obj`
        for key, value in kwargs.items():
            setattr(obj, key, value)

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None, type_data=None, localtime=True, **kwargs):
        """Configure the writer and pre-compute the dataset layout.

        Inspects every requested `dataOut` attribute once and records its
        dimensionality/shape/dtype in ``self.dsList`` so `writeData` can
        create the matching HDF5 datasets for each new file.
        """
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = [s.strip() for s in dataList]
        self.setType = setType
        if self.setType == "weather":
            self.set_kwargs(**kwargs)
            self.set_kwargs_obj(self.dataOut, **kwargs)
            # Index of each weather product along data_param's second axis
            self.weather_vars = {
                'S': 0,
                'V': 1,
                'W': 2,
                'SNR': 3,
                'Z': 4,
                'D': 5,
                'P': 6,
                'R': 7,
            }

        if localtime:
            self.getDateTime = datetime.datetime.fromtimestamp
        else:
            self.getDateTime = datetime.datetime.utcfromtimestamp

        self.description = description
        self.type_data = type_data

        if self.metadataList is None:
            self.metadataList = self.dataOut.metadata_list

        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            if hasattr(self.dataOut, self.dataList[i]):
                dataAux = getattr(self.dataOut, self.dataList[i])
                if self.setType == 'weather' and self.dataList[i] == 'data_param':
                    # Keep only the slice of the selected weather variable
                    dataAux = dataAux[:, self.weather_vars[self.weather_var], :]
                dsDict['variable'] = self.dataList[i]
            else:
                log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
                continue

            if dataAux is None:
                continue
            # numpy.floating replaces numpy.float (alias of builtin float,
            # removed in NumPy 1.24)
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]
                dsDict['dtype'] = dataAux.dtype
            dsList.append(dsDict)

        self.dsList = dsList
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        """Return True when the current data must start a new file
        (day changed, or a gap larger than 3 hours between samples)."""
        currentTime = self.dataOut.utctime
        dt = self.getDateTime(currentTime)

        dataDay = int(dt.strftime('%j'))

        if self.lastTime is None:
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # New file if the day changed or the gap between samples exceeds 3 hours
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
            dataList=[], setType=None, description={}, mode=None,
            type_data=None, Reset=False, localtime=True, **kwargs):
        """Entry point: (re)configure on demand and write one block."""

        if Reset:
            # Force a full re-configuration and start a fresh file
            self.isConfig = False
            self.closeFile()
            self.lastTime = None
            self.blockIndex = 0

        self.dataOut = dataOut
        self.mode = mode

        if not(self.isConfig):
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList,
                       setType=setType, description=description, type_data=type_data,
                       localtime=localtime, **kwargs)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        """Open a new output file named after the current data time and
        write its metadata plus empty data datasets."""

        ext = self.ext
        path = self.path
        setFile = self.setFile

        dt = self.getDateTime(self.dataOut.utctime)

        if self.setType == 'weather':
            subfolder = dt.strftime('%Y-%m-%dT%H-00-00')
        else:
            subfolder = dt.strftime('d%Y%j')

        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # The filename must follow this format:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # Resume the set counter from the last existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1  # initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           dt.year,
                                           int(dt.strftime('%j')),
                                           setFile,
                                           ext)
        elif self.setType == "weather":
            # e.g. SOPHY_20200505_140215_E10.0_Z.h5 (PPI)
            #      SOPHY_20200505_140215_A40.0_Z.h5 (RHI)
            if self.dataOut.flagMode == 1:  # 'AZI' scan -> PPI, fixed elevation
                ang_type = 'E'
                # Average the central half of the angle samples
                len_aux = int(self.dataOut.data_ele.shape[0]/4)
                mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
                ang_ = round(mean, 1)
            elif self.dataOut.flagMode == 0:  # 'ELE' scan -> RHI, fixed azimuth
                ang_type = 'A'
                len_aux = int(self.dataOut.data_azi.shape[0]/4)
                mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
                ang_ = round(mean, 1)

            file = '%s_%2.2d%2.2d%2.2d_%2.2d%2.2d%2.2d_%s%2.1f_%s%s' % (
                'SOPHY',
                dt.year,
                dt.month,
                dt.day,
                dt.hour,
                dt.minute,
                dt.second,
                ang_type,
                ang_,
                self.weather_var,
                ext)

        else:
            # One file per minute of the day
            setFile = dt.hour*60 + dt.minute
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           dt.year,
                                           int(dt.strftime('%j')),
                                           setFile,
                                           ext)

        self.filename = os.path.join(path, subfolder, file)

        self.fp = h5py.File(self.filename, 'w')
        # write metadata
        self.writeMetadata(self.fp)
        # Write data
        self.writeData(self.fp)

    def getLabel(self, name, x=None):
        """Resolve the HDF5 dataset name for attribute `name`.

        With ``x is None`` return the group/dataset label; with an integer
        `x` return the per-channel label (from the description, or a
        generated 'channelNN'/'pairNN' fallback).
        """

        if x is None:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], str):
                    return data[name]
                elif isinstance(data[name], list):
                    return None
                elif isinstance(data[name], dict):
                    # The (single) key of the dict is the group label
                    for key, value in data[name].items():
                        return key
            return name
        else:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], list):
                    return data[name][x]
                elif isinstance(data[name], dict):
                    # The (single) value of the dict lists per-channel labels
                    for key, value in data[name].items():
                        return value[x]
            if 'cspc' in name:
                return 'pair{:02d}'.format(x)
            else:
                return 'channel{:02d}'.format(x)

    def writeMetadata(self, fp):
        """Write every `metadataList` attribute once per file."""

        if self.description:
            if 'Metadata' in self.description:
                grp = fp.create_group('Metadata')
            else:
                grp = fp
        else:
            grp = fp.create_group('Metadata')

        for i in range(len(self.metadataList)):
            if not hasattr(self.dataOut, self.metadataList[i]):
                log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                continue
            value = getattr(self.dataOut, self.metadataList[i])
            # h5py has no native bool scalar; store as 0/1
            if isinstance(value, bool):
                if value is True:
                    value = 1
                else:
                    value = 0
            grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
        return

    def writeData(self, fp):
        """Create the (empty) chunked datasets for a new file following
        the layout computed in `setup`."""

        if self.description:
            if 'Data' in self.description:
                grp = fp.create_group('Data')
            else:
                grp = fp
        else:
            grp = fp.create_group('Data')

        dtsets = []
        data = []

        for dsInfo in self.dsList:

            if dsInfo['nDim'] == 0:
                # Scalar attribute: one value per block
                ds = grp.create_dataset(
                    self.getLabel(dsInfo['variable']),
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                data.append((dsInfo['variable'], -1))
            else:
                label = self.getLabel(dsInfo['variable'])
                if label is not None:
                    sgrp = grp.create_group(label)
                else:
                    sgrp = grp
                if self.blocksPerFile == 1:
                    shape = dsInfo['shape'][1:]
                else:
                    shape = (self.blocksPerFile, ) + dsInfo['shape'][1:]
                # One dataset per channel (first axis of the array)
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        self.getLabel(dsInfo['variable'], i),
                        shape,
                        chunks=True,
                        dtype=dsInfo['dtype'],
                        compression='gzip',
                    )
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        """Append the current block to every dataset, rolling over to a
        new file when full or when `timeFlag` fires."""

        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                # Scalar dataset: one value per block
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                if self.blocksPerFile == 1:
                    # Single-block file: optionally blank out samples below
                    # the mask threshold (column 3 of data_param; presumably
                    # SNR per weather_vars — TODO confirm)
                    mask = self.dataOut.data_param[:, 3, :][ch] < self.mask
                    tmp = getattr(self.dataOut, attr)[:, self.weather_vars[self.weather_var], :][ch]
                    if self.mask:
                        tmp[mask] = numpy.nan
                    ds[:] = tmp
                else:
                    ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)

        return

    def closeFile(self):
        """Shrink partially-filled datasets to the written size and close
        the current file."""

        if self.blockIndex != self.blocksPerFile:
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        if self.fp:
            self.fp.flush()
            self.fp.close()

    def close(self):

        self.closeFile()
General Comments 0
You need to be logged in to leave comments. Login now