##// END OF EJS Templates
Merge branch 'v3.0-WR' of http://intranet.igp.gob.pe:8082/schain into v3.0-WR
Juan C. Espinoza -
r1539:03a9c71b14ee merge
parent child
Show More
@@ -0,0 +1,418
1 # SOPHY PROC script
2 import os, sys, json, argparse
3 import datetime
4 import time
5
# Data root directory. NOTE(review): the first three assignments are dead
# (dev-machine presets); only the LAST one takes effect. Consider making
# this a CLI argument instead of editing the script.
PATH = '/DATA_RM/DATA'
PATH = '/media/jespinoza/Elements'
PATH = '/media/jespinoza/data/SOPHY'
PATH = '/home/soporte/Documents/EVENTO/'

# NOTE: the recording/plot settings live in PARAM
# Per-product plot/recording settings: color scale limits, colormap, display
# label, HDF5 variable name ('wrname'), colorbar label and channel index.
PARAM = {
    'S': {'zmin': -45, 'zmax': -25, 'colormap': 'jet', 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
    'SNR': {'zmin': -40, 'zmax': -20, 'colormap': 'jet', 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
    'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
    'R': {'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '', 'ch':0},
    'P': {'zmin': -180,'zmax': 180,'colormap': 'RdBu_r', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'degrees', 'ch':0},
    'D': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dBz','ch':0},
    'Z': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'Reflectivity ', 'wrname':'reflectivity', 'cb_label': 'dBz','ch':0},
    'W': {'zmin': 0, 'zmax': 15, 'colormap': 'sophy_w','label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
}

# Attribute names forwarded as metadata to the HDFWriter operation.
META = ['heightList', 'data_azi', 'data_ele', 'mode_op', 'latitude', 'longitude', 'altitude', 'heading', 'radar_name',
        'institution', 'contact', 'h0', 'range_unit', 'prf', 'prf_unit', 'variable', 'variable_unit', 'n_pulses',
        'pulse1_range', 'pulse1_width', 'pulse2_width', 'pulse1_repetitions', 'pulse2_repetitions', 'pulse_width_unit',
        'snr_threshold','dataPP_NOISE']
27
28
def max_index(r, sample_rate, ipp):
    """
    Return the height-index upper bound for range *r* plus a 1.2 km guard.

    NOTE(review): units assumed from the usage in main() (r in km,
    sample_rate/ipp as derived from experiment.conf) -- confirm.
    """
    samples_scale = sample_rate * ipp * 1e6
    return int(samples_scale * r / 60) + int(samples_scale * 1.2 / 60)
32
def _parse_code(raw):
    """Parse a comma-separated code string (e.g. '101,110') into lists of ints."""
    return [[int(bit) for bit in chunk.strip()] for chunk in raw.split(',')]


def _add_weather_plot(unit, param, args, path_plots, channels, mask, limits_as_int=False):
    """Attach a WeatherParamsPlot operation for *param* to *unit*."""
    op = unit.addOperation(name='WeatherParamsPlot')
    if args.save:
        op.addParameter(name='save', value=path_plots, format='str')
    op.addParameter(name='save_period', value=-1)
    op.addParameter(name='show', value=args.show)
    op.addParameter(name='channels', value=channels)
    if limits_as_int:
        op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
        op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
    else:
        op.addParameter(name='zmin', value=PARAM[param]['zmin'])
        op.addParameter(name='zmax', value=PARAM[param]['zmax'])
    op.addParameter(name='attr_data', value=param, format='str')
    op.addParameter(name='labels', value=[PARAM[param]['label']])
    op.addParameter(name='save_code', value=param)
    op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
    op.addParameter(name='colormap', value=PARAM[param]['colormap'])
    op.addParameter(name='bgcolor', value='black')
    if mask:
        op.addParameter(name='mask', value=mask, format='float')
    if args.server:
        op.addParameter(name='server', value='0.0.0.0:4444')
        op.addParameter(name='exp_code', value='400')


def _add_writer(unit, param, desc, path_save, mask, ipp, n_pulses, rmix,
                pulse_1_width, pulse_2_width, pulse_1_repetitions,
                pulse_2_repetitions):
    """Attach an HDFWriter with the SOPHy site/instrument metadata to *unit*."""
    writer = unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value=path_save, format='str')
    writer.addParameter(name='Reset', value=True)
    writer.addParameter(name='setType', value='weather')
    writer.addParameter(name='description', value=json.dumps(desc))
    writer.addParameter(name='blocksPerFile', value='1', format='int')
    writer.addParameter(name='metadataList', value=','.join(META))
    writer.addParameter(name='dataList', value='data_param,utctime')
    writer.addParameter(name='weather_var', value=param)
    writer.addParameter(name='mask', value=mask, format='float')
    # Static site / instrument metadata.
    writer.addParameter(name='latitude', value='-12.040436')
    writer.addParameter(name='longitude', value='-75.295893')
    writer.addParameter(name='altitude', value='3379.2147')
    writer.addParameter(name='heading', value='0')
    writer.addParameter(name='radar_name', value='SOPHy')
    writer.addParameter(name='institution', value='IGP')
    writer.addParameter(name='contact', value='dscipion@igp.gob.pe')
    writer.addParameter(name='created_by', value='Signal Chain (https://pypi.org/project/schainpy/)')
    writer.addParameter(name='range_unit', value='km')
    writer.addParameter(name='prf', value=1/ipp)
    writer.addParameter(name='prf_unit', value='hertz')
    writer.addParameter(name='variable', value=PARAM[param]['label'])
    writer.addParameter(name='variable_unit', value=PARAM[param]['cb_label'])
    writer.addParameter(name='n_pulses', value=n_pulses)
    writer.addParameter(name='pulse1_range', value=rmix)
    writer.addParameter(name='pulse1_width', value=pulse_1_width)
    writer.addParameter(name='pulse2_width', value=pulse_2_width)
    writer.addParameter(name='pulse1_repetitions', value=pulse_1_repetitions)
    writer.addParameter(name='pulse2_repetitions', value=pulse_2_repetitions)
    writer.addParameter(name='pulse_width_unit', value='microseconds')
    writer.addParameter(name='snr_threshold', value=mask)


def main(args):
    """
    Build and run the schainpy processing chain for one SOPHy experiment.

    Reads <PATH>/<experiment>/experiment.conf, then wires a DigitalRFReader
    into a one-pulse or two-pulse voltage/parameter pipeline (depending on
    conf['usrp_tx']['enable_2']) with optional plotting and HDF5 output.
    """
    experiment = args.experiment
    # BUG FIX: the config file handle was never closed; use a context manager.
    with open(os.path.join(PATH, experiment, 'experiment.conf')) as fp:
        conf = json.loads(fp.read())

    ipp_km = conf['usrp_tx']['ipp']
    ipp = ipp_km * 2 / 300000              # inter-pulse period in seconds
    sample_rate = conf['usrp_rx']['sample_rate']
    speed_axis = conf['pedestal']['speed']
    time_offset = args.time_offset
    parameters = args.parameters
    # Experiment folders are named NAME@YYYY-MM-DDTHH-MM-SS.
    start_date = experiment.split('@')[1].split('T')[0].replace('-', '/')
    end_date = start_date
    if args.start_time:
        start_time = args.start_time
    else:
        start_time = experiment.split('@')[1].split('T')[1].replace('-', ':')
    end_time = '23:59:59'
    N = int(1/(speed_axis[0]*ipp))         # profiles per block: 1 degree of resolution
    path = os.path.join(PATH, experiment, 'rawdata')
    path_ped = os.path.join(PATH, experiment, 'position')
    if args.label:
        label = '-{}'.format(args.label)
    else:
        label = ''
    path_plots = os.path.join(PATH, experiment, 'plots{}'.format(label))
    path_save = os.path.join(PATH, experiment, 'param{}'.format(label))
    RMIX = 1.62                            # pulse-1 range boundary [km]
    H0 = -1.68                             # range correction [km]
    MASK = 0.3                             # SNR threshold / plot mask

    from schainpy.controller import Project

    project = Project()
    project.setup(id='1', name='Sophy', description='sophy proc')

    reader = project.addReadUnit(datatype='DigitalRFReader',
                                 path=path,
                                 startDate=start_date,
                                 endDate=end_date,
                                 startTime=start_time,
                                 endTime=end_time,
                                 delay=30,
                                 online=args.online,
                                 walk=1,
                                 ippKm=ipp_km,
                                 getByBlock=1,
                                 nProfileBlocks=N,
                                 )

    if not conf['usrp_tx']['enable_2']:  # One pulse
        n_pulses = 1
        pulse_1_width = conf['usrp_tx']['pulse_1']
        pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
        pulse_2_width = 0
        pulse_2_repetitions = 0

        voltage = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        if conf['usrp_tx']['code_type_1'] != 'None':
            code = _parse_code(conf['usrp_tx']['code_1'])
            op = voltage.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')

            # Integrate at least len(code) profiles (complementary code).
            op = voltage.addOperation(name='CohInt', optype='other')
            op.addParameter(name='n', value=len(code), format='int')
            ncode = len(code)
        else:
            ncode = 1
            code = ['0']

        op = voltage.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0)

        if args.range > 0:
            op = voltage.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value='0', format='int')
            op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp), format='int')

        op = voltage.addOperation(name='PulsePair_vRF', optype='other')
        op.addParameter(name='n', value=int(N)/ncode, format='int')

        proc = project.addProcUnit(datatype='ParametersProc', inputId=voltage.getId())

        opObj10 = proc.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW', value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt', value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        for param in parameters:
            op = proc.addOperation(name='Block360')
            op.addParameter(name='runNextOp', value=True)

            # NOTE(review): unlike the two-pulse branch, plotting here is not
            # gated on args.plot -- confirm whether that is intended.
            _add_weather_plot(proc, param, args, path_plots, '1,', MASK,
                              limits_as_int=False)

            desc = {
                'Data': {
                    param: PARAM[param]['wrname'],
                    'utctime': 'time'
                },
                'Metadata': {
                    'heightList': 'range',
                    'data_azi': 'azimuth',
                    'data_ele': 'elevation',
                    'mode_op': 'scan_type',
                    'h0': 'range_correction',
                }
            }

            if args.save:
                # BUG FIX: the writer was attached to `merge`, which only
                # exists in the two-pulse branch (NameError here); the
                # single-pulse data lives in `proc`.
                _add_writer(proc, param, desc, path_save, MASK, ipp, n_pulses,
                            RMIX, pulse_1_width, pulse_2_width,
                            pulse_1_repetitions, pulse_2_repetitions)

    else:  # Two pulses
        # NOTE(review): n_pulses is 1 in the two-pulse branch as well --
        # confirm whether it should be 2 (kept as-is to preserve metadata).
        n_pulses = 1
        pulse_1_width = conf['usrp_tx']['pulse_1']
        pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
        pulse_2_width = conf['usrp_tx']['pulse_2']
        pulse_2_repetitions = conf['usrp_tx']['repetitions_2']

        # --- pulse 1 chain -------------------------------------------------
        voltage1 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        op = voltage1.addOperation(name='ProfileSelector')
        op.addParameter(name='profileRangeList', value='0,{}'.format(conf['usrp_tx']['repetitions_1']-1))

        if conf['usrp_tx']['code_type_1'] != 'None':
            code = _parse_code(conf['usrp_tx']['code_1'])
            op = voltage1.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')
        else:
            code = ['0']

        # Integrate at least 2 profiles (complementary code).
        op = voltage1.addOperation(name='CohInt', optype='other')
        op.addParameter(name='n', value=2, format='int')

        if args.range > 0:
            op = voltage1.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value='0', format='int')
            op.addParameter(name='maxIndex', value=max_index(RMIX, sample_rate, ipp), format='int')

        op = voltage1.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0, format='float')

        op = voltage1.addOperation(name='PulsePair_vRF', optype='other')
        op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_1'])/2, format='int')

        proc1 = project.addProcUnit(datatype='ParametersProc', inputId=voltage1.getId())
        proc1.addParameter(name='runNextUnit', value=True)

        opObj10 = proc1.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW', value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt', value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc1.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        op = proc1.addOperation(name='Block360')
        op.addParameter(name='attr_data', value='data_param')
        op.addParameter(name='runNextOp', value=True)

        # --- pulse 2 chain -------------------------------------------------
        voltage2 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

        op = voltage2.addOperation(name='ProfileSelector')
        op.addParameter(name='profileRangeList', value='{},{}'.format(conf['usrp_tx']['repetitions_1'], conf['usrp_tx']['repetitions_1']+conf['usrp_tx']['repetitions_2']-1))

        # BUG FIX: the original tested plain truthiness here, so the string
        # 'None' would (incorrectly) enable decoding; compare against 'None'
        # like the code_type_1 checks do.
        if conf['usrp_tx']['code_type_2'] != 'None':
            code = _parse_code(conf['usrp_tx']['code_2'])
            op = voltage2.addOperation(name='Decoder', optype='other')
            op.addParameter(name='code', value=code)
            op.addParameter(name='nCode', value=len(code), format='int')
            op.addParameter(name='nBaud', value=len(code[0]), format='int')

            # Integrate at least len(code) profiles (complementary code).
            op = voltage2.addOperation(name='CohInt', optype='other')
            op.addParameter(name='n', value=len(code), format='int')
            ncode = len(code)
        else:
            ncode = 1

        if args.range > 0:
            op = voltage2.addOperation(name='selectHeights')
            op.addParameter(name='minIndex', value=max_index(RMIX, sample_rate, ipp), format='int')
            op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp), format='int')

        op = voltage2.addOperation(name='setH0')
        op.addParameter(name='h0', value=H0, format='float')

        op = voltage2.addOperation(name='PulsePair_vRF', optype='other')
        op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_2'])/ncode, format='int')

        proc2 = project.addProcUnit(datatype='ParametersProc', inputId=voltage2.getId())
        proc2.addParameter(name='runNextUnit', value=True)

        opObj10 = proc2.addOperation(name="WeatherRadar")
        opObj10.addParameter(name='tauW', value=(1e-6/sample_rate)*len(code[0]))
        opObj10.addParameter(name='Pt', value=((1e-6/sample_rate)*len(code[0])/ipp)*200)

        op = proc2.addOperation(name='PedestalInformation')
        op.addParameter(name='path', value=path_ped, format='str')
        op.addParameter(name='interval', value='0.04')
        op.addParameter(name='time_offset', value=time_offset)
        op.addParameter(name='mode', value='PPI')

        op = proc2.addOperation(name='Block360')
        op.addParameter(name='attr_data', value='data_param')
        op.addParameter(name='runNextOp', value=True)

        # Merge the two pulse chains into one parameter stream.
        merge = project.addProcUnit(datatype='MergeProc', inputId=[proc1.getId(), proc2.getId()])
        merge.addParameter(name='attr_data', value='data_param')
        merge.addParameter(name='mode', value='7')  # RM

        for param in parameters:

            if args.plot:
                _add_weather_plot(merge, param, args, path_plots, '0,', MASK,
                                  limits_as_int=True)

            desc = {
                'Data': {
                    'data_param': {PARAM[param]['wrname']: ['H', 'V']},
                    'utctime': 'time'
                },
                'Metadata': {
                    'heightList': 'range',
                    'data_azi': 'azimuth',
                    'data_ele': 'elevation',
                    'mode_op': 'scan_type',
                    'h0': 'range_correction',
                    'dataPP_NOISE': 'noise',
                }
            }

            if args.save:
                _add_writer(merge, param, desc, path_save, MASK, ipp, n_pulses,
                            RMIX, pulse_1_width, pulse_2_width,
                            pulse_1_repetitions, pulse_2_repetitions)

    project.start()
389
if __name__ == '__main__':

    # Command-line interface for the SOPHy processing chain.
    cli = argparse.ArgumentParser(description='Script to process SOPHy data.')
    cli.add_argument('experiment', help='Experiment name')
    cli.add_argument('--parameters', nargs='*', default=['S'],
                     help='Variables to process: P, Z, V')
    cli.add_argument('--time_offset', default=0, help='Fix time offset')
    cli.add_argument('--range', default=0, type=float, help='Max range to plot')
    cli.add_argument('--save', action='store_true', help='Create output files')
    cli.add_argument('--plot', action='store_true', help='Create plot files')
    cli.add_argument('--show', action='store_true', help='Show matplotlib plot.')
    cli.add_argument('--online', action='store_true', help='Set online mode.')
    cli.add_argument('--server', action='store_true', help='Send to realtime')
    cli.add_argument('--start_time', default='', help='Set start time.')
    cli.add_argument('--label', default='', help='Label for plot & param folder')

    main(cli.parse_args())
@@ -0,0 +1,117
1 import os,sys,json
2 import datetime
3 import time
4 import argparse
5
6 from schainpy.controller import Project
7 '''
8 NOTA:
9 Este script de prueba.
10 - Unidad del lectura 'HDFReader'.
11 - Unidad de procesamiento ParametersProc
12 '''
13 PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/paramC0N36.0/2022-06-09T18-00-00/"
14 #PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/param/2022-06-09T18-00-00/"
15
16 #PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-06-09T15-05-12/paramC0N36.0/2022-06-09T19-00-00/"
17 #PATH = "/home/soporte/Documents/EVENTO/HYO_PM@2022-05-31T12-00-17/paramC0N36.0/2022-05-31T16-00-00/"
18 path = PATH
19 PARAM = {
20 'S': {'zmin': -45, 'zmax': -25, 'colormap': 'jet', 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
21 'SNR': {'zmin': -40, 'zmax': -20, 'colormap': 'jet', 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
22 'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
23 'R': {'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '*', 'ch':0},
24 'P': {'zmin': -180,'zmax': 180,'colormap': 'RdBu_r', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'ΒΊ', 'ch':0},
25 'D': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dBz','ch':0},
26 'Z': {'zmin': -30, 'zmax': 80, 'colormap': 'sophy_r','label': 'Reflectivity ', 'wrname':'reflectivity', 'cb_label': 'dBz','ch':0},
27 'W': {'zmin': 0, 'zmax': 15, 'colormap': 'sophy_w','label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
28 }
29
def main(args):
    """
    Plot previously processed SOPHy parameter (HDF5) files.

    For each requested product a HDFReader + ParametersProc chain is built,
    selecting files by elevation angle (--grado) and product letter, and an
    optional WeatherParamsPlot operation is attached.
    """
    #filefmt="******%Y%m%d*%H%M%S*******"
    #filefmt="SOPHY_20220609_184620_E8.0_Z"
    parameters = args.parameters
    grado = args.grado
    MASK = None
    # BUG FIX: path_plots was referenced below (when --save is given) without
    # ever being defined, raising NameError. Save plots beside the data.
    path_plots = os.path.join(path, 'plots')

    for param in parameters:
        filefmt = "******%Y%m%d*%H%M%S*******"
        # e.g. '_E8.0_Z'; renamed from `filter` to avoid shadowing the builtin.
        file_filter = "_E" + str(grado) + ".0_" + param
        variable = 'Data/' + PARAM[param]['wrname'] + '/H'
        desc = {
            'Data': {
                'data_param': [variable],
                'utctime': 'Data/time'
            },
            'Metadata': {
                'heightList': 'Metadata/range',
                'data_azi': 'Metadata/azimuth',
                'data_ele': 'Metadata/elevation',
                'mode_op': 'Metadata/scan_type',
                'h0': 'Metadata/range_correction',
            }
        }

        project = Project()

        project.setup(id='10', name='Test Simulator', description=desc)

        readUnitConfObj = project.addReadUnit(datatype='HDFReader',
                                              path=path,
                                              startDate="2022/01/01",
                                              endDate="2022/12/01",
                                              startTime='00:00:00',
                                              endTime='23:59:59',
                                              delay=0,
                                              #set=0,
                                              online=0,
                                              walk=0,
                                              filefmt=filefmt,
                                              filter=file_filter,
                                              dparam=1,
                                              description=json.dumps(desc))

        proc1 = project.addProcUnit(datatype='ParametersProc', inputId=readUnitConfObj.getId())

        if args.plot:
            print("plotea")
            op = proc1.addOperation(name='WeatherParamsPlot')
            if args.save:
                op.addParameter(name='save', value=path_plots, format='str')
            op.addParameter(name='save_period', value=-1)
            op.addParameter(name='show', value=args.show)
            op.addParameter(name='channels', value='0,')
            op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
            op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
            op.addParameter(name='attr_data', value=param, format='str')
            op.addParameter(name='labels', value=[PARAM[param]['label']])
            op.addParameter(name='save_code', value=param)
            op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
            op.addParameter(name='colormap', value=PARAM[param]['colormap'])
            op.addParameter(name='bgcolor', value='black')
            if MASK:
                op.addParameter(name='mask', value=MASK, format='float')
            if args.server:
                op.addParameter(name='server', value='0.0.0.0:4444')
                op.addParameter(name='exp_code', value='400')
        project.start()
97
if __name__ == '__main__':

    # CLI for the HDF plotting test script.
    # NOTE(review): --save takes a value here (default 0) while the sibling
    # processing script uses a store_true flag -- kept as-is for
    # compatibility with existing invocations.
    cli = argparse.ArgumentParser(description='Script to process SOPHy data.')
    cli.add_argument('--parameters', nargs='*', default=['S'],
                     help='Variables to process: P, Z, V ,W')
    cli.add_argument('--grado', default=2, help='Angle in Elev to plot')
    cli.add_argument('--save', default=0, help='Save plot')
    cli.add_argument('--range', default=0, type=float, help='Max range to plot')
    cli.add_argument('--plot', action='store_true', help='Create plot files')
    cli.add_argument('--show', action='store_true', help='Show matplotlib plot.')
    cli.add_argument('--server', action='store_true', help='Send to realtime')

    main(cli.parse_args())
@@ -1,685 +1,691
1 1 import os
2 2 import datetime
3 3 import warnings
4 4 import numpy
5 5 from mpl_toolkits.axisartist.grid_finder import FixedLocator, DictFormatter
6 6
7 7 from schainpy.model.graphics.jroplot_base import Plot, plt
8 8 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
9 9 from schainpy.utils import log
10 10
11 11
EARTH_RADIUS = 6.3710e3  # mean Earth radius in kilometers (used by km2deg)
13 13
14 14
def antenna_to_cartesian(ranges, azimuths, elevations):
    """
    Return Cartesian coordinates from antenna coordinates.

    Parameters
    ----------
    ranges : array
        Distances to the center of the radar gates (bins) in kilometers.
    azimuths : array
        Azimuth angle of the radar in degrees.
    elevations : array
        Elevation angle of the radar in degrees.

    Returns
    -------
    x, y, z : array
        Cartesian coordinates in meters from the radar.

    Notes
    -----
    Adapted from equations 2.28(b) and 2.28(c) of Doviak and Zrnic,
    Doppler Radar and Weather Observations, 2nd ed., 1993, p. 21, under a
    standard atmosphere (4/3 Earth's radius model):

    .. math::

        z = \\sqrt{r^2+R^2+2*r*R*sin(\\theta_e)} - R

        s = R * arcsin(\\frac{r*cos(\\theta_e)}{R+z})

        x = s * sin(\\theta_a)

        y = s * cos(\\theta_a)

    where r is the gate distance, :math:`\\theta_a` / :math:`\\theta_e` the
    azimuth / elevation angles, s the arc length, and R the effective Earth
    radius (4/3 of the mean radius, 6371 km).
    """
    elev = numpy.deg2rad(elevations)      # elevation angle in radians
    azim = numpy.deg2rad(azimuths)        # azimuth angle in radians
    R = 6371.0 * 1000.0 * 4.0 / 3.0       # effective Earth radius in meters
    r = ranges * 1000.0                   # gate distances in meters

    z = (r ** 2 + R ** 2 + 2.0 * r * R * numpy.sin(elev)) ** 0.5 - R
    arc = R * numpy.arcsin(r * numpy.cos(elev) / (R + z))  # arc length in m
    return arc * numpy.sin(azim), arc * numpy.cos(azim), z
70 70
def cartesian_to_geographic_aeqd(x, y, lon_0, lat_0, R=EARTH_RADIUS):
    """
    Azimuthal equidistant Cartesian to geographic coordinate transform.

    Inverse azimuthal-equidistant projection (Snyder, Map Projections --
    A Working Manual, USGS Professional Paper 1395, 1987, pp. 191-202):

    .. math::

        lat = \\arcsin(\\cos(c) * \\sin(lat_0) +
                       (y * \\sin(c) * \\cos(lat_0) / \\rho))

        lon = lon_0 + \\arctan2(
            x * \\sin(c),
            \\rho * \\cos(lat_0) * \\cos(c) - y * \\sin(lat_0) * \\sin(c))

        \\rho = \\sqrt(x^2 + y^2)

        c = \\rho / R

    Parameters
    ----------
    x, y : array-like
        Cartesian coordinates in the same units as R, typically meters.
    lon_0, lat_0 : float
        Longitude and latitude, in degrees, of the center of the projection.
    R : float, optional
        Earth radius in the same units as x and y.

    Returns
    -------
    lon, lat : array
        Longitude and latitude of Cartesian coordinates in degrees; lon is
        normalized to the [-180, 180] range.
    """
    x = numpy.atleast_1d(numpy.asarray(x))
    y = numpy.atleast_1d(numpy.asarray(y))

    lat_ref = numpy.deg2rad(lat_0)
    lon_ref = numpy.deg2rad(lon_0)

    rho = numpy.sqrt(x * x + y * y)
    c = rho / R

    with warnings.catch_warnings():
        # rho may be zero; the resulting divide warning is harmless because
        # those entries are overwritten just below.
        warnings.simplefilter("ignore", RuntimeWarning)
        lat_deg = numpy.rad2deg(numpy.arcsin(
            numpy.cos(c) * numpy.sin(lat_ref) +
            y * numpy.sin(c) * numpy.cos(lat_ref) / rho))
    # Points at the projection center map exactly to the center latitude.
    lat_deg[rho == 0] = lat_0

    num = x * numpy.sin(c)
    den = rho * numpy.cos(lat_ref) * numpy.cos(c) - y * numpy.sin(lat_ref) * numpy.sin(c)
    lon_deg = numpy.rad2deg(lon_ref + numpy.arctan2(num, den))
    # Longitudes should be from -180 to 180 degrees.
    lon_deg[lon_deg > 180] -= 360.
    lon_deg[lon_deg < -180] += 360.

    return lon_deg, lat_deg
147 147
def antenna_to_geographic(ranges, azimuths, elevations, site):
    """
    Convert antenna coordinates to geographic (lon, lat) around *site*.

    *site* is a (longitude, latitude) pair in degrees; ranges are in km.
    """
    east, north, _up = antenna_to_cartesian(
        numpy.array(ranges), numpy.array(azimuths), numpy.array(elevations))
    return cartesian_to_geographic_aeqd(east, north, site[0], site[1], R=6370997.)
154 154
def ll2xy(lat1, lon1, lat2, lon2):
    """
    Planar (x, y) offset in km of point 2 relative to point 1.

    Haversine great-circle distance combined with the bearing between the
    two points, split into east/north components; inputs are latitudes and
    longitudes in degrees.
    """
    deg = 0.017453292519943295  # pi / 180
    hav = 0.5 - numpy.cos((lat2 - lat1) * deg)/2 + numpy.cos(lat1 * deg) * \
        numpy.cos(lat2 * deg) * (1 - numpy.cos((lon2 - lon1) * deg)) / 2
    dist = 12742 * numpy.arcsin(numpy.sqrt(hav))
    bearing = numpy.arctan2(
        numpy.sin((lon2 - lon1) * deg) * numpy.cos(lat2 * deg),
        numpy.cos(lat1 * deg) * numpy.sin(lat2 * deg) -
        numpy.sin(lat1 * deg) * numpy.cos(lat2 * deg) * numpy.cos((lon2 - lon1) * deg))
    angle = numpy.pi/2 - bearing
    return dist * numpy.cos(angle), dist * numpy.sin(angle)
165 165
166 166
def km2deg(km):
    '''
    Convert a distance in km to the angle in degrees it subtends
    at the Earth's center.
    '''
    return numpy.rad2deg(km / EARTH_RADIUS)
173 173
174 174
175 175
class SpectralMomentsPlot(SpectraPlot):
    '''
    Spectra-style plot of spectral moments data.
    '''
    CODE = 'spc_moments'
183 183
class DobleGaussianPlot(SpectraPlot):
    '''
    Spectra-style plot of double-Gaussian fit data.
    '''
    CODE = 'gaussian_fit'
191 191
class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
    '''
    Spectra-cut plot overlaid with the double-Gaussian fit.
    '''
    CODE = 'cut_gaussian_fit'
197 197
class SnrPlot(RTIPlot):
    '''
    RTI-style plot of SNR data, displayed in dB.
    '''

    CODE = 'snr'
    colormap = 'jet'

    def update(self, dataOut):
        # Convert linear SNR to decibels for display.
        return {'snr': 10 * numpy.log10(dataOut.data_snr)}, {}
213 213
class DopplerPlot(RTIPlot):
    '''
    RTI-style plot of Doppler data (1st spectral moment).
    '''

    CODE = 'dop'
    colormap = 'jet'

    def update(self, dataOut):
        # NOTE(review): log10 fails for zero/negative values; confirm
        # dataOut.data_dop is strictly positive here.
        return {'dop': 10 * numpy.log10(dataOut.data_dop)}, {}
229 229
class PowerPlot(RTIPlot):
    '''
    RTI-style plot of power data (0th spectral moment), in dB.
    '''

    CODE = 'pow'
    colormap = 'jet'

    def update(self, dataOut):
        # Normalize by the processing gain before converting to dB.
        normalized = dataOut.data_pow / dataOut.normFactor
        return {'pow': 10 * numpy.log10(normalized)}, {}
243 243
class SpectralWidthPlot(RTIPlot):
    '''
    RTI-style plot of spectral width data (2nd spectral moment).
    '''

    CODE = 'width'
    colormap = 'jet'

    def update(self, dataOut):
        # Width is passed through unchanged (no dB conversion).
        return {'width': dataOut.data_width}, {}
259 259
class SkyMapPlot(Plot):
    '''
    Polar sky-map plot of detected meteor events.
    '''

    CODE = 'param'

    def setup(self):

        # Single square polar panel; zenith angle is the radial coordinate.
        self.ncols = 1
        self.nrows = 1
        self.width = 7.2
        self.height = 7.2
        self.nplots = 1
        self.xlabel = 'Zonal Zenith Angle (deg)'
        self.ylabel = 'Meridional Zenith Angle (deg)'
        self.polar = True
        self.ymin = -180
        self.ymax = 180
        self.colorbar = False

    def plot(self):

        # Stack all buffered parameter rows; the last column is an error
        # flag, so keep only rows where it is zero.
        events = numpy.concatenate(self.data['param'])
        valid = events[numpy.where(events[:, -1] == 0)[0], :]
        azimuth_deg = valid[:, 3]
        zenith = valid[:, 4]

        theta = azimuth_deg * numpy.pi / 180
        radius = zenith

        ax = self.axes[0]

        if ax.firsttime:
            ax.plot = ax.plot(theta, radius, 'bo', markersize=5)[0]
        else:
            ax.plot.set_data(theta, radius)

        dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
        dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
        self.titles[0] = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (
            dt1, dt2, len(theta))
306 306
307 307
class GenericRTIPlot(Plot):
    '''
    Generic RTI plot for any data_xxxx attribute list, one row per channel.
    '''

    CODE = 'param'
    colormap = 'viridis'
    plot_type = 'pcolorbuffer'

    def setup(self):
        """Lay out one row per parameter channel of the 'param' buffer."""
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = self.data.shape('param')[0]
        self.nplots = self.nrows
        self.plots_adjust.update({'hspace': 0.8, 'left': 0.1, 'bottom': 0.08, 'right': 0.95, 'top': 0.95})

        if not self.xlabel:
            self.xlabel = 'Time'

        self.ylabel = 'Range [km]'
        if not self.titles:
            self.titles = ['Param {}'.format(x) for x in range(self.nrows)]

    def update(self, dataOut):
        """Concatenate the configured dataOut attributes into one array."""
        data = {
            'param': numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
        }
        return data, {}

    def plot(self):
        self.x = self.data.times
        self.y = self.data.yrange
        self.z = self.data['param']
        self.z = 10 * numpy.log10(self.z)  # display in dB
        self.z = numpy.ma.masked_invalid(self.z)

        if self.decimation is None:
            x, y, z = self.fill_gaps(self.x, self.y, self.z)
        else:
            x, y, z = self.fill_gaps(*self.decimate())

        for n, ax in enumerate(self.axes):
            # Auto-range color limits from the data unless explicitly set.
            self.zmax = self.zmax if self.zmax is not None else numpy.max(self.z[n])
            self.zmin = self.zmin if self.zmin is not None else numpy.min(self.z[n])
            if self.zlimits is not None:
                self.zmin, self.zmax = self.zlimits[n]

            if not ax.firsttime:
                # FIX: `ax.collections.remove(ax.collections[0])` mutates the
                # ArtistList, which Matplotlib deprecated (removed in 3.7);
                # removing the artist itself is the supported way.
                ax.collections[0].remove()

            # The firsttime/redraw branches were identical apart from the
            # removal above, so they are merged into one pcolormesh call.
            ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
                                   vmin=self.zmin,
                                   vmax=self.zmax,
                                   cmap=self.cmaps[n]
                                   )
380 380
class PolarMapPlot(Plot):
    '''
    Plot for weather radar data, with geographic overlays in 'E' mode.
    '''

    CODE = 'param'
    colormap = 'seismic'

    def setup(self):
        self.ncols = 1
        self.nrows = 1
        self.width = 9
        self.height = 8
        self.mode = self.data.meta['mode']
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))
        if self.mode == 'E':
            self.xlabel = 'Longitude'
            self.ylabel = 'Latitude'
        else:
            self.xlabel = 'Range (km)'
            self.ylabel = 'Height (km)'
        self.bgcolor = 'white'
        self.cb_labels = self.data.meta['units']
        self.lat = self.data.meta['latitude']
        self.lon = self.data.meta['longitude']
        # Axis limits are given in km; convert to degrees around the radar site.
        self.xmin, self.xmax = float(
            km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
        self.ymin, self.ymax = float(
            km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)

    def plot(self):

        for n, ax in enumerate(self.axes):
            data = self.data['param'][self.channels[n]]

            zeniths = numpy.linspace(
                0, self.data.meta['max_range'], data.shape[1])
            if self.mode == 'E':
                # Elevation scan: project slant range onto lat/lon degrees.
                azimuths = -numpy.radians(self.data.yrange) + numpy.pi/2
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
                    theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
                x = km2deg(x) + self.lon
                y = km2deg(y) + self.lat
            else:
                # Azimuth scan: plain polar -> cartesian, still in km.
                azimuths = numpy.radians(self.data.yrange)
                r, theta = numpy.meshgrid(zeniths, azimuths)
                x, y = r*numpy.cos(theta), r*numpy.sin(theta)
                self.y = zeniths

            if self.zlimits is not None:
                self.zmin, self.zmax = self.zlimits[n]
            if not ax.firsttime:
                # FIX: removing via the collections list was deprecated and
                # removed in Matplotlib 3.7; remove the artist directly.
                ax.collections[0].remove()
            ax.plt = ax.pcolormesh(
                x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
                vmin=self.zmin,
                vmax=self.zmax,
                cmap=self.cmaps[n])

            if self.mode == 'A':
                continue

            # Overlay district names (geographic mode only).
            # FIX: files are now opened with `with` — the originals were never
            # closed, leaking a descriptor per plotted frame.
            with open('/data/workspace/schain_scripts/distrito.csv') as f:
                for line in f:
                    label, lon, lat = [s.strip() for s in line.split(',') if s]
                    lat = float(lat)
                    lon = float(lon)
                    # FIX: text-mode reads give `str`, so label.decode('utf8')
                    # raised AttributeError on Python 3; use the label directly.
                    ax.text(lon, lat, label, ha='center',
                            va='bottom', size='8', color='black')

            # Overlay boundary polylines ('#' lines separate polygons).
            limites = []
            tmp = []
            with open('/data/workspace/schain_scripts/lima.csv') as f:
                for line in f:
                    if '#' in line:
                        if tmp:
                            limites.append(tmp)
                            tmp = []
                        continue
                    values = line.strip().split(',')
                    tmp.append((float(values[0]), float(values[1])))
            for points in limites:
                ax.add_patch(
                    Polygon(points, ec='k', fc='none', ls='--', lw=0.5))

            # Overlay river basins.
            for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
                with open('/data/workspace/schain_scripts/{}.csv'.format(cuenca)) as f:
                    values = [line.strip().split(',') for line in f]
                points = [(float(s[0]), float(s[1])) for s in values]
                ax.add_patch(Polygon(points, ec='b', fc='none'))

            # Range-ring grid with km labels at the 60-degree bearing.
            for r in (15, 30, 45, 60):
                ax.add_artist(plt.Circle((self.lon, self.lat),
                                         km2deg(r), color='0.6', fill=False, lw=0.2))
                ax.text(
                    self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
                    self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
                    '{}km'.format(r),
                    ha='center', va='bottom', size='8', color='0.6', weight='heavy')

        if self.mode == 'E':
            title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
            label = 'E{:02d}'.format(int(self.data.meta['elevation']))
        else:
            title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
            label = 'A{:02d}'.format(int(self.data.meta['azimuth']))

        self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
        self.titles = ['{} {}'.format(
            self.data.parameters[x], title) for x in self.channels]
511 511
class WeatherParamsPlot(Plot):
    """Polar (PPI/RHI) display of SOPHY weather-radar parameters."""

    plot_type = 'scattermap'
    buffering = False

    def setup(self):

        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.ylabel = 'Range [km]'
        self.xlabel = 'Range [km]'
        self.polar = True
        self.grid = True
        if self.channels is not None:
            self.nplots = len(self.channels)
            self.nrows = len(self.channels)
        else:
            self.nplots = self.data.shape(self.CODE)[0]
            self.nrows = self.nplots
            self.channels = list(range(self.nplots))

        self.colorbar = True
        self.width = 8
        self.height = 8
        self.ini = 0
        self.len_azi = 0
        self.buffer_ini = None
        self.buffer_ele = None
        self.plots_adjust.update({'wspace': 0.4, 'hspace': 0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
        self.flag = 0
        self.indicador = 0
        self.last_data_ele = None
        self.val_mean = None

    def update(self, dataOut):
        """Extract the selected variable, georeference it, return (data, meta)."""

        # Index of each physical variable inside data_param's second axis.
        # (FIX: renamed from `vars`, which shadowed the builtin.)
        var_index = {
            'S': 0, 'V': 1, 'W': 2, 'SNR': 3,
            'Z': 4, 'D': 5, 'P': 6, 'R': 7,
        }

        data = {}
        meta = {}

        if hasattr(dataOut, 'nFFTPoints'):
            factor = dataOut.normFactor
        else:
            factor = 1

        if hasattr(dataOut, 'dparam'):
            # Pre-computed parameters: use them as-is.
            tmp = getattr(dataOut, 'data_param')
        else:
            if 'S' in self.attr_data[0]:
                # Power channel: normalize and convert to dB.
                tmp = 10*numpy.log10(10.0*getattr(dataOut, 'data_param')[:, 0, :]/(factor))
            else:
                tmp = getattr(dataOut, 'data_param')[:, var_index[self.attr_data[0]], :]

        if self.mask:
            # Blank gates below the SNR threshold (SNR lives at index 3).
            mask = dataOut.data_param[:, 3, :] < self.mask
            tmp = numpy.ma.masked_array(tmp, mask=mask)

        r = dataOut.heightList
        delta_height = r[1]-r[0]
        valid = numpy.where(r >= 0)[0]
        data['r'] = numpy.arange(len(valid))*delta_height

        try:
            data['data'] = tmp[self.channels[0]][:, valid]
        except Exception:
            # FIX: narrowed from a bare except; fall back to channel 0.
            data['data'] = tmp[0][:, valid]

        if dataOut.mode_op == 'PPI':
            self.CODE = 'PPI'
            self.title = self.CODE
        elif dataOut.mode_op == 'RHI':
            self.CODE = 'RHI'
            self.title = self.CODE

        data['azi'] = dataOut.data_azi
        data['ele'] = dataOut.data_ele
        data['mode_op'] = dataOut.mode_op
        self.mode = dataOut.mode_op
        var = data['data'].flatten()
        r = numpy.tile(data['r'], data['data'].shape[0])
        az = numpy.repeat(data['azi'], data['data'].shape[1])
        el = numpy.repeat(data['ele'], data['data'].shape[1])

        # Radar site fixed at the SOPHY coordinates (lon, lat).
        latlon = antenna_to_geographic(r, az, el, (-75.295893, -12.040436))

        if self.mask:
            meta['lat'] = latlon[1][var.mask == False]
            meta['lon'] = latlon[0][var.mask == False]
            data['var'] = numpy.array([var[var.mask == False]])
        else:
            meta['lat'] = latlon[1]
            meta['lon'] = latlon[0]
            data['var'] = numpy.array([var])

        return data, meta

    def plot(self):
        data = self.data[-1]
        z = data['data']
        r = data['r']
        self.titles = []

        self.ymax = self.ymax if self.ymax else numpy.nanmax(r)
        self.ymin = self.ymin if self.ymin else numpy.nanmin(r)
        self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
        self.zmin = self.zmin if self.zmin is not None else numpy.nanmin(z)

        # HDF5-loaded metadata may come back as bytes.
        if isinstance(data['mode_op'], bytes):
            data['mode_op'] = data['mode_op'].decode()

        if data['mode_op'] == 'RHI':
            try:
                # Reset angular limits when the scan mode just changed.
                if self.data['mode_op'][-2] == 'PPI':
                    self.ang_min = None
                    self.ang_max = None
            except Exception:
                pass
            self.ang_min = self.ang_min if self.ang_min else 0
            self.ang_max = self.ang_max if self.ang_max else 90
            r, theta = numpy.meshgrid(r, numpy.radians(data['ele']))
        elif data['mode_op'] == 'PPI':
            try:
                if self.data['mode_op'][-2] == 'RHI':
                    self.ang_min = None
                    self.ang_max = None
            except Exception:
                pass
            self.ang_min = self.ang_min if self.ang_min else 0
            self.ang_max = self.ang_max if self.ang_max else 360
            r, theta = numpy.meshgrid(r, numpy.radians(data['azi']))

        self.clear_figures()

        for i, ax in enumerate(self.axes):
            # The original firsttime/redraw branches were byte-identical;
            # they are merged into a single draw path.
            ax.set_xlim(numpy.radians(self.ang_min), numpy.radians(self.ang_max))
            ax.plt = ax.pcolormesh(theta, r, z, cmap=self.colormap, vmin=self.zmin, vmax=self.zmax)
            if data['mode_op'] == 'PPI':
                # PPI is drawn compass-style: clockwise, north up.
                ax.set_theta_direction(-1)
                ax.set_theta_offset(numpy.pi/2)

            ax.grid(True)
            if data['mode_op'] == 'RHI':
                # Title angle = mean azimuth over the central half of the scan.
                len_aux = int(data['azi'].shape[0]/4)
                mean = numpy.mean(data['azi'][len_aux:-len_aux])
                if len(self.channels) != 1:
                    self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[x], str(round(mean, 1)), x) for x in range(self.nrows)]
                else:
                    self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[0], str(round(mean, 1)), self.channels[0])]
            elif data['mode_op'] == 'PPI':
                len_aux = int(data['ele'].shape[0]/4)
                mean = numpy.mean(data['ele'][len_aux:-len_aux])
                if len(self.channels) != 1:
                    self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[x], str(round(mean, 1)), x) for x in range(self.nrows)]
                else:
                    self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[0], str(round(mean, 1)), self.channels[0])]
            self.mode_value = round(mean, 1)
@@ -1,1609 +1,1611
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
def isNumber(cad):
    """Return True if *cad* can be converted to a float, False otherwise.

    Input:
        cad : string (or any object accepted by float()) to test.

    Return:
        True when the value is numeric, False when conversion fails.
    """
    try:
        float(cad)
        return True
    except (ValueError, TypeError):
        # FIX: narrowed from a bare except so real errors such as
        # KeyboardInterrupt are no longer swallowed.
        return False
55 55
56 56
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """Return 1 if the Jicamarca data file holds data inside the epoch range.

    Inputs:
        filename       : full path of a Jicamarca (.r) data file
        startUTSeconds : range start, seconds since 1970-01-01
        endUTSeconds   : range end, seconds since 1970-01-01

    Return:
        1 if the file's basic-header timestamp lies in [start, end), else 0
        (also 0 when the file cannot be opened or its header is invalid).
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # Keep the file only when its first-block time is inside [start, end).
    if startUTSeconds <= basicHeaderObj.utc < endUTSeconds:
        return 1
    return 0
97 97
98 98
def isTimeInRange(thisTime, startTime, endTime):
    """Return 1 if *thisTime* lies inside [startTime, endTime], else 0.

    Handles ranges that wrap past midnight: when endTime < startTime the
    interval is interpreted as startTime..24h plus 0h..endTime.
    """
    if endTime >= startTime:
        # Plain, non-wrapping interval.
        return 1 if startTime <= thisTime <= endTime else 0
    # Wrapping interval: outside only when strictly between end and start.
    return 0 if (thisTime < startTime and thisTime > endTime) else 1
108 108
109 109
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """Return the file's first-block datetime when the Jicamarca data file
    overlaps the given date/time range, otherwise None.

    Inputs:
        filename  : full path of a Jicamarca (.r) data file
        startDate : datetime.date, start of the selected date range
        endDate   : datetime.date, end of the selected date range
        startTime : datetime.time, start of the selected daily time range
        endTime   : datetime.time, end of the selected daily time range

    Return:
        datetime of the first block when the file contains data in range;
        None when the file cannot be opened, its headers are invalid, or
        it lies outside the requested range.
    """

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    # FIX: the original leaked `fp` on every early return below; the
    # try/finally guarantees the descriptor is always released.
    try:
        firstBasicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        lastBasicHeaderObj = BasicHeader(LOCALTIME)

        if not firstBasicHeaderObj.read(fp):
            print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
            return None

        if not systemHeaderObj.read(fp):
            return None

        if not radarControllerHeaderObj.read(fp):
            return None

        if not processingHeaderObj.read(fp):
            return None

        filesize = os.path.getsize(filename)

        offset = processingHeaderObj.blockSize + 24  # header size

        if filesize <= offset:
            print("[Reading] %s: This file has not enough data" % filename)
            return None

        # Jump back to the last block and read its basic header.
        fp.seek(-offset, 2)
        lastBasicHeaderObj.read(fp)
    finally:
        fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    # -----------o----------------------------o-----------
    #        startTime                      endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None
        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    # <<<<<<<<<<<o                            o>>>>>>>>>>>
    # -----------o----------------------------o-----------
    #          endTime                     startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
211 211
212 212
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """Return 1 if a radar day-folder lies inside [startDate, endDate].

    Inputs:
        folder    : full folder path; its basename must look like "?YYYYDDD"
                    (YYYY = year, DDD = day of year)
        startDate : datetime.date lower bound, or None to skip the check
        endDate   : datetime.date upper bound, or None to skip the check

    Return:
        1 when the name parses and the date is in range, else 0.
    """
    basename = os.path.basename(folder)

    if not isRadarFolder(basename):
        print("The folder %s has not the rigth format" % folder)
        return 0

    # The date check only applies when both bounds are supplied.
    if startDate and endDate:
        thisDate = getDateFromRadarFolder(basename)
        if thisDate < startDate or thisDate > endDate:
            return 0

    return 1
252 252
253 253
def isFileInDateRange(filename, startDate=None, endDate=None):
    """Return 1 if a radar data file lies inside [startDate, endDate].

    Inputs:
        filename  : full file path; its basename must look like "?YYYYDDDsss"
                    (YYYY = year, DDD = day of year, sss = set number)
        startDate : datetime.date lower bound, or None to skip the check
        endDate   : datetime.date upper bound, or None to skip the check

    Return:
        1 when the name parses and the date is in range, else 0.
    """
    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        print("The filename %s has not the rigth format" % filename)
        return 0

    # The date check only applies when both bounds are supplied.
    if startDate and endDate:
        thisDate = getDateFromRadarFile(basename)
        if thisDate < startDate or thisDate > endDate:
            return 0

    return 1
295 295
296 296
def getFileFromSet(path, ext, set):
    """Return the filename in *path* matching the requested set number.

    File names follow "HYYYYDDDSSS.ext". When no file matches the requested
    set, fall back to the last valid file in the folder; return None when
    the folder holds no parseable file at all.
    """
    validFilelist = []
    fileList = os.listdir(path)

    #      0    1234    567   89A   BCDE
    #      H    YYYY    DDD   SSS   .ext

    year = doy = None
    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except ValueError:
            # Not a radar file name; skip it.
            continue

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    # FIX: the original referenced year/doy while still unbound when no file
    # parsed, raising NameError instead of returning None.
    if year is None:
        return None

    myfile = fnmatch.filter(
        validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
331 331
332 332
def getlastFileFromPath(path, ext):
    """Return the last (lexicographically) file in *path* that matches the
    "PYYYYDDDSSS.ext" naming convention, or None when no file qualifies.

    Input:
        path : folder to scan
        ext  : required file extension (compared case-insensitively)
    """
    candidates = []

    #      0    1234    567   89A   BCDE
    #      H    YYYY    DDD   SSS   .ext

    for entry in os.listdir(path):
        # Year and day-of-year fields must both be numeric.
        try:
            float(entry[1:5])
            float(entry[5:8])
        except (ValueError, TypeError):
            continue

        # Mirrors the original int conversions (may raise for odd names).
        int(entry[1:5])
        int(entry[5:8])

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
374 374
375 375
def isRadarFolder(folder):
    """Return 1 if *folder* matches the "?YYYYDDD" radar folder pattern, else 0."""
    try:
        int(folder[1:5])  # year
        int(folder[5:8])  # day of year
    except (ValueError, TypeError):
        # FIX: narrowed from a bare except; only conversion failures mean "no".
        return 0
    return 1
384 384
385 385
def isRadarFile(file):
    """Return 1 if *file* matches "?YYYYDDDsss" (year, doy, set digits), else 0."""
    try:
        int(file[1:5])   # year
        int(file[5:8])   # day of year
        int(file[8:11])  # set number
    except (ValueError, TypeError):
        # FIX: narrowed from a bare except; only conversion failures mean "no".
        return 0
    return 1
395 395
396 396
def getDateFromRadarFile(file):
    """Parse "?YYYYDDDsss..." and return the datetime.date it encodes, or None."""
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number: validated, not used in the date
    except (ValueError, TypeError):
        # FIX: narrowed from a bare except clause.
        return None

    # Day-of-year arithmetic: Jan 1 plus (doy - 1) days.
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
407 407
408 408
def getDateFromRadarFolder(folder):
    """Parse "?YYYYDDD" and return the datetime.date it encodes, or None."""
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except (ValueError, TypeError):
        # FIX: narrowed from a bare except clause.
        return None

    # Day-of-year arithmetic: Jan 1 plus (doy - 1) days.
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
418 418
def parse_format(s, fmt):
    """Substitute each strftime directive in *fmt* with the matching slice of
    *s*, producing a concrete string shaped like *fmt* (widths taken from
    DT_DIRECTIVES, e.g. '%Y' consumes 4 characters)."""
    remaining = fmt.count('%')
    while remaining:
        pos = fmt.index('%')
        width = DT_DIRECTIVES[fmt[pos:pos + 2]]
        # Note: replace() substitutes every occurrence of this directive,
        # exactly as the original implementation did.
        fmt = fmt.replace(fmt[pos:pos + 2], s[pos:pos + width])
        remaining -= 1
    return fmt
426 426
class Reader(object):
    """Base class for schain file readers: holds shared reader state plus the
    online/offline file-searching helpers."""

    # --- constants / configuration defaults ---
    c = 3E8
    isConfig = False
    dtype = None
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None
    ext = None

    # --- per-file / per-block state flags ---
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None
    firstHeaderSize = 0
    basicHeaderSize = 24
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False

    # --- search / selection parameters ---
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False
    delay = 60
    nTries = 3  # quantity tries
    nFiles = 3  # number of files for searching
    walk = True
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open
    open_mode = 'rb'
    filter = None
485 486 def run(self):
486 487
487 488 raise NotImplementedError
488 489
489 490 def getAllowedArgs(self):
490 491 if hasattr(self, '__attrs__'):
491 492 return self.__attrs__
492 493 else:
493 494 return inspect.getargspec(self.run).args
494 495
495 496 def set_kwargs(self, **kwargs):
496 497
497 498 for key, value in kwargs.items():
498 499 setattr(self, key, value)
499 500
500 501 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 502
502 503 folders = [x for f in path.split(',')
503 504 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 505 folders.sort()
505 506
506 507 if last:
507 508 folders = [folders[-1]]
508 509
509 510 for folder in folders:
510 511 try:
511 512 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 513 if dt >= startDate and dt <= endDate:
513 514 yield os.path.join(path, folder)
514 515 else:
515 516 log.log('Skiping folder {}'.format(folder), self.name)
516 517 except Exception as e:
517 518 log.log('Skiping folder {}'.format(folder), self.name)
518 519 continue
519 520 return
520 521
521 522 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 expLabel='', last=False):
523 expLabel='', filter=None,last=False):
523 524
524 525 for path in folders:
525 526 files = glob.glob1(path, '*{}'.format(ext))
526 527 files.sort()
528 if filter is not None:
529 files= [ file for file in files if os.path.splitext(file)[0][-len(filter):] == filter]
527 530 if last:
528 531 if files:
529 532 fo = files[-1]
530 533 try:
531 534 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 535 yield os.path.join(path, expLabel, fo)
533 536 except Exception as e:
534 537 pass
535 538 return
536 539 else:
537 540 return
538 541
539 542 for fo in files:
540 543 try:
541 544 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 545 if dt >= startDate and dt <= endDate:
543 546 yield os.path.join(path, expLabel, fo)
544 547 else:
545 548 log.log('Skiping file {}'.format(fo), self.name)
546 549 except Exception as e:
547 550 log.log('Skiping file {}'.format(fo), self.name)
548 551 continue
549 552
550 553 def searchFilesOffLine(self, path, startDate, endDate,
551 554 expLabel, ext, walk,
552 filefmt, folderfmt):
555 filefmt, folderfmt,filter):
553 556 """Search files in offline mode for the given arguments
554 557
555 558 Return:
556 559 Generator of files
557 560 """
558 561
559 562 if walk:
560 563 folders = self.find_folders(
561 564 path, startDate, endDate, folderfmt)
562 565 else:
563 566 folders = path.split(',')
564 567
565 568 return self.find_files(
566 folders, ext, filefmt, startDate, endDate, expLabel)
569 folders, ext, filefmt, startDate, endDate, expLabel,filter)
567 570
568 571 def searchFilesOnLine(self, path, startDate, endDate,
569 572 expLabel, ext, walk,
570 filefmt, folderfmt):
573 filefmt, folderfmt,filter):
571 574 """Search for the last file of the last folder
572 575
573 576 Arguments:
574 577 path : carpeta donde estan contenidos los files que contiene data
575 578 expLabel : Nombre del subexperimento (subfolder)
576 579 ext : extension de los files
577 580 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
578 581
579 582 Return:
580 583 generator with the full path of last filename
581 584 """
582 585
583 586 if walk:
584 587 folders = self.find_folders(
585 588 path, startDate, endDate, folderfmt, last=True)
586 589 else:
587 590 folders = path.split(',')
588 591
589 return self.find_files(
590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
592 return self.find_files(folders, ext, filefmt, startDate, endDate, expLabel, filter,last=True)
591 593
    def setNextFile(self):
        """Select, open and parse the header of the next file to be read.

        Loops until a file accepted by `verifyFile` is found. Raises
        SchainError when online waiting is exhausted, SchainWarning when
        there are no (more) offline files.
        """

        while True:
            # close the previously open file, if any
            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):
                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    if self.fileIndex == -1:
                        # no file was ever opened during this run
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            # keep looping until a valid file is found
            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0
620 622
    def setNextFileOnline(self):
        """Check for the next file to be readed in online mode.

        Retries `self.nTries` times per candidate (sleeping `self.delay`
        seconds between tries) and, after exhausting the current day,
        advances to the next day.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean (1 when a file was opened, 0 otherwise)
        """
        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                # after the first miss, keep retrying the SAME set number
                nextFile = False
                continue

            if fullfilename is not None:
                break

            # NOTE(review): self.nTries is permanently overwritten here (1,
            # then 3 when rolling to the next day) -- confirm this reset is
            # intended to persist across calls
            self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)
            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0
671 673
672 674 def setNextFileOffline(self):
673 675 """Open the next file to be readed in offline mode"""
674 676
675 677 try:
676 678 filename = next(self.filenameList)
677 679 self.fileIndex +=1
678 680 except StopIteration:
679 681 self.flagNoMoreFiles = 1
680 682 return 0
681 683
682 684 self.filename = filename
683 685 self.fileSize = os.path.getsize(filename)
684 686 self.fp = self.open_file(filename, self.open_mode)
685 687 self.flagIsNewFile = 1
686 688
687 689 return 1
688 690
689 691 @staticmethod
690 692 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 693 """Check if the given datetime is in range"""
692 694
693 695 if startDate <= dt.date() <= endDate:
694 696 if startTime <= dt.time() <= endTime:
695 697 return True
696 698 return False
697 699
    def verifyFile(self, filename):
        """Check for a valid file.

        Base implementation accepts every file; subclasses override this to
        inspect headers, sizes or date ranges.

        Arguments:
            filename -- full path filename

        Return:
            boolean -- always True here
        """

        return True
709 711
    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists.

        Tries every folder prefix (none, 'd', 'D') combined with the file
        prefixes valid for the current extension, returning the first
        existing match.

        Return:
            (fullfilename, filename) when found, (None, last tried filename)
            otherwise.
        """
        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r": # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata": # spectra
            prefixFileList = ['p', 'P']
        elif self.ext.lower() == ".hdf5": # HDF5
            prefixFileList = ['D', 'P'] # HDF5
        # NOTE(review): any other extension leaves prefixFileList (and later
        # filename) unbound and raises NameError -- confirm only the three
        # extensions above can reach this method

        # scan every possible prefix combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the folder name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList: # both case variants of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename
        #raise NotImplementedError
748 750
    def readFirstHeader(self):
        """Parse the file header.

        No-op in the base class; format-specific readers override it.
        """

        pass
753 755
    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait (polling) until the open file contains a whole data block.

        Re-opens `self.filename`, seeks back to `pointer_location` and checks
        whether at least `blocksize` (or the processing header's blockSize)
        bytes are available past that point, retrying up to `self.nTries`
        times with `self.delay` seconds between tries.

        Return:
            1 when enough data is available, 0 when the retries run out.
        """

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            # re-open so the size reflects data appended by the writer
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0
782 784
class JRODataReader(Reader):
    """Base reader for JRO binary data files (.r voltage / .pdata spectra).

    Extends `Reader` with the JRO file naming convention
    ([dDpP]YYYYDDDSSS.ext, optionally inside [dD]YYYYDDD folders) and the
    parsing of the binary headers (basic, system, radar controller and
    processing headers).
    """

    utc = 0
    nReadBlocks = 0
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24  # fixed size in bytes of every basic header
    __isFirstTimeOnline = 1
    filefmt = "*%Y%j***"
    folderfmt = "*%Y%j"
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']

    def getDtypeWidth(self):
        """Return the width in bytes of one sample of the current dtype."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists.

        Example :
            the correct file name is .../.../D2009307/P2009307367.ext

        Then the method tries the following combinations
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last combination being, in this case, identical to the wanted file

        Return:
            (fullfilename, filename) when found, (None, last filename) otherwise
        """

        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']
        # NOTE(review): other extensions leave prefixFileList unbound and
        # raise NameError below -- confirm only .r/.pdata reach this reader

        # scan every possible prefix combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the folder name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # both case variants of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename

    def __waitNewBlock(self):
        """Wait for a new data block to be appended to the current file.

        Return 1 when a new block was found (its basic header already read),
        0 otherwise. Always returns 0 in offline mode.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # re-open so the size reflects data appended by the writer
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # file is complete; no more blocks will arrive
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0

    def __setNewBlock(self):
        """Position the reader on the next data block (same or next file).

        Return 1 on success, 0 when no further block is available. Also
        maintains `lastUTTime` and the discontinuous-block flag.
        """

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            # realtime mode always jumps to the freshest file
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

    def readNextBlock(self):
        """Read the next data block, skipping blocks outside the time range.

        Return 1 when a block was read, 0 on failure.
        """

        while True:
            if not(self.__setNewBlock()):
                continue

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        return 1

    def readFirstHeader(self):
        """Parse all four binary headers and derive dtype and block geometry."""

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)
        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the sample dtype from the processing flags bitmask
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
                                   PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
        else:
            raise ValueError('Data type was not defined')

        self.dtype = datatype_str
        # expected on-disk file size: first header + N blocks + N-1 basic headers
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
            self.firstHeaderSize + self.basicHeaderSize * \
            (self.processingHeaderObj.dataBlocksPerFile - 1)
        self.getBlockDimension()

    def verifyFile(self, filename):
        """Validate a file by parsing its headers and checking its date range.

        Return True when the file opens, its headers parse, and (offline)
        either its first or last block timestamp falls inside the configured
        range.
        """

        flag = True

        try:
            fp = open(filename, 'rb')
        except IOError:
            log.error("File {} can't be opened".format(filename), self.name)
            return False

        if self.online and self.waitDataBlock(0):
            pass

        basicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        if not(basicHeaderObj.read(fp)):
            flag = False
        if not(systemHeaderObj.read(fp)):
            flag = False
        if not(radarControllerHeaderObj.read(fp)):
            flag = False
        if not(processingHeaderObj.read(fp)):
            flag = False
        if not self.online:
            dt1 = basicHeaderObj.datatime
            # seek to the basic header of the LAST block in the file
            pos = self.fileSize - processingHeaderObj.blockSize - 24
            if pos < 0:
                flag = False
                log.error('Invalid size for file: {}'.format(self.filename), self.name)
            else:
                fp.seek(pos)
                if not(basicHeaderObj.read(fp)):
                    flag = False
                dt2 = basicHeaderObj.datatime
                if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                        self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                    flag = False

        fp.close()
        return flag

    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """List the dates (and optionally paths) that contain data files.

        Raises SchainError when no file with the given extension exists in
        the path, or when none falls inside the date range.
        """

        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*" + ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    # avoid duplicate date/path entries
                    if thisDate in dateList or single_path in pathList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*" + ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
        else:
            if not dateList:
                raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))

        if include_path:
            return dateList, pathList

        return dateList

    def setup(self, **kwargs):
        """Configure the reader and open the first file.

        Resolves the data source: a ZMQ socket when `server` is given,
        otherwise an online/offline file search over `path`.
        """

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.server is not None:
            if 'tcp://' in self.server:
                address = self.server  # fix: was bare `server` (NameError)
            else:
                address = 'ipc:///tmp/%s' % self.server
            self.server = address
            self.context = zmq.Context()
            self.receiver = self.context.socket(zmq.PULL)
            self.receiver.connect(self.server)
            time.sleep(0.5)
            print('[Starting] ReceiverData from {}'.format(self.server))
        else:
            self.server = None
            if self.path == None:
                raise ValueError("[Reading] The path is not valid")

            if self.online:
                log.log("[Reading] Searching files in online mode...", self.name)

                for nTries in range(self.nTries):
                    # fix: pass the `filter` argument (None = no extra filter)
                    # now required by Reader.searchFilesOnLine
                    fullpath = self.searchFilesOnLine(self.path, self.startDate,
                                                      self.endDate, self.expLabel, self.ext, self.walk,
                                                      self.filefmt, self.folderfmt, None)

                    try:
                        fullpath = next(fullpath)
                    except:
                        fullpath = None

                    if fullpath:
                        break

                    log.warning(
                        'Waiting {} sec for a valid file in {}: try {} ...'.format(
                            self.delay, self.path, nTries + 1),
                        self.name)
                    time.sleep(self.delay)

                if not(fullpath):
                    raise schainpy.admin.SchainError(
                        'There isn\'t any valid file in {}'.format(self.path))

                pathname, filename = os.path.split(fullpath)
                # filename layout: xYYYYDDDSSS.ext
                self.year = int(filename[1:5])
                self.doy = int(filename[5:8])
                self.set = int(filename[8:11]) - 1
            else:
                log.log("Searching files in {}".format(self.path), self.name)
                # fix: pass the `filter` argument (None = no extra filter)
                self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                                                            self.endDate, self.expLabel, self.ext, self.walk,
                                                            self.filefmt, self.folderfmt, None)

        self.setNextFile()

        return

    def getBasicHeader(self):
        """Copy the per-block basic header fields into `dataOut`."""

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
            1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs

    def getFirstHeader(self):

        raise NotImplementedError

    def getData(self):

        raise NotImplementedError

    def hasNotDataInBuffer(self):

        raise NotImplementedError

    def readBlock(self):

        raise NotImplementedError

    def isEndProcess(self):
        """Return the no-more-files flag."""

        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)

    def printTotalBlocks(self):

        print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)

    def run(self, **kwargs):
        """Entry point: configure on first call, then yield data.

        Arguments:
            path :
            startDate :
            endDate :
            startTime :
            endTime :
            set :
            expLabel :
            ext :
            online :
            delay :
            walk :
            getblock :
            nTxs :
            realtime :
            blocksize :
            blocktime :
            skip :
            cursor :
            warnings :
            server :
            verbose :
            format :
            oneDDict :
            twoDDict :
            independentParam :
        """

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True
        if self.server is None:
            self.getData()
        else:
            self.getFromServer()
1286 1288
class JRODataWriter(Reader):

    """
    Writes processed data files (.r or .pdata). Data is always written
    block by block.
    """

    setFile = None          # current set (SSS) counter within the day
    profilesPerBlock = None
    blocksPerFile = None
    nWriteBlocks = 0
    fileDate = None         # date of the file currently being written

    def __init__(self, dataOut=None):
        raise NotImplementedError

    def hasAllDataInBuffer(self):
        raise NotImplementedError

    def setBlockDimension(self):
        raise NotImplementedError

    def writeBlock(self):
        raise NotImplementedError

    def putData(self):
        raise NotImplementedError

    def getDtypeWidth(self):
        # Width in bytes of one sample of the current dtype.

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getProcessFlags(self):
        """Build the processing-flags bitmask describing dataOut's state."""

        processFlags = 0

        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

            if self.dataOut.flagShiftFFT:
                processFlags += PROCFLAG.SHIFT_FFT_DATA

        return processFlags

    def setBasicHeader(self):
        """Fill the basic header from dataOut's current timestamp/state."""

        self.basicHeaderObj.size = self.basicHeaderSize # bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks
        # split utctime into whole seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0
        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Get a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise NotImplementedError

    def __writeFirstHeader(self):
        """
        Write the file's first header, i.e. the Basic header plus the Long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute the total first-header size
        sizeLongHeader = self.systemHeaderObj.size + \
            self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

    def __setNewBlock(self):
        """
        If this is a new file write the First Header, otherwise write only
        the Basic Header.

        Return:
            0 : if nothing could be written
            1 : if the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # current file is full; roll over to the next one
        if not(self.setNextFile()):
            return 0

        return 1

    def writeNextBlock(self):
        """
        Select the next data block and write it to a file.

        Return:
            0 : if the data block could not be written
            1 : if the data block was written
        """
        if not(self.__setNewBlock()):
            return 0

        self.writeBlock()

        print("[Writing] Block No. %d/%d" % (self.blockIndex,
                                             self.processingHeaderObj.dataBlocksPerFile))

        return 1

    def setNextFile(self):
        """Determine the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : if the file cannot be written
            1 : if the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        if not os.path.exists(path):
            os.mkdir(path)

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

        fullpath = os.path.join(path, subfolder)
        setFile = self.setFile

        if not(os.path.exists(fullpath)):
            os.mkdir(fullpath)
            setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir(fullpath)
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # the filename must have the following layout
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # resume the set counter from the last existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1 # initialize the set counter

        setFile += 1

        # If this is a new day it resets some values
        if self.dataOut.datatime.date() > self.fileDate:
            setFile = 0
            self.nTotalBlocks = 0

        filen = '{}{:04d}{:03d}{:03d}{}'.format(
            self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)

        filename = os.path.join(path, subfolder, filen)

        fp = open(filename, 'wb')

        self.blockIndex = 0
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1
        self.fileDate = self.dataOut.datatime.date()
        self.setFirstHeader()

        print('[Writing] Opening file: %s' % self.filename)

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
        """
        Set the output format and write the First Header.

        Inputs:
            path : directory where data will be saved
            profilesPerBlock : number of profiles per block
            set : initial file set
            datatype : An integer number that defines data type:
                0 : int8 (1 byte)
                1 : int16 (2 bytes)
                2 : int32 (4 bytes)
                3 : int64 (8 bytes)
                4 : float32 (4 bytes)
                5 : double64 (8 bytes)

        Return:
            0 : if setup failed
            1 : if setup succeeded
        """

        if ext == None:
            ext = self.ext

        self.ext = ext.lower()

        self.path = path

        if set is None:
            self.setFile = -1
        else:
            self.setFile = set - 1

        self.blocksPerFile = blocksPerFile
        self.profilesPerBlock = profilesPerBlock
        self.dataOut = dataOut
        self.fileDate = self.dataOut.datatime.date()
        self.dtype = self.dataOut.dtype

        if datatype is not None:
            self.dtype = get_numpy_dtype(datatype)

        if not(self.setNextFile()):
            print("[Writing] There isn't a next file")
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
        # Entry point: configure on the first call, then write each dataOut.
        # NOTE(review): extra **kwargs are forwarded to setup(), which does
        # not accept them -- confirm callers never pass extras

        if not(self.isConfig):

            self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
                       set=set, ext=ext, datatype=datatype, **kwargs)
            self.isConfig = True

        self.dataOut = dataOut
        self.putData()
        return self.dataOut
1586 1588
@MPDecorator
class printInfo(Operation):
    """Operation that dumps the header objects of `dataOut` exactly once.

    On the first call each requested header attribute is printed (using its
    own `printInfo` method when available); every later call is a no-op.
    """

    def __init__(self):

        Operation.__init__(self)
        self.__printInfo = True

    def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
        # already printed on a previous call -> nothing to do
        if not self.__printInfo:
            return

        for name in headers:
            if not hasattr(dataOut, name):
                log.warning('Header {} Not found in object'.format(name))
                continue
            header = getattr(dataOut, name)
            if hasattr(header, 'printInfo'):
                header.printInfo()
            else:
                print(header)

        self.__printInfo = False
@@ -1,722 +1,733
1 1 from email.utils import localtime
2 2 import os
3 3 import time
4 4 import datetime
5 5
6 6 import numpy
7 7 import h5py
8 8
9 9 import schainpy.admin
10 10 from schainpy.model.data.jrodata import *
11 11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
12 12 from schainpy.model.io.jroIO_base import *
13 13 from schainpy.utils import log
14 14
15 15
class HDFReader(Reader, ProcessingUnit):
    """Processing unit to read HDF5 format files.

    This unit reads HDF5 files created with the `HDFWriter` operation; such
    files contain by default two groups, `Data` and `Metadata`, and every
    variable found is exposed as a `dataOut` attribute.
    Any other HDF5 layout can be read by giving its structure in the
    `description` parameter; extra metadata values can be injected with the
    `extras` parameter.

    Parameters:
    -----------
    path : str
        Path where files are located.
    startDate : date
        Start date of the files
    endDate : list
        End date of the files
    startTime : time
        Start time of the files
    endTime : time
        End time of the files
    description : dict, optional
        Dictionary with the description of the HDF5 file
    extras : dict, optional
        Dictionary with extra metadata to be added to `dataOut`

    Examples
    --------

    desc = {
        'Data': {
            'data_output': ['u', 'v', 'w'],
            'utctime': 'timestamps',
        } ,
        'Metadata': {
            'heightList': 'heights'
        }
    }

    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    extras = {
        'timeZone': 300
    }

    reader = project.addReadUnit(
        name='HDFReader',
        path='/path/to/files',
        startDate='2019/01/01',
        endDate='2019/01/31',
        startTime='00:00:00',
        endTime='23:59:59',
        # description=json.dumps(desc),
        # extras=json.dumps(extras),
        )

    """

    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']

    def __init__(self):
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()
        self.ext = ".hdf5"
        self.optchar = "D"
        self.meta = {}
        self.data = {}
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.description = {}
        self.extras = {}
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"
        self.utcoffset = 0
        # Optional filename filter forwarded to the file-search helpers.
        self.filter = None
        # When truthy, multi-dimensional arrays are handed to dataOut whole
        # instead of sliced per block (see getData).
        self.dparam = None

    def setup(self, **kwargs):
        """Collect keyword arguments and build the list of files to read."""
        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt, self.filter)
                try:
                    fullpath = next(fullpath)
                except Exception:
                    # No file available yet (or the search failed): retry
                    # after `delay` seconds.  Narrower than the original bare
                    # `except:` so Ctrl-C is not swallowed.
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not fullpath:
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            pathname, filename = os.path.split(fullpath)
            # Filename layout: <optchar>YYYYDDDSSS<ext>
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt,
                self.folderfmt, self.filter)

        self.setNextFile()

        return

    def readFirstHeader(self):
        '''Read metadata and data'''

        self.__readMetadata()
        self.__readData()
        self.__setBlockList()

        if 'type' in self.meta:
            # NOTE(review): eval() instantiates the class named in the file's
            # metadata -- only open trusted files.
            self.dataOut = eval(self.meta['type'])()

        if self.dparam:
            setattr(self.dataOut, "dparam", 1)

        for attr in self.meta:
            setattr(self.dataOut, attr, self.meta[attr])

        self.blockIndex = 0

        return

    def __setBlockList(self):
        '''
        Selects the data within the times defined

        self.fp
        self.startTime
        self.endTime
        self.blockList
        self.blocksPerFile

        '''

        startTime = self.startTime
        endTime = self.endTime
        thisUtcTime = self.data['utctime'] + self.utcoffset
        # A single-sample file has no spacing to measure (was a bare except).
        if len(thisUtcTime) > 1:
            self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
        else:
            self.interval = 0
        thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        # Keep only the blocks whose timestamp falls inside [startTime, endTime)
        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)
        return

    def __readMetadata(self):
        '''
        Reads Metadata
        '''

        meta = {}

        if self.description:
            for key, value in self.description['Metadata'].items():
                meta[key] = self.fp[value][()]
        else:
            grp = self.fp['Metadata']
            for name in grp:
                meta[name] = grp[name][()]

        if self.extras:
            for key, value in self.extras.items():
                meta[key] = value
        self.meta = meta

        return

    def __readData(self):
        '''Read the Data group (or the datasets named in `description`).'''

        data = {}

        if self.description:
            for key, value in self.description['Data'].items():
                if isinstance(value, str):
                    if isinstance(self.fp[value], h5py.Dataset):
                        data[key] = self.fp[value][()]
                    elif isinstance(self.fp[value], h5py.Group):
                        # A group maps to a stacked array, one row per channel
                        array = []
                        for ch in self.fp[value]:
                            array.append(self.fp[value][ch][()])
                        data[key] = numpy.array(array)
                elif isinstance(value, list):
                    array = []
                    for ch in value:
                        array.append(self.fp[ch][()])
                    data[key] = numpy.array(array)
        else:
            grp = self.fp['Data']
            for name in grp:
                if isinstance(grp[name], h5py.Dataset):
                    array = grp[name][()]
                elif isinstance(grp[name], h5py.Group):
                    array = []
                    for ch in grp[name]:
                        array.append(grp[name][ch][()])
                    array = numpy.array(array)
                else:
                    log.warning('Unknown type: {}'.format(name))

                if name in self.description:
                    key = self.description[name]
                else:
                    key = name
                data[key] = array

        self.data = data
        return

    def getData(self):
        '''Expose the current block's values as dataOut attributes.'''

        for attr in self.data:
            if self.data[attr].ndim == 1:
                setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
            else:
                if self.dparam:
                    # dparam mode: hand over the whole array, not one block
                    setattr(self.dataOut, attr, self.data[attr])
                else:
                    setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])

        self.dataOut.flagNoData = False
        self.blockIndex += 1

        log.log("Block No. {}/{} -> {}".format(
            self.blockIndex,
            self.blocksPerFile,
            self.dataOut.datatime.ctime()), self.name)

        return

    def run(self, **kwargs):

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        if self.blockIndex == self.blocksPerFile:
            self.setNextFile()

        self.getData()

        return
283 294
@MPDecorator
class HDFWriter(Operation):
    """Operation to write HDF5 files.

    The HDF5 file contains by default two groups, Data and Metadata, where
    any `dataOut` attribute named in the `dataList` and `metadataList`
    parameters is saved; data attributes are normally time dependent while
    metadata attributes are not.
    It is possible to customize the structure of the HDF5 file with the
    optional description parameter, see the examples.

    Parameters:
    -----------
    path : str
        Path where files will be saved.
    blocksPerFile : int
        Number of blocks per file
    metadataList : list
        List of the dataOut attributes that will be saved as metadata
    dataList : int
        List of the dataOut attributes that will be saved as data
    setType : bool
        If True the name of the files corresponds to the timestamp of the data
    description : dict, optional
        Dictionary with the desired description of the HDF5 file

    Examples
    --------

    desc = {
        'data_output': {'winds': ['z', 'w', 'v']},
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'data_output': ['z', 'w', 'v'],
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    writer = proc_unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value='/path/to/file')
    writer.addParameter(name='blocksPerFile', value='32')
    writer.addParameter(name='metadataList', value='heightList,timeZone')
    writer.addParameter(name='dataList',value='data_output,utctime')
    # writer.addParameter(name='description',value=json.dumps(desc))

    """

    ext = ".hdf5"
    optchar = "D"
    filename = None
    path = None
    setFile = None
    fp = None
    firsttime = True
    # Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    # Data Arrays
    dataList = None
    metadataList = None
    currentDay = None
    lastTime = None
    last_Azipos = None
    last_Elepos = None
    mode = None
    # -----------------------
    Typename = None
    mask = False

    def __init__(self):

        Operation.__init__(self)
        return

    def set_kwargs(self, **kwargs):
        # Copy every keyword argument onto this operation as an attribute.
        for key, value in kwargs.items():
            setattr(self, key, value)

    def set_kwargs_obj(self, obj, **kwargs):
        # Copy every keyword argument onto an arbitrary object.
        for key, value in kwargs.items():
            setattr(obj, key, value)

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None, type_data=None, localtime=True, **kwargs):
        """Resolve configuration and describe the datasets to be written."""
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = [s.strip() for s in dataList]
        self.setType = setType
        if self.setType == "weather":
            self.set_kwargs(**kwargs)
            self.set_kwargs_obj(self.dataOut, **kwargs)
            # Index of each weather product along axis 1 of data_param
            self.weather_vars = {
                'S': 0,
                'V': 1,
                'W': 2,
                'SNR': 3,
                'Z': 4,
                'D': 5,
                'P': 6,
                'R': 7,
            }

        if localtime:
            self.getDateTime = datetime.datetime.fromtimestamp
        else:
            self.getDateTime = datetime.datetime.utcfromtimestamp

        self.description = description
        self.type_data = type_data

        if self.metadataList is None:
            self.metadataList = self.dataOut.metadata_list

        dsList = []

        for name in self.dataList:
            dsDict = {}
            if hasattr(self.dataOut, name):
                dataAux = getattr(self.dataOut, name)
                if self.setType == 'weather' and name == 'data_param':
                    # Keep only the slice of the selected weather variable
                    dataAux = dataAux[:, self.weather_vars[self.weather_var], :]
                dsDict['variable'] = name
            else:
                log.warning('Attribute {} not found in dataOut'.format(name), self.name)
                continue

            if dataAux is None:
                continue
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                # numpy.floating replaces numpy.float, removed in NumPy 1.24
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]
                dsDict['dtype'] = dataAux.dtype
            dsList.append(dsDict)

        self.dsList = dsList
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        """Return True when a new file must be started (day change or >3 h gap)."""
        currentTime = self.dataOut.utctime
        dt = self.getDateTime(currentTime)

        dataDay = int(dt.strftime('%j'))

        if self.lastTime is None:
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # New file when the day changes or the gap between samples exceeds 3 hours
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3 * 60 * 60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
            dataList=None, setType=None, description=None, mode=None,
            type_data=None, Reset=False, localtime=True, **kwargs):
        # Avoid shared mutable defaults (was dataList=[], description={});
        # semantics for callers passing nothing are unchanged.
        if dataList is None:
            dataList = []
        if description is None:
            description = {}

        if Reset:
            self.isConfig = False
            self.closeFile()
            self.lastTime = None
            self.blockIndex = 0

        self.dataOut = dataOut
        self.mode = mode

        if not(self.isConfig):
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList,
                       setType=setType, description=description, type_data=type_data,
                       localtime=localtime, **kwargs)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        """Compute the next filename, create the file and its datasets."""
        ext = self.ext
        path = self.path
        setFile = self.setFile

        dt = self.getDateTime(self.dataOut.utctime)

        if self.setType == 'weather':
            subfolder = dt.strftime('%Y-%m-%dT%H-00-00')
        else:
            subfolder = dt.strftime('d%Y%j')

        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # The filename must have the following layout:
                #   0 1234 567 89A BCDE (hex)
                #   x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # Resume the set counter from the last file found
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1  # initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           dt.year,
                                           int(dt.strftime('%j')),
                                           setFile,
                                           ext)
        elif self.setType == "weather":

            # SOPHY_20200505_140215_E10.0_Z.h5
            # SOPHY_20200505_140215_A40.0_Z.h5
            # NOTE(review): if flagMode is neither 0 nor 1, ang_type/ang_ stay
            # unset and the format below raises NameError -- confirm upstream
            # always sets flagMode.
            if self.dataOut.flagMode == 1:  # 'AZI' PPI scan -> fixed elevation
                ang_type = 'E'
                len_aux = int(self.dataOut.data_ele.shape[0] / 4)
                # Average the central half of the angle samples
                mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
                ang_ = round(mean, 1)
            elif self.dataOut.flagMode == 0:  # 'ELE' RHI scan -> fixed azimuth
                ang_type = 'A'
                len_aux = int(self.dataOut.data_azi.shape[0] / 4)
                mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
                ang_ = round(mean, 1)

            file = '%s_%2.2d%2.2d%2.2d_%2.2d%2.2d%2.2d_%s%2.1f_%s%s' % (
                'SOPHY',
                dt.year,
                dt.month,
                dt.day,
                dt.hour,
                dt.minute,
                dt.second,
                ang_type,
                ang_,
                self.weather_var,
                ext)

        else:
            setFile = dt.hour * 60 + dt.minute
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           dt.year,
                                           int(dt.strftime('%j')),
                                           setFile,
                                           ext)

        self.filename = os.path.join(path, subfolder, file)

        self.fp = h5py.File(self.filename, 'w')
        # write metadata
        self.writeMetadata(self.fp)
        # Write data
        self.writeData(self.fp)

    def getLabel(self, name, x=None):
        """Map an attribute name (and optional channel index) to its HDF5 label."""
        if x is None:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], str):
                    return data[name]
                elif isinstance(data[name], list):
                    return None
                elif isinstance(data[name], dict):
                    for key, value in data[name].items():
                        return key
            return name
        else:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], list):
                    return data[name][x]
                elif isinstance(data[name], dict):
                    for key, value in data[name].items():
                        return value[x]
            if 'cspc' in name:
                return 'pair{:02d}'.format(x)
            else:
                return 'channel{:02d}'.format(x)

    def writeMetadata(self, fp):
        """Write every metadataList attribute into the Metadata group."""
        if self.description:
            if 'Metadata' in self.description:
                grp = fp.create_group('Metadata')
            else:
                grp = fp
        else:
            grp = fp.create_group('Metadata')

        for i in range(len(self.metadataList)):
            if not hasattr(self.dataOut, self.metadataList[i]):
                log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                continue
            value = getattr(self.dataOut, self.metadataList[i])
            # h5py has no bool scalar type; store booleans as 0/1
            if isinstance(value, bool):
                if value is True:
                    value = 1
                else:
                    value = 0
            grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
        return

    def writeData(self, fp):
        """Create the (empty) datasets for every entry of dsList."""
        if self.description:
            if 'Data' in self.description:
                grp = fp.create_group('Data')
            else:
                grp = fp
        else:
            grp = fp.create_group('Data')

        dtsets = []
        data = []

        for dsInfo in self.dsList:

            if dsInfo['nDim'] == 0:
                # Scalar per block -> one 1-D dataset, channel index -1
                ds = grp.create_dataset(
                    self.getLabel(dsInfo['variable']),
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                data.append((dsInfo['variable'], -1))
            else:
                label = self.getLabel(dsInfo['variable'])
                if label is not None:
                    sgrp = grp.create_group(label)
                else:
                    sgrp = grp
                # With a single block per file the block axis is dropped
                if self.blocksPerFile == 1:
                    shape = dsInfo['shape'][1:]
                else:
                    shape = (self.blocksPerFile, ) + dsInfo['shape'][1:]
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        self.getLabel(dsInfo['variable'], i),
                        shape,
                        chunks=True,
                        dtype=dsInfo['dtype'],
                        compression='gzip',
                    )
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        """Append the current dataOut block to the open file."""
        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                if self.blocksPerFile == 1:
                    tmp = getattr(self.dataOut, attr)[:, self.weather_vars[self.weather_var], :][ch]
                    if self.mask:
                        # Blank samples whose SNR (data_param index 3, per
                        # weather_vars) falls below the threshold.  The mask is
                        # only computed when masking is enabled.
                        mask = self.dataOut.data_param[:, 3, :][ch] < self.mask
                        tmp[mask] = numpy.nan
                    ds[:] = tmp
                else:
                    ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)

        return

    def closeFile(self):
        """Trim a partially-filled file to its real size and close it."""
        if self.blockIndex != self.blocksPerFile:
            # NOTE(review): Dataset.resize requires datasets created with a
            # resizable maxshape; confirm partial files are actually produced.
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        if self.fp:
            self.fp.flush()
            self.fp.close()

    def close(self):

        self.closeFile()
General Comments 0
You need to be logged in to leave comments. Login now