Fix in writing and script test with general plotting
avaldezp -
r1463:03ce684852b1
@@ -0,0 +1,152 @@
1 # SOPHY PROC script
2 import os, sys, json, argparse
3 import datetime
4 import time
5
6 PATH = '/DATA_RM/DATA'
7 # PATH = '/Users/jespinoza/workspace/data/'
8 #PATH = '/home/roberto/DATA/data_WR_RHI/RHI'
9 PATH = '/home/soporte/Downloads/data_WR_RHI'
10
11
12 PARAM = {
13 'P': {'name': 'dataPP_POWER', 'zmin': 35, 'zmax': 60, 'colormap': 'jet', 'label': 'Power', 'cb_label': 'dB'},
14 'V': {'name': 'dataPP_DOP', 'zmin': -20, 'zmax': 20, 'colormap': 'seismic', 'label': 'Velocity', 'cb_label': 'm/s'},
15 'RH': {'name': 'RhoHV_R', 'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'CoeficienteCorrelacion', 'cb_label': '*'},
16 'FD': {'name': 'PhiD_P', 'zmin': -180, 'zmax': 180, 'colormap': 'RdBu_r', 'label': 'Fase Diferencial', 'cb_label': 'º'},
17 'ZD': {'name': 'Zdb_D', 'zmin': -20, 'zmax': 80, 'colormap': 'viridis', 'label': 'ReflectividadDiferencial', 'cb_label': 'dBz'},
18 'Z': {'name': 'Zdb', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis', 'label': 'Reflectividad', 'cb_label': 'dBz'},
19 'W': {'name': 'Sigmav_W', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis', 'label': 'AnchoEspectral', 'cb_label': 'hz'}
20 }
21
22 def main(args):
23
24 experiment = args.experiment
25 fp = open(os.path.join(PATH, experiment, 'experiment.conf'))
26 conf = json.loads(fp.read())
27
28 ipp_km = conf['usrp_tx']['ipp']
29 ipp = ipp_km * 2 /300000
30 samp_rate = conf['usrp_rx']['sample_rate']
31 axis = ['0' if x=='elevation' else '1' for x in conf['pedestal']['speed']] # AZIMUTH 1 ELEVATION 0
32 speed_axis = conf['pedestal']['speed']
33 steps = conf['pedestal']['table']
34 time_offset = args.time_offset
35 parameters = args.parameters
36 #start_date = experiment.split('@')[1].split('T')[0].replace('-', '/')
37 start_date = '2022/04/22'
38 end_date = start_date
39 #start_time = experiment.split('@')[1].split('T')[1]
40 start_time ='17:42:55'
41 end_time = '23:59:59'
42 max_index = int(samp_rate*ipp*1e6 * args.range / 60) + int(samp_rate*ipp*1e6 * 1.2 / 60)
43 N = int(1/(speed_axis[0]*ipp)) # 1 DEGREE OF RESOLUTION
44 path = os.path.join(PATH, experiment, 'rawdata')
45 path_ped = os.path.join(PATH, experiment, 'position')
46 path_plots = os.path.join(PATH, experiment, 'plots')
47 path_save = os.path.join(PATH, experiment, 'param')
48
49 dBmin = 35
50 dBmax = 60
51 Vmin = -20
52 Vmax = 20
53
54
55 from schainpy.controller import Project
56
57 project = Project()
58 project.setup(id='1', name='Sophy', description='sophy proc')
59
60 reader = project.addReadUnit(datatype='DigitalRFReader',
61 path=path,
62 startDate=start_date,
63 endDate=end_date,
64 startTime=start_time,
65 endTime=end_time,
66 delay=0,
67 online=0,
68 walk=1,
69 ippKm = ipp_km,
70 getByBlock = 1,
71 nProfileBlocks = N,
72 )
73
74 voltage = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
75 op = voltage.addOperation(name='setH0')
76 op.addParameter(name='h0', value='-1.2')
77
78 if args.range > 0:
79 op = voltage.addOperation(name='selectHeights')
80 op.addParameter(name='minIndex', value='0', format='int')
81 op.addParameter(name='maxIndex', value=max_index, format='int')
82
83 op = voltage.addOperation(name='PulsePair_vRF', optype='other')
84 op.addParameter(name='n', value=int(N), format='int')
85
86 proc = project.addProcUnit(datatype='ParametersProc', inputId=voltage.getId())
87 #-----------------------new--------- variables polarimetricas---------------
88 opObj10 = proc.addOperation(name="WeatherRadar")
89 opObj10.addParameter(name='variableList',value='Reflectividad,ReflectividadDiferencial,CoeficienteCorrelacion,FaseDiferencial,VelocidadRadial,AnchoEspectral')
90
91
92
93 op = proc.addOperation(name='PedestalInformation')
94 op.addParameter(name='path', value=path_ped, format='str')
95 op.addParameter(name='interval', value='0.04', format='float')
96 op.addParameter(name='time_offset', value=time_offset)
97 #op.addParameter(name='axis', value=','.join(axis)) # Ask pedestal whether all the elements
98 # here are equal; if they are, take the
99 # first value as the mode (PPI or RHI) and
100 # stop asking for the mode, since it does not change
101
102 for param in parameters:
103 op = proc.addOperation(name='Block360_vRF4')
104 #op.addParameter(name='axis', value=','.join(axis))
105 op.addParameter(name='runNextOp', value=True)
106 op.addParameter(name='attr_data', value=PARAM[param]['name'])
107
108 path_fig = '/AUTO{}km'.format(args.range)
109 op = proc.addOperation(name='WeatherParamsPlot')
110 op.addParameter(name='save', value=path_plots+path_fig, format='str')
111 op.addParameter(name='save_period', value=-1)
112 op.addParameter(name='show', value=args.show)
113 op.addParameter(name='channels', value='(1,)')
114 op.addParameter(name='zmin', value=PARAM[param]['zmin'])
115 op.addParameter(name='zmax', value=PARAM[param]['zmax'])
116 op.addParameter(name='attr_data', value=PARAM[param]['name'], format='str')
117 op.addParameter(name='labels', value=[PARAM[param]['label']])
118 op.addParameter(name='save_code', value=param)
119 op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
120 op.addParameter(name='colormap', value=PARAM[param]['colormap'])
121
122 if args.save:
123 opObj10 = proc.addOperation(name='HDFWriter')
124 opObj10.addParameter(name='path',value=path_save, format='str')
125 opObj10.addParameter(name='Reset',value=True)
126 opObj10.addParameter(name='setType',value='weather')
127 opObj10.addParameter(name='blocksPerFile',value='1',format='int')
128 opObj10.addParameter(name='metadataList',value='heightList,data_azi,data_ele')
129 opObj10.addParameter(name='dataList',value=','.join([PARAM[param]['name'],'utctime']))
130
131 project.start()
132
133
134 if __name__ == '__main__':
135
136 parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
137 parser.add_argument('experiment',
138 help='Experiment name')
139 parser.add_argument('--parameters', nargs='*', default=['P'],
140 help='Variables to process: P, Z, V')
141 parser.add_argument('--time_offset', default=0,
142 help='Fix time offset')
143 parser.add_argument('--range', default=0, type=int,
144 help='Max range to plot')
145 parser.add_argument('--save', action='store_true',
146 help='Create output files')
147 parser.add_argument('--show', action='store_true',
148 help='Show matplotlib plot.')
149
150 args = parser.parse_args()
151 print (args)
152 main(args)
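For reference, a minimal sketch of how the script above could be driven; the script filename, experiment name and option values are illustrative placeholders, not taken from the repository:

# Hypothetical driver for the script above; the experiment name is a placeholder.
# Equivalent CLI (assuming the file is saved as sophy_proc.py):
#   python sophy_proc.py SOPHY_EXPERIMENT --parameters P V --range 30 --save
import argparse

args = argparse.Namespace(
    experiment='SOPHY_EXPERIMENT',  # folder under PATH containing experiment.conf, rawdata/ and position/
    parameters=['P', 'V'],          # keys of PARAM: power and Doppler velocity
    time_offset=0,                  # offset forwarded to PedestalInformation
    range=30,                       # maximum range to select/plot, in km
    save=True,                      # enable the HDFWriter block
    show=False,                     # do not open matplotlib windows
)
main(args)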
@@ -1,735 +1,735 @@
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default
19 19 the files contain two groups, Data and Metadata, and every variable is loaded
20 20 as a `dataOut` attribute.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 try:
112 112 fullpath = next(fullpath)
113 113 except:
114 114 fullpath = None
115 115
116 116 if fullpath:
117 117 break
118 118
119 119 log.warning(
120 120 'Waiting {} sec for a valid file in {}: try {} ...'.format(
121 121 self.delay, self.path, nTries + 1),
122 122 self.name)
123 123 time.sleep(self.delay)
124 124
125 125 if not(fullpath):
126 126 raise schainpy.admin.SchainError(
127 127 'There isn\'t any valid file in {}'.format(self.path))
128 128
129 129 pathname, filename = os.path.split(fullpath)
130 130 self.year = int(filename[1:5])
131 131 self.doy = int(filename[5:8])
132 132 self.set = int(filename[8:11]) - 1
133 133 else:
134 134 log.log("Searching files in {}".format(self.path), self.name)
135 135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
136 136 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
137 137
138 138 self.setNextFile()
139 139
140 140 return
141 141
142 142 def readFirstHeader(self):
143 143 '''Read metadata and data'''
144 144
145 145 self.__readMetadata()
146 146 self.__readData()
147 147 self.__setBlockList()
148 148
149 149 if 'type' in self.meta:
150 150 self.dataOut = eval(self.meta['type'])()
151 151
152 152 for attr in self.meta:
153 153 setattr(self.dataOut, attr, self.meta[attr])
154 154
155 155 self.blockIndex = 0
156 156
157 157 return
158 158
159 159 def __setBlockList(self):
160 160 '''
161 161 Selects the data within the times defined
162 162
163 163 self.fp
164 164 self.startTime
165 165 self.endTime
166 166 self.blockList
167 167 self.blocksPerFile
168 168
169 169 '''
170 170
171 171 startTime = self.startTime
172 172 endTime = self.endTime
173 173 thisUtcTime = self.data['utctime'] + self.utcoffset
174 174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 175 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
176 176
177 177 thisDate = thisDatetime.date()
178 178 thisTime = thisDatetime.time()
179 179
180 180 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 181 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 182
183 183 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
184 184
185 185 self.blockList = ind
186 186 self.blocksPerFile = len(ind)
187 187 return
188 188
189 189 def __readMetadata(self):
190 190 '''
191 191 Reads Metadata
192 192 '''
193 193
194 194 meta = {}
195 195
196 196 if self.description:
197 197 for key, value in self.description['Metadata'].items():
198 198 meta[key] = self.fp[value][()]
199 199 else:
200 200 grp = self.fp['Metadata']
201 201 for name in grp:
202 202 meta[name] = grp[name][()]
203 203
204 204 if self.extras:
205 205 for key, value in self.extras.items():
206 206 meta[key] = value
207 207 self.meta = meta
208 208
209 209 return
210 210
211 211 def __readData(self):
212 212
213 213 data = {}
214 214
215 215 if self.description:
216 216 for key, value in self.description['Data'].items():
217 217 if isinstance(value, str):
218 218 if isinstance(self.fp[value], h5py.Dataset):
219 219 data[key] = self.fp[value][()]
220 220 elif isinstance(self.fp[value], h5py.Group):
221 221 array = []
222 222 for ch in self.fp[value]:
223 223 array.append(self.fp[value][ch][()])
224 224 data[key] = numpy.array(array)
225 225 elif isinstance(value, list):
226 226 array = []
227 227 for ch in value:
228 228 array.append(self.fp[ch][()])
229 229 data[key] = numpy.array(array)
230 230 else:
231 231 grp = self.fp['Data']
232 232 for name in grp:
233 233 if isinstance(grp[name], h5py.Dataset):
234 234 array = grp[name][()]
235 235 elif isinstance(grp[name], h5py.Group):
236 236 array = []
237 237 for ch in grp[name]:
238 238 array.append(grp[name][ch][()])
239 239 array = numpy.array(array)
240 240 else:
241 241 log.warning('Unknown type: {}'.format(name))
242 242
243 243 if name in self.description:
244 244 key = self.description[name]
245 245 else:
246 246 key = name
247 247 data[key] = array
248 248
249 249 self.data = data
250 250 return
251 251
252 252 def getData(self):
253 253
254 254 for attr in self.data:
255 255 if self.data[attr].ndim == 1:
256 256 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
257 257 else:
258 258 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
259 259
260 260 self.dataOut.flagNoData = False
261 261 self.blockIndex += 1
262 262
263 263 log.log("Block No. {}/{} -> {}".format(
264 264 self.blockIndex,
265 265 self.blocksPerFile,
266 266 self.dataOut.datatime.ctime()), self.name)
267 267
268 268 return
269 269
270 270 def run(self, **kwargs):
271 271
272 272 if not(self.isConfig):
273 273 self.setup(**kwargs)
274 274 self.isConfig = True
275 275
276 276 if self.blockIndex == self.blocksPerFile:
277 277 self.setNextFile()
278 278
279 279 self.getData()
280 280
281 281 return
282 282
283 283 @MPDecorator
284 284 class HDFWriter(Operation):
285 285 """Operation to write HDF5 files.
286 286
287 287 By default the HDF5 file contains two groups, Data and Metadata, where
288 288 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
289 289 parameters; data attributes are normally time dependent whereas the metadata
290 290 are not.
291 291 It is possible to customize the structure of the HDF5 file with the
292 292 optional `description` parameter, see the examples.
293 293
294 294 Parameters:
295 295 -----------
296 296 path : str
297 297 Path where files will be saved.
298 298 blocksPerFile : int
299 299 Number of blocks per file
300 300 metadataList : list
301 301 List of the dataOut attributes that will be saved as metadata
302 302 dataList : list
303 303 List of the dataOut attributes that will be saved as data
304 304 setType : str, optional
305 305 If set, the name of the files is based on the timestamp of the data
306 306 description : dict, optional
307 307 Dictionary with the desired description of the HDF5 file
308 308
309 309 Examples
310 310 --------
311 311
312 312 desc = {
313 313 'data_output': {'winds': ['z', 'w', 'v']},
314 314 'utctime': 'timestamps',
315 315 'heightList': 'heights'
316 316 }
317 317 desc = {
318 318 'data_output': ['z', 'w', 'v'],
319 319 'utctime': 'timestamps',
320 320 'heightList': 'heights'
321 321 }
322 322 desc = {
323 323 'Data': {
324 324 'data_output': 'winds',
325 325 'utctime': 'timestamps'
326 326 },
327 327 'Metadata': {
328 328 'heightList': 'heights'
329 329 }
330 330 }
331 331
332 332 writer = proc_unit.addOperation(name='HDFWriter')
333 333 writer.addParameter(name='path', value='/path/to/file')
334 334 writer.addParameter(name='blocksPerFile', value='32')
335 335 writer.addParameter(name='metadataList', value='heightList,timeZone')
336 336 writer.addParameter(name='dataList',value='data_output,utctime')
337 337 # writer.addParameter(name='description',value=json.dumps(desc))
338 338
339 339 """
340 340
341 341 ext = ".hdf5"
342 342 optchar = "D"
343 343 filename = None
344 344 path = None
345 345 setFile = None
346 346 fp = None
347 347 firsttime = True
348 348 #Configurations
349 349 blocksPerFile = None
350 350 blockIndex = None
351 351 dataOut = None
352 352 #Data Arrays
353 353 dataList = None
354 354 metadataList = None
355 355 currentDay = None
356 356 lastTime = None
357 357 last_Azipos = None
358 358 last_Elepos = None
359 359 mode = None
360 360 #-----------------------
361 361 Typename = None
362 362
363 363
364 364
365 365 def __init__(self):
366 366
367 367 Operation.__init__(self)
368 368 return
369 369
370 370
371 371 def set_kwargs(self, **kwargs):
372 372
373 373 for key, value in kwargs.items():
374 374 setattr(self, key, value)
375 375
376 376 def set_kwargs_obj(self,obj, **kwargs):
377 377
378 378 for key, value in kwargs.items():
379 379 setattr(obj, key, value)
380 380
381 381 def generalFlag(self):
382 382 ####rint("GENERALFLAG")
383 383 if self.mode== "weather":
384 384 if self.last_Azipos == None:
385 385 tmp = self.dataOut.azimuth
386 386 ####print("ang azimuth writer",tmp)
387 387 self.last_Azipos = tmp
388 388 flag = False
389 389 return flag
390 390 ####print("ang_azimuth writer",self.dataOut.azimuth)
391 391 result = self.dataOut.azimuth - self.last_Azipos
392 392 self.last_Azipos = self.dataOut.azimuth
393 393 if result<0:
394 394 flag = True
395 395 return flag
396 396
397 397 def generalFlag_vRF(self):
398 398 ####rint("GENERALFLAG")
399 399
400 400 try:
401 401 self.dataOut.flagBlock360Done
402 402 return self.dataOut.flagBlock360Done
403 403 except:
404 404 return 0
405 405
406 406
407 407 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None,type_data=None,**kwargs):
408 408 self.path = path
409 409 self.blocksPerFile = blocksPerFile
410 410 self.metadataList = metadataList
411 411 self.dataList = [s.strip() for s in dataList]
412 412 self.setType = setType
413 413 if self.mode == "weather":
414 414 self.setType = "weather"
415 415 self.set_kwargs(**kwargs)
416 416 self.set_kwargs_obj(self.dataOut,**kwargs)
417 417
418 418
419 419 self.description = description
420 420 self.type_data=type_data
421 421
422 422 if self.metadataList is None:
423 423 self.metadataList = self.dataOut.metadata_list
424 424
425 425 tableList = []
426 426 dsList = []
427 427
428 428 for i in range(len(self.dataList)):
429 429 dsDict = {}
430 430 if hasattr(self.dataOut, self.dataList[i]):
431 431 dataAux = getattr(self.dataOut, self.dataList[i])
432 432 dsDict['variable'] = self.dataList[i]
433 433 else:
434 434 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
435 435 continue
436 436
437 437 if dataAux is None:
438 438 continue
439 439 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
440 440 dsDict['nDim'] = 0
441 441 else:
442 442 dsDict['nDim'] = len(dataAux.shape)
443 443 dsDict['shape'] = dataAux.shape
444 444 dsDict['dsNumber'] = dataAux.shape[0]
445 445 dsDict['dtype'] = dataAux.dtype
446 446 dsList.append(dsDict)
447 447
448 448 self.dsList = dsList
449 449 self.currentDay = self.dataOut.datatime.date()
450 450
451 451 def timeFlag(self):
452 452 currentTime = self.dataOut.utctime
453 453 timeTuple = time.localtime(currentTime)
454 454 dataDay = timeTuple.tm_yday
455 455
456 456 if self.lastTime is None:
457 457 self.lastTime = currentTime
458 458 self.currentDay = dataDay
459 459 return False
460 460
461 461 timeDiff = currentTime - self.lastTime
462 462
463 463 # If the day is different or the time gap between consecutive samples exceeds the limit
464 464 if dataDay != self.currentDay:
465 465 self.currentDay = dataDay
466 466 return True
467 467 elif timeDiff > 3*60*60:
468 468 self.lastTime = currentTime
469 469 return True
470 470 else:
471 471 self.lastTime = currentTime
472 472 return False
473 473
474 474 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
475 475 dataList=[], setType=None, description={},mode= None,type_data=None,Reset = False,**kwargs):
476 476
477 477 if Reset:
478 478 self.isConfig = False
479 479 self.closeFile()
480 480 self.lastTime = None
481 481 self.blockIndex = 0
482 482
483 483 self.dataOut = dataOut
484 484 self.mode = mode
485 485 self.var = dataList[0]
486 486
487 487 if not(self.isConfig):
488 488 self.setup(path=path, blocksPerFile=blocksPerFile,
489 489 metadataList=metadataList, dataList=dataList,
490 490 setType=setType, description=description,type_data=type_data,**kwargs)
491 491
492 492 self.isConfig = True
493 493 self.setNextFile()
494 494
495 495 self.putData()
496 496 return
497 497
498 498 def setNextFile(self):
499 499 ###print("HELLO WORLD--------------------------------")
500 500 ext = self.ext
501 501 path = self.path
502 502 setFile = self.setFile
503 503 type_data = self.type_data
504 504
505 505 timeTuple = time.localtime(self.dataOut.utctime)
506 506 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
507 507 fullpath = os.path.join(path, subfolder)
508 508
509 509 if os.path.exists(fullpath):
510 510 filesList = os.listdir(fullpath)
511 511 filesList = [k for k in filesList if k.startswith(self.optchar)]
512 512 if len( filesList ) > 0:
513 513 filesList = sorted(filesList, key=str.lower)
514 514 filen = filesList[-1]
515 515 # the filename should have the following format
516 516 # 0 1234 567 89A BCDE (hex)
517 517 # x YYYY DDD SSS .ext
518 518 if isNumber(filen[8:11]):
519 519 setFile = int(filen[8:11]) # initialize the set counter to the set number of the last file
520 520 else:
521 521 setFile = -1
522 522 else:
523 523 setFile = -1 # initialize the set counter
524 524 else:
525 525 os.makedirs(fullpath)
526 526 setFile = -1 # initialize the set counter
527 527
528 528 ###print("**************************",self.setType)
529 529 if self.setType is None:
530 530 setFile += 1
531 531 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
532 532 timeTuple.tm_year,
533 533 timeTuple.tm_yday,
534 534 setFile,
535 535 ext )
536 536 elif self.setType == "weather":
537 537
538 538 if self.var.lower() == 'Zdb'.lower():
539 539 wr_type = 'Z'
540 540 elif self.var.lower() == 'Zdb_D'.lower():
541 541 wr_type = 'D'
542 542 elif self.var.lower() == 'PhiD_P'.lower():
543 543 wr_type = 'P'
544 544 elif self.var.lower() == 'RhoHV_R'.lower():
545 545 wr_type = 'R'
546 546 elif self.var.lower() == 'velRadial_V'.lower():
547 547 wr_type = 'V'
548 548 elif self.var.lower() == 'Sigmav_W'.lower():
549 549 wr_type = 'S'
550 550 elif self.var.lower() == 'dataPP_POWER'.lower():
551 551 wr_type = 'Pow'
552 552 elif self.var.lower() == 'dataPP_DOP'.lower():
553 553 wr_type = 'Dop'
554 554
555 555
556 556 #Z_SOPHy_El10.0_20200505_14:02:15.h5
557 557 #Z_SOPHy_Az40.0_20200505_14:02:15.h5
558 558 if self.dataOut.flagMode == 1: #'AZI' #PPI
559 559 ang_type = 'El'
560 560 len_aux = int(self.dataOut.data_ele.shape[0]/4)
561 mean = numpy.mean(self.dataOut.data_ele[len_aux:-len:aux])
561 mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
562 562 ang_ = round(mean,1)
563 563 elif self.dataOut.flagMode == 0: #'ELE' #RHI
564 564 ang_type = 'Az'
565 565 len_aux = int(self.dataOut.data_azi.shape[0]/4)
566 mean = numpy.mean(self.dataOut.data_azi[len_aux:-len:aux])
566 mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
567 567 ang_ = round(mean,1)
568 568
569 569 file = '%s%s%s%2.1f%s%2.2d%2.2d%2.2d%s%2.2d%2.2d%2.2d%s' % (wr_type,
570 570 '_SOPHy_',
571 571 ang_type,
572 572 ang_,
573 573 '_',
574 574 timeTuple.tm_year,
575 575 timeTuple.tm_mon,
576 576 timeTuple.tm_mday,
577 577 '_',
578 578 timeTuple.tm_hour,
579 579 timeTuple.tm_min,
580 580 timeTuple.tm_sec,
581 581 ext )
582 582
583 583 else:
584 584 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
585 585 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
586 586 timeTuple.tm_year,
587 587 timeTuple.tm_yday,
588 588 setFile,
589 589 ext )
590 590
591 591 self.filename = os.path.join( path, subfolder, file )
592 592
593 593 #Setting HDF5 File
594 594 #print("filename",self.filename)
595 595 self.fp = h5py.File(self.filename, 'w')
596 596 #write metadata
597 597 self.writeMetadata(self.fp)
598 598 #Write data
599 599 self.writeData(self.fp)
600 600
601 601 def getLabel(self, name, x=None):
602 602
603 603 if x is None:
604 604 if 'Data' in self.description:
605 605 data = self.description['Data']
606 606 if 'Metadata' in self.description:
607 607 data.update(self.description['Metadata'])
608 608 else:
609 609 data = self.description
610 610 if name in data:
611 611 if isinstance(data[name], str):
612 612 return data[name]
613 613 elif isinstance(data[name], list):
614 614 return None
615 615 elif isinstance(data[name], dict):
616 616 for key, value in data[name].items():
617 617 return key
618 618 return name
619 619 else:
620 620 if 'Metadata' in self.description:
621 621 meta = self.description['Metadata']
622 622 else:
623 623 meta = self.description
624 624 if name in meta:
625 625 if isinstance(meta[name], list):
626 626 return meta[name][x]
627 627 elif isinstance(meta[name], dict):
628 628 for key, value in meta[name].items():
629 629 return value[x]
630 630 if 'cspc' in name:
631 631 return 'pair{:02d}'.format(x)
632 632 else:
633 633 return 'channel{:02d}'.format(x)
634 634
635 635 def writeMetadata(self, fp):
636 636
637 637 if self.description:
638 638 if 'Metadata' in self.description:
639 639 grp = fp.create_group('Metadata')
640 640 else:
641 641 grp = fp
642 642 else:
643 643 grp = fp.create_group('Metadata')
644 644
645 645 for i in range(len(self.metadataList)):
646 646 if not hasattr(self.dataOut, self.metadataList[i]):
647 647 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
648 648 continue
649 649 value = getattr(self.dataOut, self.metadataList[i])
650 650 if isinstance(value, bool):
651 651 if value is True:
652 652 value = 1
653 653 else:
654 654 value = 0
655 655 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
656 656 return
657 657
658 658 def writeData(self, fp):
659 659
660 660 if self.description:
661 661 if 'Data' in self.description:
662 662 grp = fp.create_group('Data')
663 663 else:
664 664 grp = fp
665 665 else:
666 666 grp = fp.create_group('Data')
667 667
668 668 dtsets = []
669 669 data = []
670 670
671 671 for dsInfo in self.dsList:
672 672 if dsInfo['nDim'] == 0:
673 673 ds = grp.create_dataset(
674 674 self.getLabel(dsInfo['variable']),
675 675 (self.blocksPerFile, ),
676 676 chunks=True,
677 677 dtype=numpy.float64)
678 678 dtsets.append(ds)
679 679 data.append((dsInfo['variable'], -1))
680 680 else:
681 681 label = self.getLabel(dsInfo['variable'])
682 682 if label is not None:
683 683 sgrp = grp.create_group(label)
684 684 else:
685 685 sgrp = grp
686 686 for i in range(dsInfo['dsNumber']):
687 687 ds = sgrp.create_dataset(
688 688 self.getLabel(dsInfo['variable'], i),
689 689 (self.blocksPerFile, ) + dsInfo['shape'][1:],
690 690 chunks=True,
691 691 dtype=dsInfo['dtype'])
692 692 dtsets.append(ds)
693 693 data.append((dsInfo['variable'], i))
694 694 fp.flush()
695 695
696 696 log.log('Creating file: {}'.format(fp.filename), self.name)
697 697
698 698 self.ds = dtsets
699 699 self.data = data
700 700 self.firsttime = True
701 701 self.blockIndex = 0
702 702 return
703 703
704 704 def putData(self):
705 705
706 706 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():# or self.generalFlag_vRF():
707 707 self.closeFile()
708 708 self.setNextFile()
709 709
710 710 for i, ds in enumerate(self.ds):
711 711 attr, ch = self.data[i]
712 712 if ch == -1:
713 713 ds[self.blockIndex] = getattr(self.dataOut, attr)
714 714 else:
715 715 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
716 716
717 717 self.fp.flush()
718 718 self.blockIndex += 1
719 719 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
720 720
721 721 return
722 722
723 723 def closeFile(self):
724 724
725 725 if self.blockIndex != self.blocksPerFile:
726 726 for ds in self.ds:
727 727 ds.resize(self.blockIndex, axis=0)
728 728
729 729 if self.fp:
730 730 self.fp.flush()
731 731 self.fp.close()
732 732
733 733 def close(self):
734 734
735 735 self.closeFile()
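As a quick illustration of the 'weather' naming branch in HDFWriter.setNextFile above, a standalone sketch (the variable code, angle and timestamp are arbitrary example values):

# Reproduces the 'weather' file-name format string from setNextFile;
# all values below are arbitrary examples, not taken from real data.
wr_type, ang_type, ang_, ext = 'Pow', 'El', 10.0, '.hdf5'
name = '%s%s%s%2.1f%s%2.2d%2.2d%2.2d%s%2.2d%2.2d%2.2d%s' % (
    wr_type, '_SOPHy_', ang_type, ang_, '_',
    2022, 4, 22, '_', 17, 42, 55, ext)
print(name)  # -> Pow_SOPHy_El10.0_20220422_174255.hdf5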