fix save data and test multiple plot
avaldezp -
r1464:3250059d6d95
@@ -1,735 +1,734
1 import os
1 import os
2 import time
2 import time
3 import datetime
3 import datetime
4
4
5 import numpy
5 import numpy
6 import h5py
6 import h5py
7
7
8 import schainpy.admin
8 import schainpy.admin
9 from schainpy.model.data.jrodata import *
9 from schainpy.model.data.jrodata import *
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.io.jroIO_base import *
11 from schainpy.model.io.jroIO_base import *
12 from schainpy.utils import log
12 from schainpy.utils import log
13
13
14
14
15 class HDFReader(Reader, ProcessingUnit):
15 class HDFReader(Reader, ProcessingUnit):
16 """Processing unit to read HDF5 format files
16 """Processing unit to read HDF5 format files
17
17
18 This unit reads HDF5 files created with the `HDFWriter` operation. By default
19 the files contain two groups, Data and Metadata, and all variables are loaded
20 as `dataOut` attributes.
21 It is possible to read any HDF5 file by giving its structure in the `description`
22 parameter; extra values can also be added to the metadata with the `extras` parameter.
23
23
24 Parameters:
24 Parameters:
25 -----------
25 -----------
26 path : str
26 path : str
27 Path where files are located.
27 Path where files are located.
28 startDate : date
28 startDate : date
29 Start date of the files
29 Start date of the files
30 endDate : date
31 End date of the files
32 startTime : time
32 startTime : time
33 Start time of the files
33 Start time of the files
34 endTime : time
34 endTime : time
35 End time of the files
35 End time of the files
36 description : dict, optional
36 description : dict, optional
37 Dictionary with the description of the HDF5 file
37 Dictionary with the description of the HDF5 file
38 extras : dict, optional
38 extras : dict, optional
39 Dictionary with extra metadata to be added to `dataOut`
40
40
41 Examples
41 Examples
42 --------
42 --------
43
43
44 desc = {
44 desc = {
45 'Data': {
45 'Data': {
46 'data_output': ['u', 'v', 'w'],
46 'data_output': ['u', 'v', 'w'],
47 'utctime': 'timestamps',
47 'utctime': 'timestamps',
48 } ,
48 } ,
49 'Metadata': {
49 'Metadata': {
50 'heightList': 'heights'
50 'heightList': 'heights'
51 }
51 }
52 }
52 }
53
53
54 desc = {
54 desc = {
55 'Data': {
55 'Data': {
56 'data_output': 'winds',
56 'data_output': 'winds',
57 'utctime': 'timestamps'
57 'utctime': 'timestamps'
58 },
58 },
59 'Metadata': {
59 'Metadata': {
60 'heightList': 'heights'
60 'heightList': 'heights'
61 }
61 }
62 }
62 }
63
63
64 extras = {
64 extras = {
65 'timeZone': 300
65 'timeZone': 300
66 }
66 }
67
67
68 reader = project.addReadUnit(
68 reader = project.addReadUnit(
69 name='HDFReader',
69 name='HDFReader',
70 path='/path/to/files',
70 path='/path/to/files',
71 startDate='2019/01/01',
71 startDate='2019/01/01',
72 endDate='2019/01/31',
72 endDate='2019/01/31',
73 startTime='00:00:00',
73 startTime='00:00:00',
74 endTime='23:59:59',
74 endTime='23:59:59',
75 # description=json.dumps(desc),
75 # description=json.dumps(desc),
76 # extras=json.dumps(extras),
76 # extras=json.dumps(extras),
77 )
77 )
78
78
79 """
79 """
80
80
81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82
82
83 def __init__(self):
83 def __init__(self):
84 ProcessingUnit.__init__(self)
84 ProcessingUnit.__init__(self)
85 self.dataOut = Parameters()
85 self.dataOut = Parameters()
86 self.ext = ".hdf5"
86 self.ext = ".hdf5"
87 self.optchar = "D"
87 self.optchar = "D"
88 self.meta = {}
88 self.meta = {}
89 self.data = {}
89 self.data = {}
90 self.open_file = h5py.File
90 self.open_file = h5py.File
91 self.open_mode = 'r'
91 self.open_mode = 'r'
92 self.description = {}
92 self.description = {}
93 self.extras = {}
93 self.extras = {}
94 self.filefmt = "*%Y%j***"
94 self.filefmt = "*%Y%j***"
95 self.folderfmt = "*%Y%j"
95 self.folderfmt = "*%Y%j"
96 self.utcoffset = 0
96 self.utcoffset = 0
97
97
98 def setup(self, **kwargs):
98 def setup(self, **kwargs):
99
99
100 self.set_kwargs(**kwargs)
100 self.set_kwargs(**kwargs)
101 if not self.ext.startswith('.'):
101 if not self.ext.startswith('.'):
102 self.ext = '.{}'.format(self.ext)
102 self.ext = '.{}'.format(self.ext)
103
103
104 if self.online:
104 if self.online:
105 log.log("Searching files in online mode...", self.name)
105 log.log("Searching files in online mode...", self.name)
106
106
107 for nTries in range(self.nTries):
107 for nTries in range(self.nTries):
108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 self.endDate, self.expLabel, self.ext, self.walk,
109 self.endDate, self.expLabel, self.ext, self.walk,
110 self.filefmt, self.folderfmt)
110 self.filefmt, self.folderfmt)
111 try:
111 try:
112 fullpath = next(fullpath)
112 fullpath = next(fullpath)
113 except StopIteration:
114 fullpath = None
114 fullpath = None
115
115
116 if fullpath:
116 if fullpath:
117 break
117 break
118
118
119 log.warning(
119 log.warning(
120 'Waiting {} sec for a valid file in {}: try {} ...'.format(
120 'Waiting {} sec for a valid file in {}: try {} ...'.format(
121 self.delay, self.path, nTries + 1),
121 self.delay, self.path, nTries + 1),
122 self.name)
122 self.name)
123 time.sleep(self.delay)
123 time.sleep(self.delay)
124
124
125 if not(fullpath):
125 if not(fullpath):
126 raise schainpy.admin.SchainError(
126 raise schainpy.admin.SchainError(
127 'There isn\'t any valid file in {}'.format(self.path))
127 'There isn\'t any valid file in {}'.format(self.path))
128
128
129 pathname, filename = os.path.split(fullpath)
129 pathname, filename = os.path.split(fullpath)
130 self.year = int(filename[1:5])
130 self.year = int(filename[1:5])
131 self.doy = int(filename[5:8])
131 self.doy = int(filename[5:8])
132 self.set = int(filename[8:11]) - 1
132 self.set = int(filename[8:11]) - 1
133 else:
133 else:
134 log.log("Searching files in {}".format(self.path), self.name)
134 log.log("Searching files in {}".format(self.path), self.name)
135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
136 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
136 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
137
137
138 self.setNextFile()
138 self.setNextFile()
139
139
140 return
140 return
141
141
142 def readFirstHeader(self):
142 def readFirstHeader(self):
143 '''Read metadata and data'''
143 '''Read metadata and data'''
144
144
145 self.__readMetadata()
145 self.__readMetadata()
146 self.__readData()
146 self.__readData()
147 self.__setBlockList()
147 self.__setBlockList()
148
148
149 if 'type' in self.meta:
149 if 'type' in self.meta:
150 self.dataOut = eval(self.meta['type'])()
150 self.dataOut = eval(self.meta['type'])()
151
151
152 for attr in self.meta:
152 for attr in self.meta:
153 setattr(self.dataOut, attr, self.meta[attr])
153 setattr(self.dataOut, attr, self.meta[attr])
154
154
155 self.blockIndex = 0
155 self.blockIndex = 0
156
156
157 return
157 return
158
158
159 def __setBlockList(self):
159 def __setBlockList(self):
160 '''
160 '''
161 Selects the data within the times defined
161 Selects the data within the times defined
162
162
163 self.fp
163 self.fp
164 self.startTime
164 self.startTime
165 self.endTime
165 self.endTime
166 self.blockList
166 self.blockList
167 self.blocksPerFile
167 self.blocksPerFile
168
168
169 '''
169 '''
170
170
171 startTime = self.startTime
171 startTime = self.startTime
172 endTime = self.endTime
172 endTime = self.endTime
173 thisUtcTime = self.data['utctime'] + self.utcoffset
173 thisUtcTime = self.data['utctime'] + self.utcoffset
174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
175 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
176
176
177 thisDate = thisDatetime.date()
177 thisDate = thisDatetime.date()
178 thisTime = thisDatetime.time()
178 thisTime = thisDatetime.time()
179
179
180 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
180 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182
182
183 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
183 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
184
184
185 self.blockList = ind
185 self.blockList = ind
186 self.blocksPerFile = len(ind)
186 self.blocksPerFile = len(ind)
187 return
187 return
188
188
189 def __readMetadata(self):
189 def __readMetadata(self):
190 '''
190 '''
191 Reads Metadata
191 Reads Metadata
192 '''
192 '''
193
193
194 meta = {}
194 meta = {}
195
195
196 if self.description:
196 if self.description:
197 for key, value in self.description['Metadata'].items():
197 for key, value in self.description['Metadata'].items():
198 meta[key] = self.fp[value][()]
198 meta[key] = self.fp[value][()]
199 else:
199 else:
200 grp = self.fp['Metadata']
200 grp = self.fp['Metadata']
201 for name in grp:
201 for name in grp:
202 meta[name] = grp[name][()]
202 meta[name] = grp[name][()]
203
203
204 if self.extras:
204 if self.extras:
205 for key, value in self.extras.items():
205 for key, value in self.extras.items():
206 meta[key] = value
206 meta[key] = value
207 self.meta = meta
207 self.meta = meta
208
208
209 return
209 return
210
210
211 def __readData(self):
211 def __readData(self):
212
212
213 data = {}
213 data = {}
214
214
215 if self.description:
215 if self.description:
216 for key, value in self.description['Data'].items():
216 for key, value in self.description['Data'].items():
217 if isinstance(value, str):
217 if isinstance(value, str):
218 if isinstance(self.fp[value], h5py.Dataset):
218 if isinstance(self.fp[value], h5py.Dataset):
219 data[key] = self.fp[value][()]
219 data[key] = self.fp[value][()]
220 elif isinstance(self.fp[value], h5py.Group):
220 elif isinstance(self.fp[value], h5py.Group):
221 array = []
221 array = []
222 for ch in self.fp[value]:
222 for ch in self.fp[value]:
223 array.append(self.fp[value][ch][()])
223 array.append(self.fp[value][ch][()])
224 data[key] = numpy.array(array)
224 data[key] = numpy.array(array)
225 elif isinstance(value, list):
225 elif isinstance(value, list):
226 array = []
226 array = []
227 for ch in value:
227 for ch in value:
228 array.append(self.fp[ch][()])
228 array.append(self.fp[ch][()])
229 data[key] = numpy.array(array)
229 data[key] = numpy.array(array)
230 else:
230 else:
231 grp = self.fp['Data']
231 grp = self.fp['Data']
232 for name in grp:
232 for name in grp:
233 if isinstance(grp[name], h5py.Dataset):
233 if isinstance(grp[name], h5py.Dataset):
234 array = grp[name][()]
234 array = grp[name][()]
235 elif isinstance(grp[name], h5py.Group):
235 elif isinstance(grp[name], h5py.Group):
236 array = []
236 array = []
237 for ch in grp[name]:
237 for ch in grp[name]:
238 array.append(grp[name][ch][()])
238 array.append(grp[name][ch][()])
239 array = numpy.array(array)
239 array = numpy.array(array)
240 else:
240 else:
241 log.warning('Unknown type: {}'.format(name))
241 log.warning('Unknown type: {}'.format(name))
242
242
243 if name in self.description:
243 if name in self.description:
244 key = self.description[name]
244 key = self.description[name]
245 else:
245 else:
246 key = name
246 key = name
247 data[key] = array
247 data[key] = array
248
248
249 self.data = data
249 self.data = data
250 return
250 return
251
251
252 def getData(self):
252 def getData(self):
253
253
254 for attr in self.data:
254 for attr in self.data:
255 if self.data[attr].ndim == 1:
255 if self.data[attr].ndim == 1:
256 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
256 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
257 else:
257 else:
258 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
258 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
259
259
260 self.dataOut.flagNoData = False
260 self.dataOut.flagNoData = False
261 self.blockIndex += 1
261 self.blockIndex += 1
262
262
263 log.log("Block No. {}/{} -> {}".format(
263 log.log("Block No. {}/{} -> {}".format(
264 self.blockIndex,
264 self.blockIndex,
265 self.blocksPerFile,
265 self.blocksPerFile,
266 self.dataOut.datatime.ctime()), self.name)
266 self.dataOut.datatime.ctime()), self.name)
267
267
268 return
268 return
269
269
270 def run(self, **kwargs):
270 def run(self, **kwargs):
271
271
272 if not(self.isConfig):
272 if not(self.isConfig):
273 self.setup(**kwargs)
273 self.setup(**kwargs)
274 self.isConfig = True
274 self.isConfig = True
275
275
276 if self.blockIndex == self.blocksPerFile:
276 if self.blockIndex == self.blocksPerFile:
277 self.setNextFile()
277 self.setNextFile()
278
278
279 self.getData()
279 self.getData()
280
280
281 return
281 return
282
282
283 @MPDecorator
283 @MPDecorator
284 class HDFWriter(Operation):
284 class HDFWriter(Operation):
285 """Operation to write HDF5 files.
285 """Operation to write HDF5 files.
286
286
287 By default the HDF5 file contains two groups, Data and Metadata, where any
288 `dataOut` attribute specified by the `dataList` and `metadataList` parameters
289 can be saved; data attributes are normally time dependent whereas metadata
290 attributes are not.
291 It is possible to customize the structure of the HDF5 file with the
292 optional `description` parameter; see the examples.
293
293
294 Parameters:
294 Parameters:
295 -----------
295 -----------
296 path : str
296 path : str
297 Path where files will be saved.
297 Path where files will be saved.
298 blocksPerFile : int
298 blocksPerFile : int
299 Number of blocks per file
299 Number of blocks per file
300 metadataList : list
300 metadataList : list
301 List of the dataOut attributes that will be saved as metadata
301 List of the dataOut attributes that will be saved as metadata
302 dataList : list
303 List of the dataOut attributes that will be saved as data
304 setType : str, optional
305 File naming scheme; if given, file names are built from the data timestamp ('weather' selects the SOPHy-style name)
306 description : dict, optional
306 description : dict, optional
307 Dictionary with the desired description of the HDF5 file
307 Dictionary with the desired description of the HDF5 file
308
308
309 Examples
309 Examples
310 --------
310 --------
311
311
312 desc = {
312 desc = {
313 'data_output': {'winds': ['z', 'w', 'v']},
313 'data_output': {'winds': ['z', 'w', 'v']},
314 'utctime': 'timestamps',
314 'utctime': 'timestamps',
315 'heightList': 'heights'
315 'heightList': 'heights'
316 }
316 }
317 desc = {
317 desc = {
318 'data_output': ['z', 'w', 'v'],
318 'data_output': ['z', 'w', 'v'],
319 'utctime': 'timestamps',
319 'utctime': 'timestamps',
320 'heightList': 'heights'
320 'heightList': 'heights'
321 }
321 }
322 desc = {
322 desc = {
323 'Data': {
323 'Data': {
324 'data_output': 'winds',
324 'data_output': 'winds',
325 'utctime': 'timestamps'
325 'utctime': 'timestamps'
326 },
326 },
327 'Metadata': {
327 'Metadata': {
328 'heightList': 'heights'
328 'heightList': 'heights'
329 }
329 }
330 }
330 }
331
331
332 writer = proc_unit.addOperation(name='HDFWriter')
332 writer = proc_unit.addOperation(name='HDFWriter')
333 writer.addParameter(name='path', value='/path/to/file')
333 writer.addParameter(name='path', value='/path/to/file')
334 writer.addParameter(name='blocksPerFile', value='32')
334 writer.addParameter(name='blocksPerFile', value='32')
335 writer.addParameter(name='metadataList', value='heightList,timeZone')
335 writer.addParameter(name='metadataList', value='heightList,timeZone')
336 writer.addParameter(name='dataList',value='data_output,utctime')
336 writer.addParameter(name='dataList',value='data_output,utctime')
337 # writer.addParameter(name='description',value=json.dumps(desc))
337 # writer.addParameter(name='description',value=json.dumps(desc))
338
338
339 """
339 """
340
340
341 ext = ".hdf5"
341 ext = ".hdf5"
342 optchar = "D"
342 optchar = "D"
343 filename = None
343 filename = None
344 path = None
344 path = None
345 setFile = None
345 setFile = None
346 fp = None
346 fp = None
347 firsttime = True
347 firsttime = True
348 #Configurations
348 #Configurations
349 blocksPerFile = None
349 blocksPerFile = None
350 blockIndex = None
350 blockIndex = None
351 dataOut = None
351 dataOut = None
352 #Data Arrays
352 #Data Arrays
353 dataList = None
353 dataList = None
354 metadataList = None
354 metadataList = None
355 currentDay = None
355 currentDay = None
356 lastTime = None
356 lastTime = None
357 last_Azipos = None
357 last_Azipos = None
358 last_Elepos = None
358 last_Elepos = None
359 mode = None
359 mode = None
360 #-----------------------
360 #-----------------------
361 Typename = None
361 Typename = None
362
362
363
363
364
364
365 def __init__(self):
365 def __init__(self):
366
366
367 Operation.__init__(self)
367 Operation.__init__(self)
368 return
368 return
369
369
370
370
371 def set_kwargs(self, **kwargs):
371 def set_kwargs(self, **kwargs):
372
372
373 for key, value in kwargs.items():
373 for key, value in kwargs.items():
374 setattr(self, key, value)
374 setattr(self, key, value)
375
375
376 def set_kwargs_obj(self,obj, **kwargs):
376 def set_kwargs_obj(self,obj, **kwargs):
377
377
378 for key, value in kwargs.items():
378 for key, value in kwargs.items():
379 setattr(obj, key, value)
379 setattr(obj, key, value)
380
380
381 def generalFlag(self):
381 def generalFlag(self):
382 ####rint("GENERALFLAG")
382 ####rint("GENERALFLAG")
383 if self.mode== "weather":
383 if self.mode== "weather":
384 if self.last_Azipos == None:
384 if self.last_Azipos == None:
385 tmp = self.dataOut.azimuth
385 tmp = self.dataOut.azimuth
386 ####print("ang azimuth writer",tmp)
386 ####print("ang azimuth writer",tmp)
387 self.last_Azipos = tmp
387 self.last_Azipos = tmp
388 flag = False
388 flag = False
389 return flag
389 return flag
390 ####print("ang_azimuth writer",self.dataOut.azimuth)
390 ####print("ang_azimuth writer",self.dataOut.azimuth)
391 result = self.dataOut.azimuth - self.last_Azipos
391 result = self.dataOut.azimuth - self.last_Azipos
392 self.last_Azipos = self.dataOut.azimuth
392 self.last_Azipos = self.dataOut.azimuth
393 if result<0:
393 if result<0:
394 flag = True
394 flag = True
395 return flag
395 return flag
396
396
397 def generalFlag_vRF(self):
397 def generalFlag_vRF(self):
398 ####rint("GENERALFLAG")
398 ####rint("GENERALFLAG")
399
399
400 try:
400 try:
401 self.dataOut.flagBlock360Done
401 self.dataOut.flagBlock360Done
402 return self.dataOut.flagBlock360Done
402 return self.dataOut.flagBlock360Done
403 except:
403 except:
404 return 0
404 return 0
405
405
406
406
407 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None,type_data=None,**kwargs):
407 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None,type_data=None,**kwargs):
408 self.path = path
408 self.path = path
409 self.blocksPerFile = blocksPerFile
409 self.blocksPerFile = blocksPerFile
410 self.metadataList = metadataList
410 self.metadataList = metadataList
411 self.dataList = [s.strip() for s in dataList]
411 self.dataList = [s.strip() for s in dataList]
412 self.setType = setType
412 self.setType = setType
413 if self.mode == "weather":
413 if self.setType == "weather":
414 self.setType = "weather"
415 self.set_kwargs(**kwargs)
414 self.set_kwargs(**kwargs)
416 self.set_kwargs_obj(self.dataOut,**kwargs)
415 self.set_kwargs_obj(self.dataOut,**kwargs)
417
416
418
417
419 self.description = description
418 self.description = description
420 self.type_data=type_data
419 self.type_data=type_data
421
420
422 if self.metadataList is None:
421 if self.metadataList is None:
423 self.metadataList = self.dataOut.metadata_list
422 self.metadataList = self.dataOut.metadata_list
424
423
425 tableList = []
424 tableList = []
426 dsList = []
425 dsList = []
427
426
428 for i in range(len(self.dataList)):
427 for i in range(len(self.dataList)):
429 dsDict = {}
428 dsDict = {}
430 if hasattr(self.dataOut, self.dataList[i]):
429 if hasattr(self.dataOut, self.dataList[i]):
431 dataAux = getattr(self.dataOut, self.dataList[i])
430 dataAux = getattr(self.dataOut, self.dataList[i])
432 dsDict['variable'] = self.dataList[i]
431 dsDict['variable'] = self.dataList[i]
433 else:
432 else:
433 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
435 continue
434 continue
436
435
437 if dataAux is None:
436 if dataAux is None:
438 continue
437 continue
438 elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)): # numpy.float was removed in numpy >= 1.24
440 dsDict['nDim'] = 0
439 dsDict['nDim'] = 0
441 else:
440 else:
442 dsDict['nDim'] = len(dataAux.shape)
441 dsDict['nDim'] = len(dataAux.shape)
443 dsDict['shape'] = dataAux.shape
442 dsDict['shape'] = dataAux.shape
444 dsDict['dsNumber'] = dataAux.shape[0]
443 dsDict['dsNumber'] = dataAux.shape[0]
445 dsDict['dtype'] = dataAux.dtype
444 dsDict['dtype'] = dataAux.dtype
446 dsList.append(dsDict)
445 dsList.append(dsDict)
447
446
448 self.dsList = dsList
447 self.dsList = dsList
449 self.currentDay = self.dataOut.datatime.date()
448 self.currentDay = self.dataOut.datatime.date()
450
449
451 def timeFlag(self):
450 def timeFlag(self):
452 currentTime = self.dataOut.utctime
451 currentTime = self.dataOut.utctime
453 timeTuple = time.localtime(currentTime)
452 timeTuple = time.localtime(currentTime)
454 dataDay = timeTuple.tm_yday
453 dataDay = timeTuple.tm_yday
455
454
456 if self.lastTime is None:
455 if self.lastTime is None:
457 self.lastTime = currentTime
456 self.lastTime = currentTime
458 self.currentDay = dataDay
457 self.currentDay = dataDay
459 return False
458 return False
460
459
461 timeDiff = currentTime - self.lastTime
460 timeDiff = currentTime - self.lastTime
462
461
462 # If the day changed or the time difference between samples exceeds the limit
464 if dataDay != self.currentDay:
463 if dataDay != self.currentDay:
465 self.currentDay = dataDay
464 self.currentDay = dataDay
466 return True
465 return True
467 elif timeDiff > 3*60*60:
466 elif timeDiff > 3*60*60:
468 self.lastTime = currentTime
467 self.lastTime = currentTime
469 return True
468 return True
470 else:
469 else:
471 self.lastTime = currentTime
470 self.lastTime = currentTime
472 return False
471 return False
473
472
474 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
473 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
475 dataList=[], setType=None, description={},mode= None,type_data=None,Reset = False,**kwargs):
474 dataList=[], setType=None, description={},mode= None,type_data=None,Reset = False,**kwargs):
476
475
477 if Reset:
476 if Reset:
478 self.isConfig = False
477 self.isConfig = False
479 self.closeFile()
478 self.closeFile()
480 self.lastTime = None
479 self.lastTime = None
481 self.blockIndex = 0
480 self.blockIndex = 0
482
481
483 self.dataOut = dataOut
482 self.dataOut = dataOut
484 self.mode = mode
483 self.mode = mode
485 self.var = dataList[0]
484 self.var = dataList[0]
486
485
487 if not(self.isConfig):
486 if not(self.isConfig):
488 self.setup(path=path, blocksPerFile=blocksPerFile,
487 self.setup(path=path, blocksPerFile=blocksPerFile,
489 metadataList=metadataList, dataList=dataList,
488 metadataList=metadataList, dataList=dataList,
490 setType=setType, description=description,type_data=type_data,**kwargs)
489 setType=setType, description=description,type_data=type_data,**kwargs)
491
490
492 self.isConfig = True
491 self.isConfig = True
493 self.setNextFile()
492 self.setNextFile()
494
493
495 self.putData()
494 self.putData()
496 return
495 return
497
496
498 def setNextFile(self):
497 def setNextFile(self):
499 ###print("HELLO WORLD--------------------------------")
498 ###print("HELLO WORLD--------------------------------")
500 ext = self.ext
499 ext = self.ext
501 path = self.path
500 path = self.path
502 setFile = self.setFile
501 setFile = self.setFile
503 type_data = self.type_data
502 type_data = self.type_data
504
503
505 timeTuple = time.localtime(self.dataOut.utctime)
504 timeTuple = time.localtime(self.dataOut.utctime)
506 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
505 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
507 fullpath = os.path.join(path, subfolder)
506 fullpath = os.path.join(path, subfolder)
508
507
509 if os.path.exists(fullpath):
508 if os.path.exists(fullpath):
510 filesList = os.listdir(fullpath)
509 filesList = os.listdir(fullpath)
511 filesList = [k for k in filesList if k.startswith(self.optchar)]
510 filesList = [k for k in filesList if k.startswith(self.optchar)]
512 if len( filesList ) > 0:
511 if len( filesList ) > 0:
513 filesList = sorted(filesList, key=str.lower)
512 filesList = sorted(filesList, key=str.lower)
514 filen = filesList[-1]
513 filen = filesList[-1]
514 # the filename must have the following format
515 # 0 1234 567 89A BCDE (hex)
516 # x YYYY DDD SSS .ext
518 if isNumber(filen[8:11]):
517 if isNumber(filen[8:11]):
518 setFile = int(filen[8:11]) # initialize the set counter with the set number of the last file
520 else:
519 else:
521 setFile = -1
520 setFile = -1
522 else:
521 else:
522 setFile = -1 # initialize the set counter
524 else:
523 else:
525 os.makedirs(fullpath)
524 os.makedirs(fullpath)
525 setFile = -1 # initialize the set counter
527
526
528 ###print("**************************",self.setType)
527 ###print("**************************",self.setType)
529 if self.setType is None:
528 if self.setType is None:
530 setFile += 1
529 setFile += 1
531 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
530 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
532 timeTuple.tm_year,
531 timeTuple.tm_year,
533 timeTuple.tm_yday,
532 timeTuple.tm_yday,
534 setFile,
533 setFile,
535 ext )
534 ext )
536 elif self.setType == "weather":
535 elif self.setType == "weather":
537
536
538 if self.var.lower() == 'Zdb'.lower():
537 if self.var.lower() == 'Zdb'.lower():
539 wr_type = 'Z'
538 wr_type = 'Z'
540 elif self.var.lower() == 'Zdb_D'.lower():
539 elif self.var.lower() == 'Zdb_D'.lower():
541 wr_type = 'D'
540 wr_type = 'D'
542 elif self.var.lower() == 'PhiD_P'.lower():
541 elif self.var.lower() == 'PhiD_P'.lower():
543 wr_type = 'P'
542 wr_type = 'P'
544 elif self.var.lower() == 'RhoHV_R'.lower():
543 elif self.var.lower() == 'RhoHV_R'.lower():
545 wr_type = 'R'
544 wr_type = 'R'
546 elif self.var.lower() == 'velRadial_V'.lower():
545 elif self.var.lower() == 'velRadial_V'.lower():
547 wr_type = 'V'
546 wr_type = 'V'
548 elif self.var.lower() == 'Sigmav_W'.lower():
547 elif self.var.lower() == 'Sigmav_W'.lower():
549 wr_type = 'S'
548 wr_type = 'S'
550 elif self.var.lower() == 'dataPP_POWER'.lower():
549 elif self.var.lower() == 'dataPP_POWER'.lower():
551 wr_type = 'Pow'
550 wr_type = 'Pow'
552 elif self.var.lower() == 'dataPP_DOP'.lower():
551 elif self.var.lower() == 'dataPP_DOP'.lower():
553 wr_type = 'Dop'
552 wr_type = 'Dop'
554
553
555
554
556 #Z_SOPHy_El10.0_20200505_14:02:15.h5
555 #Z_SOPHy_El10.0_20200505_14:02:15.h5
557 #Z_SOPHy_Az40.0_20200505_14:02:15.h5
556 #Z_SOPHy_Az40.0_20200505_14:02:15.h5
558 if self.dataOut.flagMode == 1: #'AZI' #PPI
557 if self.dataOut.flagMode == 1: #'AZI' #PPI
559 ang_type = 'El'
558 ang_type = 'El'
560 len_aux = int(self.dataOut.data_ele.shape[0]/4)
559 len_aux = int(self.dataOut.data_ele.shape[0]/4)
561 mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
560 mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
562 ang_ = round(mean,1)
561 ang_ = round(mean,1)
563 elif self.dataOut.flagMode == 0: #'ELE' #RHI
562 elif self.dataOut.flagMode == 0: #'ELE' #RHI
564 ang_type = 'Az'
563 ang_type = 'Az'
565 len_aux = int(self.dataOut.data_azi.shape[0]/4)
564 len_aux = int(self.dataOut.data_azi.shape[0]/4)
566 mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
565 mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
567 ang_ = round(mean,1)
566 ang_ = round(mean,1)
568
567
569 file = '%s%s%s%2.1f%s%2.2d%2.2d%2.2d%s%2.2d%2.2d%2.2d%s' % (wr_type,
568 file = '%s%s%s%2.1f%s%2.2d%2.2d%2.2d%s%2.2d%2.2d%2.2d%s' % (wr_type,
570 '_SOPHy_',
569 '_SOPHy_',
571 ang_type,
570 ang_type,
572 ang_,
571 ang_,
573 '_',
572 '_',
574 timeTuple.tm_year,
573 timeTuple.tm_year,
575 timeTuple.tm_mon,
574 timeTuple.tm_mon,
576 timeTuple.tm_mday,
575 timeTuple.tm_mday,
577 '_',
576 '_',
578 timeTuple.tm_hour,
577 timeTuple.tm_hour,
579 timeTuple.tm_min,
578 timeTuple.tm_min,
580 timeTuple.tm_sec,
579 timeTuple.tm_sec,
581 ext )
580 ext )
582
581
583 else:
582 else:
584 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
583 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
585 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
584 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
586 timeTuple.tm_year,
585 timeTuple.tm_year,
587 timeTuple.tm_yday,
586 timeTuple.tm_yday,
588 setFile,
587 setFile,
589 ext )
588 ext )
590
589
591 self.filename = os.path.join( path, subfolder, file )
590 self.filename = os.path.join( path, subfolder, file )
592
591
593 #Setting HDF5 File
592 #Setting HDF5 File
594 #print("filename",self.filename)
593 #print("filename",self.filename)
595 self.fp = h5py.File(self.filename, 'w')
594 self.fp = h5py.File(self.filename, 'w')
596 #write metadata
595 #write metadata
597 self.writeMetadata(self.fp)
596 self.writeMetadata(self.fp)
598 #Write data
597 #Write data
599 self.writeData(self.fp)
598 self.writeData(self.fp)
600
599
601 def getLabel(self, name, x=None):
600 def getLabel(self, name, x=None):
602
601
603 if x is None:
602 if x is None:
604 if 'Data' in self.description:
603 if 'Data' in self.description:
605 data = self.description['Data']
604 data = self.description['Data']
606 if 'Metadata' in self.description:
605 if 'Metadata' in self.description:
607 data.update(self.description['Metadata'])
606 data.update(self.description['Metadata'])
608 else:
607 else:
609 data = self.description
608 data = self.description
610 if name in data:
609 if name in data:
611 if isinstance(data[name], str):
610 if isinstance(data[name], str):
612 return data[name]
611 return data[name]
613 elif isinstance(data[name], list):
612 elif isinstance(data[name], list):
614 return None
613 return None
615 elif isinstance(data[name], dict):
614 elif isinstance(data[name], dict):
616 for key, value in data[name].items():
615 for key, value in data[name].items():
617 return key
616 return key
618 return name
617 return name
619 else:
618 else:
620 if 'Metadata' in self.description:
619 if 'Metadata' in self.description:
621 meta = self.description['Metadata']
620 meta = self.description['Metadata']
622 else:
621 else:
623 meta = self.description
622 meta = self.description
624 if name in meta:
623 if name in meta:
625 if isinstance(meta[name], list):
624 if isinstance(meta[name], list):
626 return meta[name][x]
625 return meta[name][x]
627 elif isinstance(meta[name], dict):
626 elif isinstance(meta[name], dict):
628 for key, value in meta[name].items():
627 for key, value in meta[name].items():
629 return value[x]
628 return value[x]
630 if 'cspc' in name:
629 if 'cspc' in name:
631 return 'pair{:02d}'.format(x)
630 return 'pair{:02d}'.format(x)
632 else:
631 else:
633 return 'channel{:02d}'.format(x)
632 return 'channel{:02d}'.format(x)
634
633
635 def writeMetadata(self, fp):
634 def writeMetadata(self, fp):
636
635
637 if self.description:
636 if self.description:
638 if 'Metadata' in self.description:
637 if 'Metadata' in self.description:
639 grp = fp.create_group('Metadata')
638 grp = fp.create_group('Metadata')
640 else:
639 else:
641 grp = fp
640 grp = fp
642 else:
641 else:
643 grp = fp.create_group('Metadata')
642 grp = fp.create_group('Metadata')
644
643
645 for i in range(len(self.metadataList)):
644 for i in range(len(self.metadataList)):
646 if not hasattr(self.dataOut, self.metadataList[i]):
645 if not hasattr(self.dataOut, self.metadataList[i]):
647 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
646 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
648 continue
647 continue
649 value = getattr(self.dataOut, self.metadataList[i])
648 value = getattr(self.dataOut, self.metadataList[i])
650 if isinstance(value, bool):
649 if isinstance(value, bool):
651 if value is True:
650 if value is True:
652 value = 1
651 value = 1
653 else:
652 else:
654 value = 0
653 value = 0
655 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
654 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
656 return
655 return
657
656
658 def writeData(self, fp):
657 def writeData(self, fp):
659
658
660 if self.description:
659 if self.description:
661 if 'Data' in self.description:
660 if 'Data' in self.description:
662 grp = fp.create_group('Data')
661 grp = fp.create_group('Data')
663 else:
662 else:
664 grp = fp
663 grp = fp
665 else:
664 else:
666 grp = fp.create_group('Data')
665 grp = fp.create_group('Data')
667
666
668 dtsets = []
667 dtsets = []
669 data = []
668 data = []
670
669
671 for dsInfo in self.dsList:
670 for dsInfo in self.dsList:
672 if dsInfo['nDim'] == 0:
671 if dsInfo['nDim'] == 0:
673 ds = grp.create_dataset(
672 ds = grp.create_dataset(
674 self.getLabel(dsInfo['variable']),
673 self.getLabel(dsInfo['variable']),
675 (self.blocksPerFile, ),
674 (self.blocksPerFile, ),
676 chunks=True,
675 chunks=True,
677 dtype=numpy.float64)
676 dtype=numpy.float64)
678 dtsets.append(ds)
677 dtsets.append(ds)
679 data.append((dsInfo['variable'], -1))
678 data.append((dsInfo['variable'], -1))
680 else:
679 else:
681 label = self.getLabel(dsInfo['variable'])
680 label = self.getLabel(dsInfo['variable'])
682 if label is not None:
681 if label is not None:
683 sgrp = grp.create_group(label)
682 sgrp = grp.create_group(label)
684 else:
683 else:
685 sgrp = grp
684 sgrp = grp
686 for i in range(dsInfo['dsNumber']):
685 for i in range(dsInfo['dsNumber']):
687 ds = sgrp.create_dataset(
686 ds = sgrp.create_dataset(
688 self.getLabel(dsInfo['variable'], i),
687 self.getLabel(dsInfo['variable'], i),
689 (self.blocksPerFile, ) + dsInfo['shape'][1:],
688 (self.blocksPerFile, ) + dsInfo['shape'][1:],
690 chunks=True,
689 chunks=True,
691 dtype=dsInfo['dtype'])
690 dtype=dsInfo['dtype'])
692 dtsets.append(ds)
691 dtsets.append(ds)
693 data.append((dsInfo['variable'], i))
692 data.append((dsInfo['variable'], i))
694 fp.flush()
693 fp.flush()
695
694
696 log.log('Creating file: {}'.format(fp.filename), self.name)
695 log.log('Creating file: {}'.format(fp.filename), self.name)
697
696
698 self.ds = dtsets
697 self.ds = dtsets
699 self.data = data
698 self.data = data
700 self.firsttime = True
699 self.firsttime = True
701 self.blockIndex = 0
700 self.blockIndex = 0
702 return
701 return
703
702
704 def putData(self):
703 def putData(self):
705
704
706 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():# or self.generalFlag_vRF():
705 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():# or self.generalFlag_vRF():
707 self.closeFile()
706 self.closeFile()
708 self.setNextFile()
707 self.setNextFile()
709
708
710 for i, ds in enumerate(self.ds):
709 for i, ds in enumerate(self.ds):
711 attr, ch = self.data[i]
710 attr, ch = self.data[i]
712 if ch == -1:
711 if ch == -1:
713 ds[self.blockIndex] = getattr(self.dataOut, attr)
712 ds[self.blockIndex] = getattr(self.dataOut, attr)
714 else:
713 else:
715 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
714 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
716
715
717 self.fp.flush()
716 self.fp.flush()
718 self.blockIndex += 1
717 self.blockIndex += 1
719 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
718 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
720
719
721 return
720 return
722
721
723 def closeFile(self):
722 def closeFile(self):
724
723
725 if self.blockIndex != self.blocksPerFile:
724 if self.blockIndex != self.blocksPerFile:
726 for ds in self.ds:
725 for ds in self.ds:
727 ds.resize(self.blockIndex, axis=0)
726 ds.resize(self.blockIndex, axis=0)
728
727
729 if self.fp:
728 if self.fp:
730 self.fp.flush()
729 self.fp.flush()
731 self.fp.close()
730 self.fp.close()
732
731
733 def close(self):
732 def close(self):
734
733
735 self.closeFile()
734 self.closeFile()
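
A minimal sketch of inspecting a file written by HDFWriter with the default layout (Data and Metadata groups, no custom description) using plain h5py; the file name and the variable names ('data_output', 'utctime', 'heightList') are only illustrative, taken from the docstring examples above:

import h5py
import numpy

# 'D2022112000.hdf5' is a hypothetical file following the default optchar + YYYY + DDD + SSS naming
with h5py.File('D2022112000.hdf5', 'r') as fp:
    heights = fp['Metadata/heightList'][()]   # one dataset per metadataList attribute
    times = fp['Data/utctime'][()]            # scalar attributes become 1D arrays of length blocksPerFile
    # per-channel arrays are stored as a sub-group with datasets channel00, channel01, ...
    winds = numpy.array([fp['Data/data_output'][ch][()] for ch in fp['Data/data_output']])
    print(heights.shape, times.shape, winds.shape)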
@@ -1,152 +1,195
1 # SOPHY PROC script
1 # SOPHY PROC script
2 import os, sys, json, argparse
2 import os, sys, json, argparse
3 import datetime
3 import datetime
4 import time
4 import time
5
5
6 PATH = '/DATA_RM/DATA'
6 PATH = '/DATA_RM/DATA'
7 # PATH = '/Users/jespinoza/workspace/data/'
7 # PATH = '/Users/jespinoza/workspace/data/'
8 #PATH = '/home/roberto/DATA/data_WR_RHI/RHI'
8 #PATH = '/home/roberto/DATA/data_WR_RHI/RHI'
9 PATH = '/home/soporte/Downloads/data_WR_RHI'
9 PATH = '/home/soporte/Downloads/data_WR_RHI'
10
10
11
11
12 PARAM = {
12 PARAM = {
13 'P': {'name': 'dataPP_POWER', 'zmin': 35, 'zmax': 60, 'colormap': 'jet', 'label': 'Power', 'cb_label': 'dB'},
13 'P': {'name': 'dataPP_POWER','zmin': 35, 'zmax': 60, 'colormap': 'jet', 'label': 'Power', 'wrname':'Pow', 'cb_label': 'dB', 'ch':1},
14 'V': {'name': 'dataPP_DOP', 'zmin': -20, 'zmax': 20, 'colormap': 'seismic', 'label': 'Velocity', 'cb_label': 'm/s'},
14 'V': {'name': 'dataPP_DOP', 'zmin': -20, 'zmax': 20, 'colormap': 'seismic','label': 'Velocity', 'wrname':'Dop', 'cb_label': 'm/s','ch':1},
15 'RH': {'name': 'RhoHV_R', 'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'CoeficienteCorrelacion', 'cb_label': '*'},
15 'RH': {'name': 'RhoHV_R', 'zmin': 0, 'zmax': 1, 'colormap': 'jet', 'label': 'Coef.Correlacion', 'wrname':'R', 'cb_label': '*', 'ch':0},
16 'FD': {'name': 'PhiD_P', 'zmin': -180, 'zmax': 180, 'colormap': 'RdBu_r', 'label': 'Fase Diferencial', 'cb_label': 'º'},
16 'FD': {'name': 'PhiD_P', 'zmin': -180,'zmax': 180,'colormap': 'RdBu_r', 'label': 'Fase Diferencial', 'wrname':'P' , 'cb_label': 'º', 'ch':0},
17 'ZD': {'name': 'Zdb_D', 'zmin': -20, 'zmax': 80, 'colormap': 'viridis', 'label': 'ReflectividadDiferencial', 'cb_label': 'dBz'},
17 'ZD': {'name': 'Zdb_D', 'zmin': -20, 'zmax': 80, 'colormap': 'viridis','label': 'Reflect.Diferencial','wrname':'D' , 'cb_label': 'dBz','ch':0},
18 'Z': {'name': 'Zdb', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis', 'label': 'Reflectividad', 'cb_label': 'dBz'},
18 'Z': {'name': 'Zdb', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis','label': 'Reflectividad', 'wrname':'Z', 'cb_label': 'dBz','ch':1},
19 'W': {'name': 'Sigmav_W', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis', 'label': 'AnchoEspectral', 'cb_label': 'hz'}
19 'W': {'name': 'Sigmav_W', 'zmin': -20, 'zmax': 60, 'colormap': 'viridis','label': 'AnchoEspectral', 'wrname':'S', 'cb_label': 'hz', 'ch':1}
20 }
20 }
21
21
22
23 #---------------------SIGNAL CHAIN ------------------------------------
24 # Definido por el usuario puede ser modificado solo se necesita definir. Ejemplo
25 '''
26 desc_wr= {
27 'Data': {
28 'dataPP_POW': 'Power',
29 'utctime': 'Time',
30 'azimuth': 'az',
31 'elevation':'el'
32 },
33 'Metadata': {
34 'heightList': 'range',
35 'channelList': 'Channels'
36 }
37 }
38 '''
39
22 def main(args):
40 def main(args):
23
41
24 experiment = args.experiment
42 experiment = args.experiment
25 fp = open(os.path.join(PATH, experiment, 'experiment.conf'))
43 fp = open(os.path.join(PATH, experiment, 'experiment.conf'))
26 conf = json.loads(fp.read())
44 conf = json.loads(fp.read())
27
45
28 ipp_km = conf['usrp_tx']['ipp']
46 ipp_km = conf['usrp_tx']['ipp']
29 ipp = ipp_km * 2 /300000
47 ipp = ipp_km * 2 /300000
30 samp_rate = conf['usrp_rx']['sample_rate']
48 samp_rate = conf['usrp_rx']['sample_rate']
49 axis = ['0' if x=='elevation' else '1' for x in conf['pedestal']['speed']] # AZIMUTH 1, ELEVATION 0
32 speed_axis = conf['pedestal']['speed']
50 speed_axis = conf['pedestal']['speed']
33 steeps = conf['pedestal']['table']
51 steeps = conf['pedestal']['table']
34 time_offset = args.time_offset
52 time_offset = args.time_offset
35 parameters = args.parameters
53 parameters = args.parameters
36 #start_date = experiment.split('@')[1].split('T')[0].replace('-', '/')
54 #start_date = experiment.split('@')[1].split('T')[0].replace('-', '/')
37 start_date = '2022/04/22'
55 start_date = '2022/04/22'
38 end_date = start_date
56 end_date = start_date
39 #start_time = experiment.split('@')[1].split('T')[1]
57 #start_time = experiment.split('@')[1].split('T')[1]
40 start_time ='17:42:55'
58 start_time ='17:42:55'
41 end_time = '23:59:59'
59 end_time = '23:59:59'
42 max_index = int(samp_rate*ipp*1e6 * args.range / 60) + int(samp_rate*ipp*1e6 * 1.2 / 60)
60 max_index = int(samp_rate*ipp*1e6 * args.range / 60) + int(samp_rate*ipp*1e6 * 1.2 / 60)
61 N = int(1/(speed_axis[0]*ipp)) # 1 DEGREE OF RESOLUTION
44 path = os.path.join(PATH, experiment, 'rawdata')
62 path = os.path.join(PATH, experiment, 'rawdata')
45 path_ped = os.path.join(PATH, experiment, 'position')
63 path_ped = os.path.join(PATH, experiment, 'position')
46 path_plots = os.path.join(PATH, experiment, 'plots')
64 path_plots = os.path.join(PATH, experiment, 'plots')
47 path_save = os.path.join(PATH, experiment, 'param')
65 path_save = os.path.join(PATH, experiment, 'param')
48
66
49 dBmin = 35
67 dBmin = 35
50 dBmax = 60
68 dBmax = 60
51 Vmin = -20
69 Vmin = -20
52 Vmax = 20
70 Vmax = 20
53
71
54
72
55 from schainpy.controller import Project
73 from schainpy.controller import Project
56
74
57 project = Project()
75 project = Project()
58 project.setup(id='1', name='Sophy', description='sophy proc')
76 project.setup(id='1', name='Sophy', description='sophy proc')
59
77
60 reader = project.addReadUnit(datatype='DigitalRFReader',
78 reader = project.addReadUnit(datatype='DigitalRFReader',
61 path=path,
79 path=path,
62 startDate=start_date,
80 startDate=start_date,
63 endDate=end_date,
81 endDate=end_date,
64 startTime=start_time,
82 startTime=start_time,
65 endTime=end_time,
83 endTime=end_time,
66 delay=0,
84 delay=0,
67 online=0,
85 online=0,
68 walk=1,
86 walk=1,
69 ippKm = ipp_km,
87 ippKm = ipp_km,
70 getByBlock = 1,
88 getByBlock = 1,
71 nProfileBlocks = N,
89 nProfileBlocks = N,
72 )
90 )
73
91
74 voltage = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
92 voltage = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
75 op = voltage.addOperation(name='setH0')
93 op = voltage.addOperation(name='setH0')
76 op.addParameter(name='h0', value='-1.2')
94 op.addParameter(name='h0', value='-1.2')
77
95
78 if args.range > 0:
96 if args.range > 0:
79 op = voltage.addOperation(name='selectHeights')
97 op = voltage.addOperation(name='selectHeights')
80 op.addParameter(name='minIndex', value='0', format='int')
98 op.addParameter(name='minIndex', value='0', format='int')
81 op.addParameter(name='maxIndex', value=max_index, format='int')
99 op.addParameter(name='maxIndex', value=max_index, format='int')
82
100
83 op = voltage.addOperation(name='PulsePair_vRF', optype='other')
101 op = voltage.addOperation(name='PulsePair_vRF', optype='other')
84 op.addParameter(name='n', value=int(N), format='int')
102 op.addParameter(name='n', value=int(N), format='int')
85
103
86 proc = project.addProcUnit(datatype='ParametersProc', inputId=voltage.getId())
104 proc = project.addProcUnit(datatype='ParametersProc', inputId=voltage.getId())
87 #-----------------------new--------- variables polarimetricas---------------
105 #-----------------------new--------- variables polarimetricas---------------
88 opObj10 = proc.addOperation(name="WeatherRadar")
106 opObj10 = proc.addOperation(name="WeatherRadar")
89 opObj10.addParameter(name='variableList',value='Reflectividad,ReflectividadDiferencial,CoeficienteCorrelacion,FaseDiferencial,VelocidadRadial,AnchoEspectral')
107 opObj10.addParameter(name='variableList',value='Reflectividad,ReflectividadDiferencial,CoeficienteCorrelacion,FaseDiferencial,VelocidadRadial,AnchoEspectral')
90
108
91
109
92
110
93 op = proc.addOperation(name='PedestalInformation')
111 op = proc.addOperation(name='PedestalInformation')
94 op.addParameter(name='path', value=path_ped, format='str')
112 op.addParameter(name='path', value=path_ped, format='str')
95 op.addParameter(name='interval', value='0.04', format='float')
113 op.addParameter(name='interval', value='0.04', format='float')
96 op.addParameter(name='time_offset', value=time_offset)
114 op.addParameter(name='time_offset', value=time_offset)
115 #op.addParameter(name='axis', value=','.join(axis)) # Ask the pedestal whether all the
116 # elements here are equal; if they are, take the first value as the
117 # mode (PPI or RHI) and stop asking for the mode, since it does
118 # not change
101
119
102 for param in parameters:
120 for param in parameters:
103 op = proc.addOperation(name='Block360_vRF4')
121 op = proc.addOperation(name='Block360_vRF4')
104 #op.addParameter(name='axis', value=','.join(axis))
122 #op.addParameter(name='axis', value=','.join(axis))
105 op.addParameter(name='runNextOp', value=True)
123 op.addParameter(name='runNextOp', value=True)
106 op.addParameter(name='attr_data', value=PARAM[param]['name'])
124 op.addParameter(name='attr_data', value=PARAM[param]['name'])
107
125
108 path_fig = '/AUTO{}km'.format(args.range)
126 path_fig = '/AUTO{}km'.format(args.range)
109 op = proc.addOperation(name='WeatherParamsPlot')
127 op = proc.addOperation(name='WeatherParamsPlot')
110 op.addParameter(name='save', value=path_plots+path_fig, format='str')
128 op.addParameter(name='save', value=path_plots+path_fig, format='str')
111 op.addParameter(name='save_period', value=-1)
129 op.addParameter(name='save_period', value=-1)
112 op.addParameter(name='show', value=args.show)
130 op.addParameter(name='show', value=args.show)
113 op.addParameter(name='channels', value='(1,)')
131 op.addParameter(name='channels', value='(1,)')
114 op.addParameter(name='zmin', value=PARAM[param]['zmin'])
132 op.addParameter(name='zmin', value=PARAM[param]['zmin'])
115 op.addParameter(name='zmax', value=PARAM[param]['zmax'])
133 op.addParameter(name='zmax', value=PARAM[param]['zmax'])
116 op.addParameter(name='attr_data', value=PARAM[param]['name'], format='str')
134 op.addParameter(name='attr_data', value=PARAM[param]['name'], format='str')
117 op.addParameter(name='labels', value=[PARAM[param]['label']])
135 op.addParameter(name='labels', value=[PARAM[param]['label']])
118 op.addParameter(name='save_code', value=param)
136 op.addParameter(name='save_code', value=param)
119 op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
137 op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
120 op.addParameter(name='colormap', value=PARAM[param]['colormap'])
138 op.addParameter(name='colormap', value=PARAM[param]['colormap'])
121
139
122 if args.save:
140 if args.save:
141 desc_wr= {
142 'Data': {
143 PARAM[param]['name']: PARAM[param]['wrname'],#PARAM[param]['name']: {PARAM[param]['wrname']:['P0','P1']},
144 'utctime': 'Time'
145 },
146 'Metadata': {
147 'heightList': 'range',
148 'channelList': 'Channels',
149 'data_azi': 'azimuth',
150 'data_ele': 'elevation'
151 }
152 }
123 opObj10 = proc.addOperation(name='HDFWriter')
153 opObj10 = proc.addOperation(name='HDFWriter')
124 opObj10.addParameter(name='path',value=path_save, format='str')
154 opObj10.addParameter(name='path',value=path_save, format='str')
125 opObj10.addParameter(name='Reset',value=True)
155 opObj10.addParameter(name='Reset',value=True)
126 opObj10.addParameter(name='setType',value='weather')
156 opObj10.addParameter(name='setType',value='weather')
127 opObj10.addParameter(name='blocksPerFile',value='1',format='int')
157 opObj10.addParameter(name='blocksPerFile',value='1',format='int')
128 opObj10.addParameter(name='metadataList',value='heightList,data_azi,data_ele')
158 #opObj10.addParameter(name='channel',value=PARAM[param]['ch'],format='int')
159 opObj10.addParameter(name='metadataList',value='heightList,channelList,Typename,Datatype,Scantype,Latitude,Longitud,Heading,Waveform,PRF,CreatedBy,ContactInformation,data_azi,data_ele')
160 opObj10.addParameter(name='Typename', value=PARAM[param]['label'])
161 opObj10.addParameter(name='Datatype', value='RadialSet')
162 opObj10.addParameter(name='Scantype', value='PPI')
163 opObj10.addParameter(name='Latitude', value='-11.96')
164 opObj10.addParameter(name='Longitud', value='-76.54')
165 opObj10.addParameter(name='Heading', value='293')
166 opObj10.addParameter(name='Height', value='293')
167 opObj10.addParameter(name='Waveform', value='OFM')
168 opObj10.addParameter(name='PRF', value='2500')
169 opObj10.addParameter(name='CreatedBy', value='WeatherRadarJROTeam')
170 opObj10.addParameter(name='ContactInformation', value='dscipion@igp.gob.pe')
129 opObj10.addParameter(name='dataList',value=','.join([PARAM[param]['name'],'utctime']))
171 opObj10.addParameter(name='dataList',value=','.join([PARAM[param]['name'],'utctime']))
172 opObj10.addParameter(name='description',value=json.dumps(desc_wr))
130
173
131 project.start()
174 project.start()
132
175
133
176
134 if __name__ == '__main__':
177 if __name__ == '__main__':
135
178
136 parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
179 parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
137 parser.add_argument('experiment',
180 parser.add_argument('experiment',
138 help='Experiment name')
181 help='Experiment name')
139 parser.add_argument('--parameters', nargs='*', default=['P'],
182 parser.add_argument('--parameters', nargs='*', default=['P'],
140 help='Variables to process: P, Z, V')
183 help='Variables to process: P, Z, V')
141 parser.add_argument('--time_offset', default=0,
184 parser.add_argument('--time_offset', default=0,
142 help='Fix time offset')
185 help='Fix time offset')
143 parser.add_argument('--range', default=0, type=int,
186 parser.add_argument('--range', default=0, type=int,
144 help='Max range to plot')
187 help='Max range to plot')
145 parser.add_argument('--save', action='store_true',
188 parser.add_argument('--save', action='store_true',
146 help='Create output files')
189 help='Create output files')
147 parser.add_argument('--show', action='store_true',
190 parser.add_argument('--show', action='store_true',
148 help='Show matplotlib plot.')
191 help='Show matplotlib plot.')
149
192
150 args = parser.parse_args()
193 args = parser.parse_args()
151 print (args)
194 print (args)
152 main(args)
195 main(args)
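
A hedged sketch of reading back one of the weather files this script saves via HDFWriter with the desc_wr description above. The invocation and the file name are placeholders; the key names ('Time', 'range', 'azimuth', 'elevation', 'Pow') come from desc_wr and PARAM, and dataPP_POWER is assumed to be a per-channel array:

import h5py
import numpy

# hypothetical invocation: python sophy_proc.py <experiment> --parameters P --range 60 --save
# hypothetical output file for the 'P' parameter (wrname 'Pow')
with h5py.File('Pow_SOPHy_El10.0_20220422_174255.hdf5', 'r') as fp:
    ranges = fp['Metadata/range'][()]        # heightList, renamed by desc_wr
    azimuth = fp['Metadata/azimuth'][()]     # data_azi
    elevation = fp['Metadata/elevation'][()] # data_ele
    times = fp['Data/Time'][()]              # utctime
    # per-channel data are stored as datasets channel00, channel01, ... under 'Pow'
    power = numpy.array([fp['Data/Pow'][ch][()] for ch in fp['Data/Pow']])
    print(ranges.shape, azimuth.shape, power.shape)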