update test
avaldez -
r1352:55100ef75825
@@ -1,648 +1,648
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default
19 19 the file contains two groups, Data and Metadata, and every variable is loaded
20 20 as a `dataOut` attribute.
21 21 It is also possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; extra values can be added to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
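A hedged sketch of consuming the read data downstream (the `ParametersProc`
unit here is only an illustration, mirroring the test scripts in this
repository): once the reader runs, the attributes declared in `desc` are
available on `dataOut`, e.g. `dataOut.data_output`, `dataOut.utctime` and
`dataOut.heightList`.

proc = project.addProcUnit(datatype='ParametersProc', inputId=reader.getId())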
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96
97 97 def setup(self, **kwargs):
98 98 self.set_kwargs(**kwargs)
99 99 if not self.ext.startswith('.'):
100 100 self.ext = '.{}'.format(self.ext)
101 101
102 102 if self.online:
103 103 log.log("Searching files in online mode...", self.name)
104 104
105 105 for nTries in range(self.nTries):
106 106 fullpath = self.searchFilesOnLine(self.path, self.startDate,
107 107 self.endDate, self.expLabel, self.ext, self.walk,
108 108 self.filefmt, self.folderfmt)
109 109 try:
110 110 fullpath = next(fullpath)
111 111 except:
112 112 fullpath = None
113 113
114 114 if fullpath:
115 115 break
116 116
117 117 log.warning(
118 118 'Waiting {} sec for a valid file in {}: try {} ...'.format(
119 119 self.delay, self.path, nTries + 1),
120 120 self.name)
121 121 time.sleep(self.delay)
122 122
123 123 if not(fullpath):
124 124 raise schainpy.admin.SchainError(
125 125 'There isn\'t any valid file in {}'.format(self.path))
126 126
127 127 pathname, filename = os.path.split(fullpath)
128 128 self.year = int(filename[1:5])
129 129 self.doy = int(filename[5:8])
130 130 self.set = int(filename[8:11]) - 1
131 131 else:
132 132 log.log("Searching files in {}".format(self.path), self.name)
133 133 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
134 134 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
135 135 self.setNextFile()
136 136
137 137 return
138 138
139 139 def readFirstHeader(self):
140 140 '''Read metadata and data'''
141 141
142 142 self.__readMetadata()
143 143 self.__readData()
144 144 self.__setBlockList()
145 145
146 146 if 'type' in self.meta:
147 147 self.dataOut = eval(self.meta['type'])()
148 148
149 149 for attr in self.meta:
150 150 setattr(self.dataOut, attr, self.meta[attr])
151 151
152 152 self.blockIndex = 0
153 153
154 154 return
155 155
156 156 def __setBlockList(self):
157 157 '''
158 158 Selects the data within the times defined
159 159
160 160 self.fp
161 161 self.startTime
162 162 self.endTime
163 163 self.blockList
164 164 self.blocksPerFile
165 165
166 166 '''
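# A worked sketch with illustrative values: if the file's first timestamp falls
# on 2021-03-05 and startTime/endTime are 10:00:00 and 11:00:00, then
# startUtcTime and endUtcTime below are the epoch seconds of 2021-03-05
# 10:00:00 and 11:00:00, and blockList keeps only the indices of 'utctime'
# inside [startUtcTime, endUtcTime).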
167 167
168 168 startTime = self.startTime
169 169 endTime = self.endTime
170 170
171 171 thisUtcTime = self.data['utctime']
172 172 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
173 173
174 174 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
175 175
176 176 thisDate = thisDatetime.date()
177 177 thisTime = thisDatetime.time()
178 178
179 179 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
180 180 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 181
182 182 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
183 183
184 184 self.blockList = ind
185 185 self.blocksPerFile = len(ind)
186 186 return
187 187
188 188 def __readMetadata(self):
189 189 '''
190 190 Reads Metadata
191 191 '''
192 192
193 193 meta = {}
194 194
195 195 desc = {
196 196 'Data': {
197 'dataPP_POW': 'Data/dataPP_POW/table00',
197 'dataPP_POW': 'Data/dataPP_POW/channel00',
198 198 'utctime':'Data/utctime'
199 199 },
200 200 'Metadata': {
201 201 'azimuth' :'Metadata/azimuth',
202 202 'heightList' :'Metadata/heightList',
203 203 'flagDataAsBlock':'Metadata/flagDataAsBlock'
204 204 }
205 205 }
206 206
207 207 self.description = desc
208 208 if self.description:
209 209 for key, value in self.description['Metadata'].items():
210 210 try:
211 211 meta[key] = self.fp[value].value
212 212 except:
213 213 meta[key] = self.fp[value][()]
214 214 else:
215 215 grp = self.fp['Metadata']
216 216 for name in grp:
217 217 meta[name] = grp[name].value
218 218
219 219 if self.extras:
220 220 for key, value in self.extras.items():
221 221 meta[key] = value
222 222 self.meta = meta
223 223
224 224 return
225 225
226 226 def __readData(self):
227 227
228 228 data = {}
229 229
230 230 if self.description:
231 231 for key, value in self.description['Data'].items():
232 232 if isinstance(value, str):
233 233 if isinstance(self.fp[value], h5py.Dataset):
234 234 try:
235 235 data[key] = self.fp[value].value
236 236 except:
237 237 ndim= self.fp[value][()].ndim
238 238 if ndim==2:
239 239 data[key] = numpy.swapaxes(self.fp[value][()],0,1)
240 240 if ndim==1:
241 241 data[key] = self.fp[value][()]
242 242 elif isinstance(self.fp[value], h5py.Group):
243 243 array = []
244 244 for ch in self.fp[value]:
245 245 array.append(self.fp[value][ch].value)
246 246 data[key] = numpy.array(array)
247 247 elif isinstance(value, list):
248 248 array = []
249 249 for ch in value:
250 250 array.append(self.fp[ch].value)
251 251 data[key] = numpy.array(array)
252 252 else:
253 253 grp = self.fp['Data']
254 254 for name in grp:
255 255 if isinstance(grp[name], h5py.Dataset):
256 256 array = grp[name].value
257 257 elif isinstance(grp[name], h5py.Group):
258 258 array = []
259 259 for ch in grp[name]:
260 260 array.append(grp[name][ch].value)
261 261 array = numpy.array(array)
262 262 else:
263 263 log.warning('Unknown type: {}'.format(name))
264 264
265 265 if name in self.description:
266 266 key = self.description[name]
267 267 else:
268 268 key = name
269 269 data[key] = array
270 270
271 271 self.data = data
272 272 return
273 273
274 274 def getData(self):
275 275
276 276 for attr in self.data:
277 277 if self.data[attr].ndim == 1:
278 278 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
279 279 else:
280 280 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
281 281
282 282 self.dataOut.flagNoData = False
283 283 self.blockIndex += 1
284 284
285 285 log.log("Block No. {}/{} -> {}".format(
286 286 self.blockIndex,
287 287 self.blocksPerFile,
288 288 self.dataOut.datatime.ctime()), self.name)
289 289
290 290 return
291 291
292 292 def run(self, **kwargs):
293 293
294 294 if not(self.isConfig):
295 295 self.setup(**kwargs)
296 296 self.isConfig = True
297 297
298 298 if self.blockIndex == self.blocksPerFile:
299 299 self.setNextFile()
300 300
301 301 self.getData()
302 302
303 303 return
304 304
305 305 @MPDecorator
306 306 class HDFWriter(Operation):
307 307 """Operation to write HDF5 files.
308 308
309 309 By default the HDF5 file contains two groups, Data and Metadata, where any
310 310 `dataOut` attribute listed in the `dataList` and `metadataList` parameters
311 311 is saved; data attributes are normally time dependent whereas the metadata
312 312 attributes are not.
313 313 It is possible to customize the structure of the HDF5 file with the
314 314 optional `description` parameter; see the examples.
315 315
316 316 Parameters:
317 317 -----------
318 318 path : str
319 319 Path where files will be saved.
320 320 blocksPerFile : int
321 321 Number of blocks per file
322 322 metadataList : list
323 323 List of the dataOut attributes that will be saved as metadata
324 324 dataList : list
325 325 List of the dataOut attributes that will be saved as data
326 326 setType : optional
327 327 If not None, the file set number is taken from the data timestamp instead of an incremental counter
328 328 description : dict, optional
329 329 Dictionary with the desired description of the HDF5 file
330 330
331 331 Examples
332 332 --------
333 333
334 334 desc = {
335 335 'data_output': {'winds': ['z', 'w', 'v']},
336 336 'utctime': 'timestamps',
337 337 'heightList': 'heights'
338 338 }
339 339 desc = {
340 340 'data_output': ['z', 'w', 'v'],
341 341 'utctime': 'timestamps',
342 342 'heightList': 'heights'
343 343 }
344 344 desc = {
345 345 'Data': {
346 346 'data_output': 'winds',
347 347 'utctime': 'timestamps'
348 348 },
349 349 'Metadata': {
350 350 'heightList': 'heights'
351 351 }
352 352 }
353 353
354 354 writer = proc_unit.addOperation(name='HDFWriter')
355 355 writer.addParameter(name='path', value='/path/to/file')
356 356 writer.addParameter(name='blocksPerFile', value='32')
357 357 writer.addParameter(name='metadataList', value='heightList,timeZone')
358 358 writer.addParameter(name='dataList',value='data_output,utctime')
359 359 # writer.addParameter(name='description',value=json.dumps(desc))
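With the last `desc` above, the resulting file layout would look roughly like
this (a sketch inferred from `getLabel`/`writeData`; the channelXX names depend
on the number of channels in `data_output`, and `timeZone` keeps its own name
because it is not relabelled):

/Data/winds/channel00
/Data/winds/channel01
/Data/timestamps
/Metadata/heights
/Metadata/timeZone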
360 360
361 361 """
362 362
363 363 ext = ".hdf5"
364 364 optchar = "D"
365 365 filename = None
366 366 path = None
367 367 setFile = None
368 368 fp = None
369 369 firsttime = True
370 370 #Configurations
371 371 blocksPerFile = None
372 372 blockIndex = None
373 373 dataOut = None
374 374 #Data Arrays
375 375 dataList = None
376 376 metadataList = None
377 377 currentDay = None
378 378 lastTime = None
379 379
380 380 def __init__(self):
381 381
382 382 Operation.__init__(self)
383 383 return
384 384
385 385 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
386 386 self.path = path
387 387 self.blocksPerFile = blocksPerFile
388 388 self.metadataList = metadataList
389 389 self.dataList = [s.strip() for s in dataList]
390 390 self.setType = setType
391 391 self.description = description
392 392
393 393 if self.metadataList is None:
394 394 self.metadataList = self.dataOut.metadata_list
395 395
396 396 tableList = []
397 397 dsList = []
398 398
399 399 for i in range(len(self.dataList)):
400 400 dsDict = {}
401 401 if hasattr(self.dataOut, self.dataList[i]):
402 402 dataAux = getattr(self.dataOut, self.dataList[i])
403 403 dsDict['variable'] = self.dataList[i]
404 404 else:
405 405 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
406 406 continue
407 407
408 408 if dataAux is None:
409 409 continue
410 410 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
411 411 dsDict['nDim'] = 0
412 412 else:
413 413 dsDict['nDim'] = len(dataAux.shape)
414 414 dsDict['shape'] = dataAux.shape
415 415 dsDict['dsNumber'] = dataAux.shape[0]
416 416 dsDict['dtype'] = dataAux.dtype
417 417
418 418 dsList.append(dsDict)
419 419
420 420 self.dsList = dsList
421 421 self.currentDay = self.dataOut.datatime.date()
422 422
423 423 def timeFlag(self):
424 424 currentTime = self.dataOut.utctime
425 425 timeTuple = time.localtime(currentTime)
426 426 dataDay = timeTuple.tm_yday
427 427
428 428 if self.lastTime is None:
429 429 self.lastTime = currentTime
430 430 self.currentDay = dataDay
431 431 return False
432 432
433 433 timeDiff = currentTime - self.lastTime
434 434
435 435 # If the day changed or the time difference between samples exceeds the threshold
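# e.g. a sample arriving on a new day, or more than 3 hours (3*60*60 s) after
# the previous one, returns True so that putData() rolls over to a new file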
436 436 if dataDay != self.currentDay:
437 437 self.currentDay = dataDay
438 438 return True
439 439 elif timeDiff > 3*60*60:
440 440 self.lastTime = currentTime
441 441 return True
442 442 else:
443 443 self.lastTime = currentTime
444 444 return False
445 445
446 446 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
447 447 dataList=[], setType=None, description={}):
448 448
449 449 self.dataOut = dataOut
450 450 if not(self.isConfig):
451 451 self.setup(path=path, blocksPerFile=blocksPerFile,
452 452 metadataList=metadataList, dataList=dataList,
453 453 setType=setType, description=description)
454 454
455 455 self.isConfig = True
456 456 self.setNextFile()
457 457
458 458 self.putData()
459 459 return
460 460
461 461 def setNextFile(self):
462 462
463 463 ext = self.ext
464 464 path = self.path
465 465 setFile = self.setFile
466 466
467 467 timeTuple = time.localtime(self.dataOut.utctime)
468 468 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
469 469 fullpath = os.path.join(path, subfolder)
470 470
471 471 if os.path.exists(fullpath):
472 472 filesList = os.listdir(fullpath)
473 473 filesList = [k for k in filesList if k.startswith(self.optchar)]
474 474 if len( filesList ) > 0:
475 475 filesList = sorted(filesList, key=str.lower)
476 476 filen = filesList[-1]
477 477 # the filename must have the following format
478 478 # 0 1234 567 89A BCDE (hex)
479 479 # x YYYY DDD SSS .ext
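# e.g. D2021064000.hdf5 -> optchar 'D', year 2021, day-of-year 064, set 000,
# extension .hdf5 (an illustrative name, consistent with the d2021064 folders
# used in the scripts below)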
480 480 if isNumber(filen[8:11]):
481 481 setFile = int(filen[8:11]) # initialize the set counter from the last file's set number
482 482 else:
483 483 setFile = -1
484 484 else:
485 485 setFile = -1 # initialize the set counter
486 486 else:
487 487 os.makedirs(fullpath)
488 488 setFile = -1 # initialize the set counter
489 489
490 490 if self.setType is None:
491 491 setFile += 1
492 492 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
493 493 timeTuple.tm_year,
494 494 timeTuple.tm_yday,
495 495 setFile,
496 496 ext )
497 497 else:
498 498 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
499 499 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
500 500 timeTuple.tm_year,
501 501 timeTuple.tm_yday,
502 502 setFile,
503 503 ext )
504 504
505 505 self.filename = os.path.join( path, subfolder, file )
506 506
507 507 #Setting HDF5 File
508 508 self.fp = h5py.File(self.filename, 'w')
509 509 #write metadata
510 510 self.writeMetadata(self.fp)
511 511 #Write data
512 512 self.writeData(self.fp)
513 513
514 514 def getLabel(self, name, x=None):
515 515
516 516 if x is None:
517 517 if 'Data' in self.description:
518 518 data = self.description['Data']
519 519 if 'Metadata' in self.description:
520 520 data.update(self.description['Metadata'])
521 521 else:
522 522 data = self.description
523 523 if name in data:
524 524 if isinstance(data[name], str):
525 525 return data[name]
526 526 elif isinstance(data[name], list):
527 527 return None
528 528 elif isinstance(data[name], dict):
529 529 for key, value in data[name].items():
530 530 return key
531 531 return name
532 532 else:
533 533 if 'Metadata' in self.description:
534 534 meta = self.description['Metadata']
535 535 else:
536 536 meta = self.description
537 537 if name in meta:
538 538 if isinstance(meta[name], list):
539 539 return meta[name][x]
540 540 elif isinstance(meta[name], dict):
541 541 for key, value in meta[name].items():
542 542 return value[x]
543 543 if 'cspc' in name:
544 544 return 'pair{:02d}'.format(x)
545 545 else:
546 546 return 'channel{:02d}'.format(x)
547 547
548 548 def writeMetadata(self, fp):
549 549
550 550 if self.description:
551 551 if 'Metadata' in self.description:
552 552 grp = fp.create_group('Metadata')
553 553 else:
554 554 grp = fp
555 555 else:
556 556 grp = fp.create_group('Metadata')
557 557
558 558 for i in range(len(self.metadataList)):
559 559 if not hasattr(self.dataOut, self.metadataList[i]):
560 560 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
561 561 continue
562 562 value = getattr(self.dataOut, self.metadataList[i])
563 563 if isinstance(value, bool):
564 564 if value is True:
565 565 value = 1
566 566 else:
567 567 value = 0
568 568 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
569 569 return
570 570
571 571 def writeData(self, fp):
572 572
573 573 if self.description:
574 574 if 'Data' in self.description:
575 575 grp = fp.create_group('Data')
576 576 else:
577 577 grp = fp
578 578 else:
579 579 grp = fp.create_group('Data')
580 580
581 581 dtsets = []
582 582 data = []
583 583
584 584 for dsInfo in self.dsList:
585 585 if dsInfo['nDim'] == 0:
586 586 ds = grp.create_dataset(
587 587 self.getLabel(dsInfo['variable']),
588 588 (self.blocksPerFile, ),
589 589 chunks=True,
590 590 dtype=numpy.float64)
591 591 dtsets.append(ds)
592 592 data.append((dsInfo['variable'], -1))
593 593 else:
594 594 label = self.getLabel(dsInfo['variable'])
595 595 if label is not None:
596 596 sgrp = grp.create_group(label)
597 597 else:
598 598 sgrp = grp
599 599 for i in range(dsInfo['dsNumber']):
600 600 ds = sgrp.create_dataset(
601 601 self.getLabel(dsInfo['variable'], i),
602 602 (self.blocksPerFile, ) + dsInfo['shape'][1:],
603 603 chunks=True,
604 604 dtype=dsInfo['dtype'])
605 605 dtsets.append(ds)
606 606 data.append((dsInfo['variable'], i))
607 607 fp.flush()
608 608
609 609 log.log('Creating file: {}'.format(fp.filename), self.name)
610 610
611 611 self.ds = dtsets
612 612 self.data = data
613 613 self.firsttime = True
614 614 self.blockIndex = 0
615 615 return
616 616
617 617 def putData(self):
618 618
619 619 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
620 620 self.closeFile()
621 621 self.setNextFile()
622 622
623 623 for i, ds in enumerate(self.ds):
624 624 attr, ch = self.data[i]
625 625 if ch == -1:
626 626 ds[self.blockIndex] = getattr(self.dataOut, attr)
627 627 else:
628 628 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
629 629
630 630 self.fp.flush()
631 631 self.blockIndex += 1
632 632 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
633 633
634 634 return
635 635
636 636 def closeFile(self):
637 637
638 638 if self.blockIndex != self.blocksPerFile:
639 639 for ds in self.ds:
640 640 ds.resize(self.blockIndex, axis=0)
641 641
642 642 if self.fp:
643 643 self.fp.flush()
644 644 self.fp.close()
645 645
646 646 def close(self):
647 647
648 648 self.closeFile()
@@ -1,37 +1,37
1 1 import os,sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 5 #path = '/home/alex/Downloads/hdf5_testPP2'
6 6 #path = '/home/alex/Downloads/hdf5_test'
7 7 #path='/home/alex/Downloads/hdf5_wr'
8 path='/home/developer/Downloads/HDF5_WR'
8 path='/home/developer/Downloads/HDF5_WR/HDF5_064'
9 9 figpath = path
10 10 desc = "Simulator Test"
11 11
12 12 controllerObj = Project()
13 13
14 14 controllerObj.setup(id='10',name='Test Simulator',description=desc)
15 15
16 16 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
17 17 path=path,
18 18 startDate="2021/01/01", #"2020/01/01",#today,
19 19 endDate= "2021/12/01", #"2020/12/30",#today,
20 20 startTime='00:00:00',
21 21 endTime='23:59:59',
22 22 delay=0,
23 23 #set=0,
24 24 online=0,
25 25 walk=0)#1
26 26
27 27 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
28 28 opObj11 = procUnitConfObjA.addOperation(name='Block360')
29 29 opObj11.addParameter(name='n', value='10', format='int')
30 30
31 31 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
32 32 #opObj11.addParameter(name='save', value=figpath)
33 33 #opObj11.addParameter(name='save_period', value=1)
34 34 #opObj11 = procUnitConfObjA.addOperation(name='PowerPlot', optype='other')#PulsepairPowerPlot
35 35 #opObj11 = procUnitConfObjA.addOperation(name='PPSignalPlot', optype='other')
36 36
37 37 controllerObj.start()
@@ -1,189 +1,189
1 1 import os,numpy,h5py
2 2 from shutil import copyfile
3 3 import sys,time
4 4
5 5 def isNumber(str):
6 6 try:
7 7 float(str)
8 8 return True
9 9 except:
10 10 return False
11 11
12 12 def getfirstFilefromPath(path,meta,ext):
13 13 validFilelist = []
14 14 fileList = os.listdir(path)
15 15 if len(fileList)<1:
16 16 return None
17 17 # meta 1234 567 8-18 BCDE
18 18 # H,D,PE YYYY DDD EPOC .ext
19 19
20 20 for thisFile in fileList:
21 21 if meta =="PE":
22 22 try:
23 23 number= int(thisFile[len(meta)+7:len(meta)+17])
24 24 except:
25 25 print("There is a file or folder with different format")
26 26 if meta == "D":
27 27 try:
28 28 number= int(thisFile[8:11])
29 29 except:
30 30 print("There is a file or folder with different format")
31 31
32 32 if not isNumber(str=number):
33 33 continue
34 34 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
35 35 continue
36 36 validFilelist.sort()
37 37 validFilelist.append(thisFile)
38 38 if len(validFilelist)>0:
39 39 validFilelist = sorted(validFilelist,key=str.lower)
40 40 return validFilelist
41 41 return None
42 42
43 43 def gettimeutcfromDirFilename(path,file):
44 44 dir_file= path+"/"+file
45 45 fp = h5py.File(dir_file,'r')
46 46 epoc = fp['Metadata'].get('utctimeInit')[()]
47 47 fp.close()
48 48 return epoc
49 49
50 50 def getDatavaluefromDirFilename(path,file,value):
51 51 dir_file= path+"/"+file
52 52 fp = h5py.File(dir_file,'r')
53 53 array = fp['Data'].get(value)[()]
54 54 fp.close()
55 55 return array
56 56
57 57
58 58 #·········· Pedestal speed ··········
59 59 w = input ("Enter the pedestal speed: ")
60 60 w = 4
61 61 w = float(w)
62 62 #·········· Minimum resolution in degrees ··········
63 63 alfa = input ("Enter the minimum resolution in degrees: ")
64 64 alfa = 1
65 65 alfa = float(alfa)
66 66 #·········· Experiment IPP ··········
67 67 IPP = input ("Enter the experiment IPP: ")
68 68 IPP = 0.0004
69 69 IPP = float(IPP)
70 70 #·········· MODE ··········
71 71 mode = input ("Enter the experiment MODE, T or F: ")
72 72 mode = "T"
73 73 mode = str(mode)
74 74
75 75 #·········· Time to sweep the minimum resolution ··········
76 76 #·········· MCU: var_ang = w * (var_tiempo) ··········
77 77 var_tiempo = alfa/w
78 78 #·········· Equivalent time in profiles ··········
79 79 #·········· var_tiempo = IPP * ( num_perfiles ) ··········
80 80 num_perfiles = int(var_tiempo/IPP)
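# A worked check with the hard-coded values above (w = 4 deg/s, alfa = 1 deg,
# IPP = 0.0004 s): var_tiempo = 1/4 = 0.25 s per angular step, so
# num_perfiles = int(0.25/0.0004) = 625 profiles per step.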
81 81
82 82 #·········· DATA PEDESTAL ··········
83 dir_pedestal = "/home/developer/Downloads/Pedestal/P2021064"
83 dir_pedestal = "/home/developer/Downloads/Pedestal/P2021093"
84 84 #·········· DATA ADQ ··········
85 85 if mode=="T":
86 dir_adq = "/home/developer/Downloads/HDF5_TESTPP2/d2021064" # Time domain
86 dir_adq = "/home/developer/Downloads/HDF5_TESTPP2V3/d2021093" # Time domain
87 87 else:
88 88 dir_adq = "/home/developer/Downloads/hdf5_test/d2021053" # Frequency domain
89 89
90 90 print( "Velocidad angular :", w)
91 91 print( "Resolucion minima en grados :", alfa)
92 92 print( "Numero de perfiles equivalente:", num_perfiles)
93 93 print( "Mode :", mode)
94 94
95 95 #·········· First File ··········
96 96 list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
97 97 list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
98 98 print("list_pedestal")
99 99 #print(list_pedestal)
100 100 print("list_adq")
101 101 #print(list_adq)
102 102 #sys.exit(0)
103 103 #·········· utc time ··········
104 104 utc_pedestal= gettimeutcfromDirFilename(path=dir_pedestal,file=list_pedestal[0])
105 105 utc_adq = gettimeutcfromDirFilename(path=dir_adq ,file=list_adq[0])
106 106
107 107 print("utc_pedestal :",utc_pedestal)
108 108 print("utc_adq :",utc_adq)
109 109 #sys.exit(0)
110 110
111 111 #·········· Relation: utc_adq (+/-) var_tiempo*nro_file = utc_pedestal
112 112 time_Interval_p = 0.01
113 113 n_perfiles_p = 100 # samples per pedestal file
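# A worked sketch with these constants: each pedestal file spans
# time_Interval_p*n_perfiles_p = 0.01*100 = 1 s, so nro_file below is simply
# int(utc_adq - utc_pedestal) files ahead, and, after re-reading utc_pedestal
# from that file, nro_key_p = int((utc_adq - utc_pedestal)/0.01) picks the
# matching sample inside it.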
114 114
115 115
116 116 if utc_adq>utc_pedestal:
117 117 nro_file = int((utc_adq - utc_pedestal)/(time_Interval_p*n_perfiles_p))
118 118 print("nro_file",nro_file)
119 119 ff_pedestal = list_pedestal[nro_file]
120 120 print(ff_pedestal)
121 121 utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal,file=ff_pedestal)
122 122 print(utc_pedestal)
123 123 nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
124 124 print(nro_key_p)
125 125 #sys.exit(0)
126 126 if utc_adq >utc_pedestal:
127 127 ff_pedestal = ff_pedestal
128 128 else:
129 129 nro_file = nro_file-1
130 130 ff_pedestal = list_pedestal[nro_file]
131 131 utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal,file=ff_pedestal)
132 132 angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
133 133 nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
134 134 print("nro_file :",nro_file)
135 135 print("name_file :",ff_pedestal)
136 136 print("utc_pedestal_file :",utc_pedestal)
137 137 print("nro_key_p :",nro_key_p)
138 138 print("utc_pedestal_init :",utc_pedestal+nro_key_p*time_Interval_p)
139 139 print("angulo_array :",angulo[nro_key_p])
140 140 #4+25+25+25+21
141 141 #while True:
142 142 list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
143 143 list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
144 144
145 145 nro_file = nro_file #10
146 146 nro_key_perfil = nro_key_p
147 147 blocksPerFile = 100 ##### change this depending on the blocks per file in the ADQ data
148 148 wr_path = "/home/developer/Downloads/HDF5_WR/"
149 149 # Read the acquisition files to append the azimuth
150 150 # file factor
151 151 #f_a_p = tiempo_file_1_adq/tiempo_file_1_ped=25/1 = 25
152 152 tiempo_file_1_adq=25
153 153 tiempo_file_1_ped=1
154 154 f_a_p= int(tiempo_file_1_adq/tiempo_file_1_ped)
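# With blocksPerFile = 100 blocks per 25 s ADQ file, each block lasts 0.25 s;
# at one pedestal sample every 0.01 s that gives f_a_p = 25 pedestal samples
# per ADQ block, which is the stride used for 'iterador' in the loop below.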
155 155 for thisFile in range(len(list_adq)):
156 156 print("thisFileAdq",thisFile)
157 157 angulo_adq = numpy.zeros(blocksPerFile)
158 158 tmp = 0
159 159 for j in range(blocksPerFile):
160 160 iterador = nro_key_perfil + f_a_p*(j-tmp)
161 161 #print("iterador",iterador)
162 162 if iterador < n_perfiles_p:
163 163 nro_file = nro_file
164 164 else:
165 165 nro_file = nro_file+1
166 166 dif = blocksPerFile-(nro_key_perfil+f_a_p*(j-tmp-1))
167 167 tmp = j
168 168 nro_key_perfil= f_a_p-dif
169 169 iterador = nro_key_perfil
170 170 #print(iterador)
171 171 print("nro_file",nro_file)
172 172 ff_pedestal = list_pedestal[nro_file]
173 173 angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
174 174 angulo_adq[j]= angulo[iterador]
175 175 copyfile(dir_adq+"/"+list_adq[thisFile],wr_path+list_adq[thisFile])
176 176 fp = h5py.File(wr_path+list_adq[thisFile],'a')
177 177 #grp = fp.create_group("Pedestal")
178 178 grp = fp['Metadata']
179 179 #sgrp = grp.create_group('Pedestal')
180 180 dset = grp.create_dataset("azimuth" , data=angulo_adq)
181 181 fp.close()
182 182 print("Angulo",angulo_adq)
183 183 print("Angulo",len(angulo_adq))
184 184 nro_key_perfil=iterador + f_a_p
185 185 if nro_key_perfil< n_perfiles_p:
186 186 nro_file = nro_file
187 187 else:
188 188 nro_file = nro_file+1
189 189 nro_key_perfil= nro_key_p