##// END OF EJS Templates
hora y fecha en el primer bloque hdf5 >> consola
joabAM -
r1400:005589ae0527
parent child
Show More
@@ -1,659 +1,659
1 1 ''''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
import os
import sys
import glob
import fnmatch
import datetime
import time
import re

import h5py
import numpy
from numpy import imag

import schainpy.admin
from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
from schainpy.model.data.jrodata import Voltage
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator

try:
    from gevent import sleep
except:
    from time import sleep
28 28
29 29
30 30 class AMISRReader(ProcessingUnit):
31 31 '''
32 32 classdocs
33 33 '''
34 34
35 35 def __init__(self):
36 36 '''
37 37 Constructor
38 38 '''
39 39
40 40 ProcessingUnit.__init__(self)
41 41
42 42 self.set = None
43 43 self.subset = None
44 44 self.extension_file = '.h5'
45 45 self.dtc_str = 'dtc'
46 46 self.dtc_id = 0
47 47 self.status = True
48 48 self.isConfig = False
49 49 self.dirnameList = []
50 50 self.filenameList = []
51 51 self.fileIndex = None
52 52 self.flagNoMoreFiles = False
53 53 self.flagIsNewFile = 0
54 54 self.filename = ''
55 55 self.amisrFilePointer = None
56 56 self.realBeamCode = []
57 57 self.beamCodeMap = None
58 58 self.azimuthList = []
59 59 self.elevationList = []
60 60 self.dataShape = None
61 61
62 62
63 63
64 64 self.profileIndex = 0
65 65
66 66
67 67 self.beamCodeByFrame = None
68 68 self.radacTimeByFrame = None
69 69
70 70 self.dataset = None
71 71
72 72 self.__firstFile = True
73 73
74 74 self.buffer = None
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is really necessary create the output object in the initializer
81 81 self.dataOut = Voltage()
82 82 self.dataOut.error=False
83 83
84 84
85 85 def setup(self,path=None,
86 86 startDate=None,
87 87 endDate=None,
88 88 startTime=None,
89 89 endTime=None,
90 90 walk=True,
91 91 timezone='ut',
92 92 all=0,
93 93 code = None,
94 94 nCode = 0,
95 95 nBaud = 0,
96 96 online=False):
97 97
98 98
99 99
100 100 self.timezone = timezone
101 101 self.all = all
102 102 self.online = online
103 103
104 104 self.code = code
105 105 self.nCode = int(nCode)
106 106 self.nBaud = int(nBaud)
107 107
108 108
109 109
110 110 #self.findFiles()
111 111 if not(online):
112 112 #Busqueda de archivos offline
113 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
114 114 else:
115 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
116 116
117 117 if not(self.filenameList):
118 print("There is no files into the folder: %s"%(path))
118 raise schainpy.admin.SchainWarning("There is no files into the folder: %s"%(path))
119 119 sys.exit()
120 120
121 121 self.fileIndex = 0
122 122
123 123 self.readNextFile(online)
124 124
125 125 '''
126 126 Add code
127 127 '''
128 128 self.isConfig = True
129 129 # print("Setup Done")
130 130 pass
131 131
132 132
133 133 def readAMISRHeader(self,fp):
134 134
135 135 if self.isConfig and (not self.flagNoMoreFiles):
136 136 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
137 137 if self.dataShape != newShape and newShape != None:
138 print("\nNEW FILE HAS A DIFFERENT SHAPE")
138 raise schainpy.admin.SchainError("NEW FILE HAS A DIFFERENT SHAPE: ")
139 139 print(self.dataShape,newShape,"\n")
140 140 return 0
141 141 else:
142 142 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
143 143
144 144
145 145 header = 'Raw11/Data/RadacHeader'
146 146 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
147 147 if (self.startDate> datetime.date(2021, 7, 15)): #Se cambiΓ³ la forma de extracciΓ³n de Apuntes el 17
148 148 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
149 149 self.trueBeams = self.beamcodeFile.split("\n")
150 150 self.trueBeams.pop()#remove last
151 151 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
152 152 self.beamCode = [int(x, 16) for x in self.realBeamCode]
153 153 else:
154 154 _beamCode= fp.get('Raw11/Data/Beamcodes') #se usa la manera previa al cambio de apuntes
155 155 self.beamCode = _beamCode[0,:]
156 156
157 157 if self.beamCodeMap == None:
158 158 self.beamCodeMap = fp['Setup/BeamcodeMap']
159 159 for beam in self.beamCode:
160 160 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
161 161 beamAziElev = beamAziElev[0].squeeze()
162 162 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
163 163 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
164 164 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
165 165 #print(self.beamCode)
166 166 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
167 167 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
168 168 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
169 169 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
170 170 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
171 171 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE ANDE CALCULATE THE REST WITH IPP*nindexprofile
172 172 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
173 173 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
174 174 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
175 175 self.frequency = fp.get('Rx/Frequency')
176 176 txAus = fp.get('Raw11/Data/Pulsewidth')
177 177
178 178
179 179 self.nblocks = self.pulseCount.shape[0] #nblocks
180 180
181 181 self.nprofiles = self.pulseCount.shape[1] #nprofile
182 182 self.nsa = self.nsamplesPulse[0,0] #ngates
183 183 self.nchannels = len(self.beamCode)
184 184 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
185 185 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
186 186 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
187 187
188 188 #filling radar controller header parameters
189 189 self.__ippKm = self.ippSeconds *.15*1e6 # in km
190 190 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
191 191 self.__txB = 0
192 192 nWindows=1
193 193 self.__nSamples = self.nsa
194 194 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
195 195 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
196 196
197 197 #for now until understand why the code saved is different (code included even though code not in tuf file)
198 198 #self.__codeType = 0
199 199 # self.__nCode = None
200 200 # self.__nBaud = None
201 201 self.__code = self.code
202 202 self.__codeType = 0
203 203 if self.code != None:
204 204 self.__codeType = 1
205 205 self.__nCode = self.nCode
206 206 self.__nBaud = self.nBaud
207 207 #self.__code = 0
208 208
209 209 #filling system header parameters
210 210 self.__nSamples = self.nsa
211 211 self.newProfiles = self.nprofiles/self.nchannels
212 212 self.__channelList = list(range(self.nchannels))
213 213
214 214 self.__frequency = self.frequency[0][0]
215 215
216 216
217 217 return 1
218 218
219 219
220 220 def createBuffers(self):
221 221
222 222 pass
223 223
224 224 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
225 225 self.path = path
226 226 self.startDate = startDate
227 227 self.endDate = endDate
228 228 self.startTime = startTime
229 229 self.endTime = endTime
230 230 self.walk = walk
231 231
232 232 def __checkPath(self):
233 233 if os.path.exists(self.path):
234 234 self.status = 1
235 235 else:
236 236 self.status = 0
237 237 print('Path:%s does not exists'%self.path)
238 238
239 239 return
240 240
241 241
242 242 def __selDates(self, amisr_dirname_format):
243 243 try:
244 244 year = int(amisr_dirname_format[0:4])
245 245 month = int(amisr_dirname_format[4:6])
246 246 dom = int(amisr_dirname_format[6:8])
247 247 thisDate = datetime.date(year,month,dom)
248 248 #margen de un dΓ­a extra, igual luego se filtra for fecha y hora
249 249 if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
250 250 return amisr_dirname_format
251 251 except:
252 252 return None
253 253
254 254
255 255 def __findDataForDates(self,online=False):
256 256
257 257 if not(self.status):
258 258 return None
259 259
260 260 pat = '\d+.\d+'
261 261 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
262 262 dirnameList = [x for x in dirnameList if x!=None]
263 263 dirnameList = [x.string for x in dirnameList]
264 264 if not(online):
265 265 dirnameList = [self.__selDates(x) for x in dirnameList]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 if len(dirnameList)>0:
268 268 self.status = 1
269 269 self.dirnameList = dirnameList
270 270 self.dirnameList.sort()
271 271 else:
272 272 self.status = 0
273 273 return None
274 274
275 275 def __getTimeFromData(self):
276 276 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
277 277 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
278 278
279 279 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
280 280 print('........................................')
281 281 filter_filenameList = []
282 282 self.filenameList.sort()
283 283 #for i in range(len(self.filenameList)-1):
284 284 for i in range(len(self.filenameList)):
285 285 filename = self.filenameList[i]
286 286 fp = h5py.File(filename,'r')
287 287 time_str = fp.get('Time/RadacTimeString')
288 288
289 289 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
290 290 #startDateTimeStr_File = "2019-12-16 09:21:11"
291 291 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
292 292 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
293 293
294 294 #endDateTimeStr_File = "2019-12-16 11:10:11"
295 295 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
296 296 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 297 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 298
299 299 fp.close()
300 300
301 301 #print("check time", startDateTime_File)
302 302 if self.timezone == 'lt':
303 303 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
304 304 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
305 305 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
306 306 filter_filenameList.append(filename)
307 307
308 308 if (startDateTime_File>endDateTime_Reader):
309 309 break
310 310
311 311
312 312 filter_filenameList.sort()
313 313 self.filenameList = filter_filenameList
314 314
315 315 return 1
316 316
317 317 def __filterByGlob1(self, dirName):
318 318 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
319 319 filter_files.sort()
320 320 filterDict = {}
321 321 filterDict.setdefault(dirName)
322 322 filterDict[dirName] = filter_files
323 323 return filterDict
324 324
325 325 def __getFilenameList(self, fileListInKeys, dirList):
326 326 for value in fileListInKeys:
327 327 dirName = list(value.keys())[0]
328 328 for file in value[dirName]:
329 329 filename = os.path.join(dirName, file)
330 330 self.filenameList.append(filename)
331 331
332 332
333 333 def __selectDataForTimes(self, online=False):
334 334 #aun no esta implementado el filtro for tiempo
335 335 if not(self.status):
336 336 return None
337 337
338 338 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
339 339 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
340 340 self.__getFilenameList(fileListInKeys, dirList)
341 341 if not(online):
342 342 #filtro por tiempo
343 343 if not(self.all):
344 344 self.__getTimeFromData()
345 345
346 346 if len(self.filenameList)>0:
347 347 self.status = 1
348 348 self.filenameList.sort()
349 349 else:
350 350 self.status = 0
351 351 return None
352 352
353 353 else:
354 354 #get the last file - 1
355 355 self.filenameList = [self.filenameList[-2]]
356 356 new_dirnameList = []
357 357 for dirname in self.dirnameList:
358 358 junk = numpy.array([dirname in x for x in self.filenameList])
359 359 junk_sum = junk.sum()
360 360 if junk_sum > 0:
361 361 new_dirnameList.append(dirname)
362 362 self.dirnameList = new_dirnameList
363 363 return 1
364 364
365 365 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
366 366 endTime=datetime.time(23,59,59),walk=True):
367 367
368 368 if endDate ==None:
369 369 startDate = datetime.datetime.utcnow().date()
370 370 endDate = datetime.datetime.utcnow().date()
371 371
372 372 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
373 373
374 374 self.__checkPath()
375 375
376 376 self.__findDataForDates(online=True)
377 377
378 378 self.dirnameList = [self.dirnameList[-1]]
379 379
380 380 self.__selectDataForTimes(online=True)
381 381
382 382 return
383 383
384 384
385 385 def searchFilesOffLine(self,
386 386 path,
387 387 startDate,
388 388 endDate,
389 389 startTime=datetime.time(0,0,0),
390 390 endTime=datetime.time(23,59,59),
391 391 walk=True):
392 392
393 393 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
394 394
395 395 self.__checkPath()
396 396
397 397 self.__findDataForDates()
398 398
399 399 self.__selectDataForTimes()
400 400
401 401 for i in range(len(self.filenameList)):
402 402 print("%s" %(self.filenameList[i]))
403 403
404 404 return
405 405
406 406 def __setNextFileOffline(self):
407 407
408 408 try:
409 409 self.filename = self.filenameList[self.fileIndex]
410 410 self.amisrFilePointer = h5py.File(self.filename,'r')
411 411 self.fileIndex += 1
412 412 except:
413 413 self.flagNoMoreFiles = 1
414 print("No more Files")
414 raise schainpy.admin.SchainError('No more files to read')
415 415 return 0
416 416
417 417 self.flagIsNewFile = 1
418 418 print("Setting the file: %s"%self.filename)
419 419
420 420 return 1
421 421
422 422
423 423 def __setNextFileOnline(self):
424 424 filename = self.filenameList[0]
425 425 if self.__filename_online != None:
426 426 self.__selectDataForTimes(online=True)
427 427 filename = self.filenameList[0]
428 428 wait = 0
429 429 self.__waitForNewFile=300 ## DEBUG:
430 430 while self.__filename_online == filename:
431 431 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
432 432 if wait == 5:
433 433 self.flagNoMoreFiles = 1
434 434 return 0
435 435 sleep(self.__waitForNewFile)
436 436 self.__selectDataForTimes(online=True)
437 437 filename = self.filenameList[0]
438 438 wait += 1
439 439
440 440 self.__filename_online = filename
441 441
442 442 self.amisrFilePointer = h5py.File(filename,'r')
443 443 self.flagIsNewFile = 1
444 444 self.filename = filename
445 445 print("Setting the file: %s"%self.filename)
446 446 return 1
447 447
448 448
449 449 def readData(self):
450 450 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
451 451 re = buffer[:,:,:,0]
452 452 im = buffer[:,:,:,1]
453 453 dataset = re + im*1j
454 454
455 455 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
456 456 timeset = self.radacTime[:,0]
457 457
458 458 return dataset,timeset
459 459
460 460 def reshapeData(self):
461 461 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
462 462 channels = self.beamCodeByPulse[0,:]
463 463 nchan = self.nchannels
464 464 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
465 465 nblocks = self.nblocks
466 466 nsamples = self.nsa
467 467
468 468 #Dimensions : nChannels, nProfiles, nSamples
469 469 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
470 470 ############################################
471 471
472 472 for thisChannel in range(nchan):
473 473 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
474 474
475 475
476 476 new_block = numpy.transpose(new_block, (1,0,2,3))
477 477 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
478 478
479 479 return new_block
480 480
481 481 def updateIndexes(self):
482 482
483 483 pass
484 484
485 485 def fillJROHeader(self):
486 486
487 487 #fill radar controller header
488 488 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
489 489 txA=self.__txA,
490 490 txB=0,
491 491 nWindows=1,
492 492 nHeights=self.__nSamples,
493 493 firstHeight=self.__firstHeight,
494 494 deltaHeight=self.__deltaHeight,
495 495 codeType=self.__codeType,
496 496 nCode=self.__nCode, nBaud=self.__nBaud,
497 497 code = self.__code,
498 498 fClock=1)
499 499
500 500 #fill system header
501 501 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
502 502 nProfiles=self.newProfiles,
503 503 nChannels=len(self.__channelList),
504 504 adcResolution=14,
505 505 pciDioBusWidth=32)
506 506
507 507 self.dataOut.type = "Voltage"
508 508 self.dataOut.data = None
509 509 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
510 510 # self.dataOut.nChannels = 0
511 511
512 512 # self.dataOut.nHeights = 0
513 513
514 514 self.dataOut.nProfiles = self.newProfiles*self.nblocks
515 515 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
516 516 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
517 517 self.dataOut.heightList = ranges/1000.0 #km
518 518 self.dataOut.channelList = self.__channelList
519 519 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
520 520
521 521 # self.dataOut.channelIndexList = None
522 522
523 523
524 524 self.dataOut.azimuthList = numpy.array(self.azimuthList)
525 525 self.dataOut.elevationList = numpy.array(self.elevationList)
526 526 self.dataOut.codeList = numpy.array(self.beamCode)
527 527 #print(self.dataOut.elevationList)
528 528 self.dataOut.flagNoData = True
529 529
530 530 #Set to TRUE if the data is discontinuous
531 531 self.dataOut.flagDiscontinuousBlock = False
532 532
533 533 self.dataOut.utctime = None
534 534
535 535 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
536 536 if self.timezone == 'lt':
537 537 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
538 538 else:
539 539 self.dataOut.timeZone = 0 #by default time is UTC
540 540
541 541 self.dataOut.dstFlag = 0
542 542 self.dataOut.errorCount = 0
543 543 self.dataOut.nCohInt = 1
544 544 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
545 545 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
546 546 self.dataOut.flagShiftFFT = False
547 547 self.dataOut.ippSeconds = self.ippSeconds
548 548
549 549 #Time interval between profiles
550 550 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
551 551
552 552 self.dataOut.frequency = self.__frequency
553 553 self.dataOut.realtime = self.online
554 554 pass
555 555
556 556 def readNextFile(self,online=False):
557 557
558 558 if not(online):
559 559 newFile = self.__setNextFileOffline()
560 560 else:
561 561 newFile = self.__setNextFileOnline()
562 562
563 563 if not(newFile):
564 564 self.dataOut.error = True
565 565 return 0
566 566
567 567 if not self.readAMISRHeader(self.amisrFilePointer):
568 568 self.dataOut.error = True
569 569 return 0
570 570
571 571 self.createBuffers()
572 572 self.fillJROHeader()
573 573
574 574 #self.__firstFile = False
575 575
576 576
577 577
578 578 self.dataset,self.timeset = self.readData()
579 579
580 580 if self.endDate!=None:
581 581 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
582 582 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
583 583 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
584 584 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
585 585 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
586 586 if self.timezone == 'lt':
587 587 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
588 588 if (startDateTime_File>endDateTime_Reader):
589 589 return 0
590 590
591 591 self.jrodataset = self.reshapeData()
592 592 #----self.updateIndexes()
593 593 self.profileIndex = 0
594 594
595 595 return 1
596 596
597 597
598 598 def __hasNotDataInBuffer(self):
599 599 if self.profileIndex >= (self.newProfiles*self.nblocks):
600 600 return 1
601 601 return 0
602 602
603 603
604 604 def getData(self):
605 605
606 606 if self.flagNoMoreFiles:
607 607 self.dataOut.flagNoData = True
608 608 return 0
609 609
610 610 if self.__hasNotDataInBuffer():
611 611 if not (self.readNextFile(self.online)):
612 612 return 0
613 613
614 614
615 615 if self.dataset is None: # setear esta condicion cuando no hayan datos por leer
616 616 self.dataOut.flagNoData = True
617 617 return 0
618 618
619 619 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
620 620
621 621 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
622 622
623 623 #print("R_t",self.timeset)
624 624
625 625 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
626 626 #verificar basic header de jro data y ver si es compatible con este valor
627 627 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
628 628 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
629 629 indexblock = self.profileIndex/self.newProfiles
630 630 #print (indexblock, indexprof)
631 631 diffUTC = 0
632 632 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
633 633
634 634 #print("utc :",indexblock," __ ",t_comp)
635 635 #print(numpy.shape(self.timeset))
636 636 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
637 637 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
638 638
639 639 self.dataOut.profileIndex = self.profileIndex
640 640 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
641 641 self.dataOut.flagNoData = False
642 642 # if indexprof == 0:
643 643 # print("kamisr: ",self.dataOut.utctime)
644 644
645 645 self.profileIndex += 1
646 646
647 647 return self.dataOut.data
648 648
649 649
650 650 def run(self, **kwargs):
651 651 '''
652 652 This method will be called many times so here you should put all your code
653 653 '''
654 654 #print("running kamisr")
655 655 if not self.isConfig:
656 656 self.setup(**kwargs)
657 657 self.isConfig = True
658 658
659 659 self.getData()
@@ -1,652 +1,660
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
class HDFReader(Reader, ProcessingUnit):
    """Processing unit to read HDF5 format files

    This unit reads HDF5 files created with `HDFWriter`, which contain by
    default two groups, Data and Metadata; every variable is exposed as a
    `dataOut` attribute.
    Any HDF5 file can be read by giving its structure in the `description`
    parameter; extra metadata values can be added with `extras`.

    Parameters:
    -----------
    path : str
        Path where files are located.
    startDate : date
        Start date of the files
    endDate : list
        End date of the files
    startTime : time
        Start time of the files
    endTime : time
        End time of the files
    description : dict, optional
        Dictionary with the description of the HDF5 file
    extras : dict, optional
        Dictionary with extra metadata to be added to `dataOut`

    Examples
    --------

    desc = {
        'Data': {
            'data_output': ['u', 'v', 'w'],
            'utctime': 'timestamps',
        } ,
        'Metadata': {
            'heightList': 'heights'
        }
    }

    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    extras = {
        'timeZone': 300
    }

    reader = project.addReadUnit(
        name='HDFReader',
        path='/path/to/files',
        startDate='2019/01/01',
        endDate='2019/01/31',
        startTime='00:00:00',
        endTime='23:59:59',
        # description=json.dumps(desc),
        # extras=json.dumps(extras),
        )

    """

    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']

    def __init__(self):
        ProcessingUnit.__init__(self)
        # output container plus file-format defaults
        self.dataOut = Parameters()
        self.ext = ".hdf5"
        self.optchar = "D"
        self.meta = {}
        self.data = {}
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.description = {}
        self.extras = {}
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"
        self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 pathname, filename = os.path.split(fullpath)
112 112 #print(pathname,filename)
113 113 try:
114 114 fullpath = next(fullpath)
115 115
116 116 except:
117 117 fullpath = None
118 118
119 119 if fullpath:
120 120 break
121 121
122 122 log.warning(
123 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 124 self.delay, self.path, nTries + 1),
125 125 self.name)
126 126 time.sleep(self.delay)
127 127
128 128 if not(fullpath):
129 129 raise schainpy.admin.SchainError(
130 130 'There isn\'t any valid file in {}'.format(self.path))
131 131
132 132 pathname, filename = os.path.split(fullpath)
133 133 self.year = int(filename[1:5])
134 134 self.doy = int(filename[5:8])
135 135 self.set = int(filename[8:11]) - 1
136 136 else:
137 137 log.log("Searching files in {}".format(self.path), self.name)
138 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
140 140
141 141 self.setNextFile()
142 142
143 143 return
144 144
145 145
146 146 def readFirstHeader(self):
147 147 '''Read metadata and data'''
148 148
149 149 self.__readMetadata()
150 150 self.__readData()
151 151 self.__setBlockList()
152 152
153 153 if 'type' in self.meta:
154 154 self.dataOut = eval(self.meta['type'])()
155 155
156 156 for attr in self.meta:
157 157 #print("attr: ", attr)
158 158 setattr(self.dataOut, attr, self.meta[attr])
159 159
160 160
161 161 self.blockIndex = 0
162 162
163 163 return
164 164
165 165 def __setBlockList(self):
166 166 '''
167 167 Selects the data within the times defined
168 168
169 169 self.fp
170 170 self.startTime
171 171 self.endTime
172 172 self.blockList
173 173 self.blocksPerFile
174 174
175 175 '''
176 176
177 177 startTime = self.startTime
178 178 endTime = self.endTime
179 179 thisUtcTime = self.data['utctime'] + self.utcoffset
180 180 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
181 181 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
182 182 self.startFileDatetime = thisDatetime
183 183 thisDate = thisDatetime.date()
184 184 thisTime = thisDatetime.time()
185 185
186 186 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
187 187 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
188 188
189 189 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
190 190
191 191 self.blockList = ind
192 192 self.blocksPerFile = len(ind)
193 193 self.blocksPerFile = len(thisUtcTime)
194 194 return
195 195
196 196 def __readMetadata(self):
197 197 '''
198 198 Reads Metadata
199 199 '''
200 200
201 201 meta = {}
202 202
203 203 if self.description:
204 204 for key, value in self.description['Metadata'].items():
205 205 meta[key] = self.fp[value][()]
206 206 else:
207 207 grp = self.fp['Metadata']
208 208 for name in grp:
209 209 meta[name] = grp[name][()]
210 210
211 211 if self.extras:
212 212 for key, value in self.extras.items():
213 213 meta[key] = value
214 214 self.meta = meta
215 215
216 216 return
217 217
218 218
219 219
220 220 def checkForRealPath(self, nextFile, nextDay):
221 221
222 222 # print("check FRP")
223 223 # dt = self.startFileDatetime + datetime.timedelta(1)
224 224 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
225 225 # fullfilename = os.path.join(self.path, filename)
226 226 # print("check Path ",fullfilename,filename)
227 227 # if os.path.exists(fullfilename):
228 228 # return fullfilename, filename
229 229 # return None, filename
230 230 return None,None
231 231
232 232 def __readData(self):
233 233
234 234 data = {}
235 235
236 236 if self.description:
237 237 for key, value in self.description['Data'].items():
238 238 if isinstance(value, str):
239 239 if isinstance(self.fp[value], h5py.Dataset):
240 240 data[key] = self.fp[value][()]
241 241 elif isinstance(self.fp[value], h5py.Group):
242 242 array = []
243 243 for ch in self.fp[value]:
244 244 array.append(self.fp[value][ch][()])
245 245 data[key] = numpy.array(array)
246 246 elif isinstance(value, list):
247 247 array = []
248 248 for ch in value:
249 249 array.append(self.fp[ch][()])
250 250 data[key] = numpy.array(array)
251 251 else:
252 252 grp = self.fp['Data']
253 253 for name in grp:
254 254 if isinstance(grp[name], h5py.Dataset):
255 255 array = grp[name][()]
256 256 elif isinstance(grp[name], h5py.Group):
257 257 array = []
258 258 for ch in grp[name]:
259 259 array.append(grp[name][ch][()])
260 260 array = numpy.array(array)
261 261 else:
262 262 log.warning('Unknown type: {}'.format(name))
263 263
264 264 if name in self.description:
265 265 key = self.description[name]
266 266 else:
267 267 key = name
268 268 data[key] = array
269 269
270 270 self.data = data
271 271 return
272 272
273 273 def getData(self):
274 274 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
275 275 self.dataOut.flagNoData = True
276 276 self.blockIndex = self.blocksPerFile
277 277 #self.dataOut.error = True TERMINA EL PROGRAMA, removido
278 278 return
279 279 for attr in self.data:
280 280 if self.data[attr].ndim == 1:
281 281 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
282 282 else:
283 283 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
284 284
285 285 self.dataOut.flagNoData = False
286 286 self.blockIndex += 1
287 287
288 log.log("Block No. {}/{} -> {}".format(
289 self.blockIndex,
290 self.blocksPerFile,
291 self.dataOut.datatime.ctime()), self.name)
288 if self.blockIndex == 1:
289 log.log("Block No. {}/{} -> {}".format(
290 self.blockIndex,
291 self.blocksPerFile,
292 self.dataOut.datatime.ctime()), self.name)
293 else:
294 log.log("Block No. {}/{} ".format(
295 self.blockIndex,
296 self.blocksPerFile),self.name)
297
292 298
293 299 return
294 300
295 301 def run(self, **kwargs):
296 302
297 303 if not(self.isConfig):
298 304 self.setup(**kwargs)
299 305 self.isConfig = True
300 306
301 307 if self.blockIndex == self.blocksPerFile:
302 308 self.setNextFile()
303 309
304 310 self.getData()
305 311
306 312 return
307 313
@MPDecorator
class HDFWriter(Operation):
    """Operation to write HDF5 files.

    The HDF5 file contains by default two groups Data and Metadata where
    you can save any `dataOut` attribute specified by `dataList` and `metadataList`
    parameters, data attributes are normally time dependent where the metadata
    are not.
    It is possible to customize the structure of the HDF5 file with the
    optional description parameter see the examples.

    Parameters:
    -----------
    path : str
        Path where files will be saved.
    blocksPerFile : int
        Number of blocks per file
    metadataList : list
        List of the dataOut attributes that will be saved as metadata
    dataList : int
        List of the dataOut attributes that will be saved as data
    setType : bool
        If True the name of the files corresponds to the timestamp of the data
    description : dict, optional
        Dictionary with the desired description of the HDF5 file

    Examples
    --------

    desc = {
        'data_output': {'winds': ['z', 'w', 'v']},
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'data_output': ['z', 'w', 'v'],
        'utctime': 'timestamps',
        'heightList': 'heights'
    }
    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    writer = proc_unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value='/path/to/file')
    writer.addParameter(name='blocksPerFile', value='32')
    writer.addParameter(name='metadataList', value='heightList,timeZone')
    writer.addParameter(name='dataList',value='data_output,utctime')
    # writer.addParameter(name='description',value=json.dumps(desc))

    """

    ext = ".hdf5"
    optchar = "D"
    filename = None
    path = None
    setFile = None
    fp = None
    firsttime = True
    # Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None
    # Data Arrays
    dataList = None
    metadataList = None
    currentDay = None
    lastTime = None

    def __init__(self):

        Operation.__init__(self)
        return

    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
        """Store the configuration and build the dataset descriptors.

        For each attribute in `dataList` a descriptor dict is created with
        its name, dimensionality and (for arrays) shape/dtype, which
        `writeData` later uses to create the HDF5 datasets.
        """
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = [s.strip() for s in dataList]
        self.setType = setType
        self.description = description

        if self.metadataList is None:
            self.metadataList = self.dataOut.metadata_list

        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            if hasattr(self.dataOut, self.dataList[i]):
                dataAux = getattr(self.dataOut, self.dataList[i])
                dsDict['variable'] = self.dataList[i]
            else:
                # Format the attribute name into the message (the original
                # passed it as the log tag, leaving the placeholder empty).
                log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
                continue

            if dataAux is None:
                continue
            # numpy.float was removed in NumPy 1.24; numpy.floating covers
            # all numpy float scalar types.
            elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
                dsDict['nDim'] = 0
            else:
                dsDict['nDim'] = len(dataAux.shape)
                dsDict['shape'] = dataAux.shape
                dsDict['dsNumber'] = dataAux.shape[0]
                dsDict['dtype'] = dataAux.dtype

            dsList.append(dsDict)

        self.dsList = dsList
        self.currentDay = self.dataOut.datatime.date()

    def timeFlag(self):
        """Return True when a new file must be started because the day
        changed or more than 3 hours elapsed between consecutive data."""
        currentTime = self.dataOut.utctime
        timeTuple = time.localtime(currentTime)
        dataDay = timeTuple.tm_yday
        #print("time UTC: ",currentTime, self.dataOut.datatime)
        if self.lastTime is None:
            self.lastTime = currentTime
            self.currentDay = dataDay
            return False

        timeDiff = currentTime - self.lastTime

        # New file if the day changed or if the gap between one datum and
        # the next exceeds three hours.
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False

    def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
            dataList=[], setType=None, description={}):
        """Per-block entry point: configure and open the first file on the
        first call, then append the current block."""
        self.dataOut = dataOut
        if not(self.isConfig):
            self.setup(path=path, blocksPerFile=blocksPerFile,
                       metadataList=metadataList, dataList=dataList,
                       setType=setType, description=description)

            self.isConfig = True
            self.setNextFile()

        self.putData()
        return

    def setNextFile(self):
        """Create the next output file (dYYYYDDD subfolder, DYYYYDDDSSS.hdf5
        name) and write metadata plus empty datasets into it."""
        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.gmtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
        fullpath = os.path.join(path, subfolder)

        if os.path.exists(fullpath):
            filesList = os.listdir(fullpath)
            filesList = [k for k in filesList if k.startswith(self.optchar)]
            if len( filesList ) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # Expected filename layout (hex offsets):
                # 0 1234 567 89A BCDE
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # Resume the set counter from the last existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1  # initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            # Set number derived from the time of day (minutes since midnight)
            setFile = timeTuple.tm_hour*60+timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        self.filename = os.path.join( path, subfolder, file )

        # Setting HDF5 File
        self.fp = h5py.File(self.filename, 'w')
        # write metadata
        self.writeMetadata(self.fp)
        # Write data
        self.writeData(self.fp)

    def getLabel(self, name, x=None):
        """Resolve the HDF5 label for attribute `name` using the optional
        `description` mapping; with `x` given, resolve the per-channel label
        (falls back to 'pairNN'/'channelNN')."""
        if x is None:
            if 'Data' in self.description:
                data = self.description['Data']
                if 'Metadata' in self.description:
                    data.update(self.description['Metadata'])
            else:
                data = self.description
            if name in data:
                if isinstance(data[name], str):
                    return data[name]
                elif isinstance(data[name], list):
                    return None
                elif isinstance(data[name], dict):
                    # A dict maps the group label (key) to channel labels
                    for key, value in data[name].items():
                        return key
            return name
        else:
            if 'Metadata' in self.description:
                meta = self.description['Metadata']
            else:
                meta = self.description
            if name in meta:
                if isinstance(meta[name], list):
                    return meta[name][x]
                elif isinstance(meta[name], dict):
                    for key, value in meta[name].items():
                        return value[x]
            if 'cspc' in name:
                return 'pair{:02d}'.format(x)
            else:
                return 'channel{:02d}'.format(x)

    def writeMetadata(self, fp):
        """Write every `metadataList` attribute of dataOut into the Metadata
        group (booleans are stored as 1/0)."""
        if self.description:
            if 'Metadata' in self.description:
                grp = fp.create_group('Metadata')
            else:
                grp = fp
        else:
            grp = fp.create_group('Metadata')

        for i in range(len(self.metadataList)):
            if not hasattr(self.dataOut, self.metadataList[i]):
                log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                continue
            value = getattr(self.dataOut, self.metadataList[i])
            if isinstance(value, bool):
                if value is True:
                    value = 1
                else:
                    value = 0
            grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
        return

    def writeData(self, fp):
        """Create the (chunked, blocksPerFile-long) datasets described by
        `self.dsList` inside the Data group and reset the block counter."""
        if self.description:
            if 'Data' in self.description:
                grp = fp.create_group('Data')
            else:
                grp = fp
        else:
            grp = fp.create_group('Data')

        dtsets = []
        data = []

        for dsInfo in self.dsList:
            if dsInfo['nDim'] == 0:
                # Scalar attribute: one float64 value per block
                ds = grp.create_dataset(
                    self.getLabel(dsInfo['variable']),
                    (self.blocksPerFile, ),
                    chunks=True,
                    dtype=numpy.float64)
                dtsets.append(ds)
                data.append((dsInfo['variable'], -1))
            else:
                label = self.getLabel(dsInfo['variable'])
                if label is not None:
                    sgrp = grp.create_group(label)
                else:
                    sgrp = grp
                # One dataset per channel (first axis of the array)
                for i in range(dsInfo['dsNumber']):
                    ds = sgrp.create_dataset(
                        self.getLabel(dsInfo['variable'], i),
                        (self.blocksPerFile, ) + dsInfo['shape'][1:],
                        chunks=True,
                        dtype=dsInfo['dtype'])
                    dtsets.append(ds)
                    data.append((dsInfo['variable'], i))
        fp.flush()

        log.log('Creating file: {}'.format(fp.filename), self.name)

        self.ds = dtsets
        self.data = data
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):
        """Append the current dataOut block to the open file, rolling over
        to a new file when full or when `timeFlag` fires."""
        if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
            self.closeFile()
            self.setNextFile()

        for i, ds in enumerate(self.ds):
            attr, ch = self.data[i]
            if ch == -1:
                ds[self.blockIndex] = getattr(self.dataOut, attr)
            else:
                ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]

        self.fp.flush()
        self.blockIndex += 1
        # Log the block datetime only for the first block of each file
        if self.blockIndex == 1:
            log.log('Block No. {}/{} --> {}'.format(self.blockIndex, self.blocksPerFile,self.dataOut.datatime.ctime()), self.name)
        else:
            log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
        return

    def closeFile(self):
        """Shrink datasets to the number of blocks actually written, then
        flush and close the file."""
        if self.blockIndex != self.blocksPerFile:
            for ds in self.ds:
                ds.resize(self.blockIndex, axis=0)

        if self.fp:
            self.fp.flush()
            self.fp.close()

    def close(self):

        self.closeFile()
General Comments 0
You need to be logged in to leave comments. Login now