RECORDING A SINGLE CHANNEL (H OR V)
avaldez
r1596:5efbcbe7bd6e
@@ -1,739 +1,748
1 1 from email.utils import localtime
2 2 import os
3 3 import time
4 4 import datetime
5 5
6 6 import numpy
7 7 import h5py
8 8
9 9 import schainpy.admin
10 10 from schainpy.model.data.jrodata import *
11 11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
12 12 from schainpy.model.io.jroIO_base import *
13 13 from schainpy.utils import log
14 14
15 15
16 16 class HDFReader(Reader, ProcessingUnit):
17 17 """Processing unit to read HDF5 format files
18 18
19 19 This unit reads HDF5 files created with the `HDFWriter` operation. By default the
20 20 files contain two groups, Data and Metadata, and every variable is loaded as a `dataOut`
21 21 attribute.
22 22 Any HDF5 file can be read by giving its structure in the `description`
23 23 parameter; extra values can also be added to the metadata with the `extras` parameter.
24 24
25 25 Parameters:
26 26 -----------
27 27 path : str
28 28 Path where files are located.
29 29 startDate : date
30 30 Start date of the files
31 31 endDate : date
32 32 End date of the files
33 33 startTime : time
34 34 Start time of the files
35 35 endTime : time
36 36 End time of the files
37 37 description : dict, optional
38 38 Dictionary with the description of the HDF5 file
39 39 extras : dict, optional
40 40 Dictionary with extra metadata to be added to `dataOut`
41 41
42 42 Examples
43 43 --------
44 44
45 45 desc = {
46 46 'Data': {
47 47 'data_output': ['u', 'v', 'w'],
48 48 'utctime': 'timestamps',
49 49 } ,
50 50 'Metadata': {
51 51 'heightList': 'heights'
52 52 }
53 53 }
54 54
55 55 desc = {
56 56 'Data': {
57 57 'data_output': 'winds',
58 58 'utctime': 'timestamps'
59 59 },
60 60 'Metadata': {
61 61 'heightList': 'heights'
62 62 }
63 63 }
64 64
65 65 extras = {
66 66 'timeZone': 300
67 67 }
68 68
69 69 reader = project.addReadUnit(
70 70 name='HDFReader',
71 71 path='/path/to/files',
72 72 startDate='2019/01/01',
73 73 endDate='2019/01/31',
74 74 startTime='00:00:00',
75 75 endTime='23:59:59',
76 76 # description=json.dumps(desc),
77 77 # extras=json.dumps(extras),
78 78 )
79 79
80 80 """
81 81
82 82 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
83 83
84 84 def __init__(self):
85 85 ProcessingUnit.__init__(self)
86 86 self.dataOut = Parameters()
87 87 self.ext = ".hdf5"
88 88 self.optchar = "D"
89 89 self.meta = {}
90 90 self.data = {}
91 91 self.open_file = h5py.File
92 92 self.open_mode = 'r'
93 93 self.description = {}
94 94 self.extras = {}
95 95 self.filefmt = "*%Y%j***"
96 96 self.folderfmt = "*%Y%j"
97 97 self.utcoffset = 0
98 98 self.filter = None
99 99 self.dparam = None
100 100
101 101 def setup(self, **kwargs):
102 102
103 103 self.set_kwargs(**kwargs)
104 104 if not self.ext.startswith('.'):
105 105 self.ext = '.{}'.format(self.ext)
106 106
107 107 if self.online:
108 108 log.log("Searching files in online mode...", self.name)
109 109
110 110 for nTries in range(self.nTries):
111 111 fullpath = self.searchFilesOnLine(self.path, self.startDate,
112 112 self.endDate, self.expLabel, self.ext, self.walk,
113 113 self.filefmt, self.folderfmt,self.filter)
114 114 try:
115 115 fullpath = next(fullpath)
116 116 except:
117 117 fullpath = None
118 118
119 119 if fullpath:
120 120 break
121 121
122 122 log.warning(
123 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 124 self.delay, self.path, nTries + 1),
125 125 self.name)
126 126 time.sleep(self.delay)
127 127
128 128 if not(fullpath):
129 129 raise schainpy.admin.SchainError(
130 130 'There isn\'t any valid file in {}'.format(self.path))
131 131
132 132 pathname, filename = os.path.split(fullpath)
133 133 self.year = int(filename[1:5])
134 134 self.doy = int(filename[5:8])
135 135 self.set = int(filename[8:11]) - 1
136 136 else:
137 137 log.log("Searching files in {}".format(self.path), self.name)
138 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt,self.filter)
140 140
141 141 self.setNextFile()
142 142
143 143 return
144 144
145 145 def readFirstHeader(self):
146 146 '''Read metadata and data'''
147 147
148 148 self.__readMetadata()
149 149 self.__readData()
150 150 self.__setBlockList()
151 151
152 152 if 'type' in self.meta:
153 153 self.dataOut = eval(self.meta['type'])()
154 154
155 155 if self.dparam:
156 156 setattr(self.dataOut, "dparam", 1)
157 157
158 158 for attr in self.meta:
159 159 setattr(self.dataOut, attr, self.meta[attr])
160 160
161 161 self.blockIndex = 0
162 162
163 163 return
164 164
165 165 def __setBlockList(self):
166 166 '''
167 167 Selects the data blocks that fall within the configured start and end times
168 168
169 169 self.fp
170 170 self.startTime
171 171 self.endTime
172 172 self.blockList
173 173 self.blocksPerFile
174 174
175 175 '''
176 176
177 177 startTime = self.startTime
178 178 endTime = self.endTime
179 179 thisUtcTime = self.data['utctime'] + self.utcoffset
180 180 try:
181 181 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
182 182 except:
183 183 self.interval = 0
184 184 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
185 185
186 186 thisDate = thisDatetime.date()
187 187 thisTime = thisDatetime.time()
188 188
189 189 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
190 190 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
191 191
192 192 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
193 193
194 194 self.blockList = ind
195 195 self.blocksPerFile = len(ind)
196 196 return
197 197
198 198 def __readMetadata(self):
199 199 '''
200 200 Reads Metadata
201 201 '''
202 202
203 203 meta = {}
204 204
205 205 if self.description:
206 206 for key, value in self.description['Metadata'].items():
207 207 meta[key] = self.fp[value][()]
208 208 else:
209 209 grp = self.fp['Metadata']
210 210 for name in grp:
211 211 meta[name] = grp[name][()]
212 212
213 213 if self.extras:
214 214 for key, value in self.extras.items():
215 215 meta[key] = value
216 216 self.meta = meta
217 217
218 218 return
219 219
220 220 def __readData(self):
221 221
222 222 data = {}
223 223
224 224 if self.description:
225 225 for key, value in self.description['Data'].items():
226 226 if isinstance(value, str):
227 227 if isinstance(self.fp[value], h5py.Dataset):
228 228 data[key] = self.fp[value][()]
229 229 elif isinstance(self.fp[value], h5py.Group):
230 230 array = []
231 231 for ch in self.fp[value]:
232 232 array.append(self.fp[value][ch][()])
233 233 data[key] = numpy.array(array)
234 234 elif isinstance(value, list):
235 235 array = []
236 236 for ch in value:
237 237 array.append(self.fp[ch][()])
238 238 data[key] = numpy.array(array)
239 239 else:
240 240 grp = self.fp['Data']
241 241 for name in grp:
242 242 if isinstance(grp[name], h5py.Dataset):
243 243 array = grp[name][()]
244 244 elif isinstance(grp[name], h5py.Group):
245 245 array = []
246 246 for ch in grp[name]:
247 247 array.append(grp[name][ch][()])
248 248 array = numpy.array(array)
249 249 else:
250 250 log.warning('Unknown type: {}'.format(name))
251 251
252 252 if name in self.description:
253 253 key = self.description[name]
254 254 else:
255 255 key = name
256 256 data[key] = array
257 257
258 258 self.data = data
259 259 return
260 260
261 261 def getData(self):
262 262
263 263 for attr in self.data:
264 264 if self.data[attr].ndim == 1:
265 265 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
266 266 else:
267 267 if self.dparam:
268 268 setattr(self.dataOut, attr, self.data[attr])
269 269 else:
270 270 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
271 271
272 272 self.dataOut.flagNoData = False
273 273 self.blockIndex += 1
274 274
275 275 log.log("Block No. {}/{} -> {}".format(
276 276 self.blockIndex,
277 277 self.blocksPerFile,
278 278 self.dataOut.datatime.ctime()), self.name)
279 279
280 280 return
281 281
282 282 def run(self, **kwargs):
283 283
284 284 if not(self.isConfig):
285 285 self.setup(**kwargs)
286 286 self.isConfig = True
287 287
288 288 if self.blockIndex == self.blocksPerFile:
289 289 self.setNextFile()
290 290
291 291 self.getData()
292 292
293 293 return
294 294
295 295 @MPDecorator
296 296 class HDFWriter(Operation):
297 297 """Operation to write HDF5 files.
298 298
299 299 The HDF5 file contains by default two groups, Data and Metadata, where
300 300 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
301 301 parameters; data attributes are normally time dependent, whereas the metadata
302 302 are not.
303 303 It is possible to customize the structure of the HDF5 file with the
304 304 optional `description` parameter; see the examples.
305 305
306 306 Parameters:
307 307 -----------
308 308 path : str
309 309 Path where files will be saved.
310 310 blocksPerFile : int
311 311 Number of blocks per file
312 312 metadataList : list
313 313 List of the dataOut attributes that will be saved as metadata
314 314 dataList : list
315 315 List of the dataOut attributes that will be saved as data
316 316 setType : str, optional
317 317 If given (e.g. 'weather'), the file names are derived from the timestamp of the data
318 318 description : dict, optional
319 319 Dictionary with the desired description of the HDF5 file
320 320
321 321 Examples
322 322 --------
323 323
324 324 desc = {
325 325 'data_output': {'winds': ['z', 'w', 'v']},
326 326 'utctime': 'timestamps',
327 327 'heightList': 'heights'
328 328 }
329 329 desc = {
330 330 'data_output': ['z', 'w', 'v'],
331 331 'utctime': 'timestamps',
332 332 'heightList': 'heights'
333 333 }
334 334 desc = {
335 335 'Data': {
336 336 'data_output': 'winds',
337 337 'utctime': 'timestamps'
338 338 },
339 339 'Metadata': {
340 340 'heightList': 'heights'
341 341 }
342 342 }
343 343
344 344 writer = proc_unit.addOperation(name='HDFWriter')
345 345 writer.addParameter(name='path', value='/path/to/file')
346 346 writer.addParameter(name='blocksPerFile', value='32')
347 347 writer.addParameter(name='metadataList', value='heightList,timeZone')
348 348 writer.addParameter(name='dataList',value='data_output,utctime')
349 349 # writer.addParameter(name='description',value=json.dumps(desc))
350 350
351 351 """
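A minimal usage sketch for the new `setChannel` option, following the addParameter style of the example above. The `weather_var` value 'Z' and the choice of channel 1 are illustrative assumptions; the framework is assumed to convert numeric parameter values as it does for `blocksPerFile`, and the channel index (0 or 1) is assumed to match the channel order of `data_param` (H or V per the commit message).

    writer = proc_unit.addOperation(name='HDFWriter')
    writer.addParameter(name='path', value='/path/to/file')
    writer.addParameter(name='blocksPerFile', value='1')
    writer.addParameter(name='metadataList', value='heightList,timeZone')
    writer.addParameter(name='dataList', value='data_param,utctime')
    writer.addParameter(name='setType', value='weather')
    writer.addParameter(name='weather_var', value='Z')
    writer.addParameter(name='setChannel', value='1')  # write only one channel (e.g. V)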
352 352
353 ext = ".hdf5"
354 optchar = "D"
355 filename = None
356 path = None
357 setFile = None
358 fp = None
359 firsttime = True
353 ext = ".hdf5"
354 optchar = "D"
355 filename = None
356 path = None
357 setFile = None
358 fp = None
359 firsttime = True
360 360 #Configurations
361 361 blocksPerFile = None
362 blockIndex = None
363 dataOut = None
362 blockIndex = None
363 dataOut = None
364 364 #Data Arrays
365 365 dataList = None
366 366 metadataList = None
367 currentDay = None
368 lastTime = None
369 last_Azipos = None
370 last_Elepos = None
371 mode = None
367 currentDay = None
368 lastTime = None
369 last_Azipos = None
370 last_Elepos = None
371 mode = None
372 372 #-----------------------
373 Typename = None
374 mask = False
373 Typename = None
374 mask = False
375 setChannel = None
375 376
376 377 def __init__(self):
377 378
378 379 Operation.__init__(self)
379 380 return
380 381
381 382 def set_kwargs(self, **kwargs):
382 383
383 384 for key, value in kwargs.items():
384 385 setattr(self, key, value)
385 386
386 387 def set_kwargs_obj(self,obj, **kwargs):
387 388
388 389 for key, value in kwargs.items():
389 390 setattr(obj, key, value)
390 391
391 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None,type_data=None, localtime=True, **kwargs):
392 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None,type_data=None, localtime=True,setChannel=None, **kwargs):
392 393 self.path = path
393 394 self.blocksPerFile = blocksPerFile
394 self.metadataList = metadataList
395 self.dataList = [s.strip() for s in dataList]
395 self.metadataList = metadataList
396 self.dataList = [s.strip() for s in dataList]
397 self.setChannel = setChannel
396 398 self.setType = setType
397 399 if self.setType == "weather":
398 400 self.set_kwargs(**kwargs)
399 401 self.set_kwargs_obj(self.dataOut,**kwargs)
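# weather_vars maps each weather product code to its index along the second axis of data_param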
400 402 self.weather_vars = {
401 403 'S' : 0,
402 404 'V' : 1,
403 405 'W' : 2,
404 406 'SNR' : 3,
405 407 'Z' : 4,
406 408 'D' : 5,
407 409 'P' : 6,
408 410 'R' : 7,
409 411 }
410 412
411 413 if localtime:
412 414 self.getDateTime = datetime.datetime.fromtimestamp
413 415 else:
414 416 self.getDateTime = datetime.datetime.utcfromtimestamp
415 417
416 418 self.description = description
417 419 self.type_data=type_data
418 420
419 421 if self.metadataList is None:
420 422 self.metadataList = self.dataOut.metadata_list
421 423
422 424 dsList = []
423 425
424 426 for i in range(len(self.dataList)):
425 427 dsDict = {}
426 428 if hasattr(self.dataOut, self.dataList[i]):
427 429 dataAux = getattr(self.dataOut, self.dataList[i])
428 430 if self.setType == 'weather' and self.dataList[i] == 'data_param':
429 dataAux = dataAux[:,self.weather_vars[self.weather_var],:]
431 if self.setChannel is None:
432 dataAux = dataAux[:,self.weather_vars[self.weather_var],:]
433 else:
434 dataAux = dataAux[self.setChannel,self.weather_vars[self.weather_var],:]
435 dataAux = numpy.reshape(dataAux,(1,dataAux.shape[0],dataAux.shape[1]))
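# When setChannel is given, only that channel of data_param is kept; the reshape restores a
# leading channel axis of length 1 so the per-channel dataset handling below still applies
# (this assumes the indexed slice is still two-dimensional).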
430 436 dsDict['variable'] = self.dataList[i]
431 437 else:
432 438 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
433 439 continue
434 440
435 441 if dataAux is None:
436 442 continue
437 443 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
438 444 dsDict['nDim'] = 0
439 445 else:
440 446 dsDict['nDim'] = len(dataAux.shape)
441 447 dsDict['shape'] = dataAux.shape
442 448 dsDict['dsNumber'] = dataAux.shape[0]
443 449 dsDict['dtype'] = dataAux.dtype
444 450 dsList.append(dsDict)
445 451
446 452 self.dsList = dsList
447 453 self.currentDay = self.dataOut.datatime.date()
448 454
449 455 def timeFlag(self):
450 456 currentTime = self.dataOut.utctime
451 457 dt = self.getDateTime(currentTime)
452 458
453 459 dataDay = int(dt.strftime('%j'))
454 460
455 461 if self.lastTime is None:
456 462 self.lastTime = currentTime
457 463 self.currentDay = dataDay
458 464 return False
459 465
460 466 timeDiff = currentTime - self.lastTime
461 467
462 468 #If the day changes or the gap between consecutive samples exceeds the limit (3 hours)
463 469 if dataDay != self.currentDay:
464 470 self.currentDay = dataDay
465 471 return True
466 472 elif timeDiff > 3*60*60:
467 473 self.lastTime = currentTime
468 474 return True
469 475 else:
470 476 self.lastTime = currentTime
471 477 return False
472 478
473 479 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
474 480 dataList=[], setType=None, description={}, mode= None,
475 481 type_data=None, Reset = False, localtime=True, **kwargs):
476 482
477 483 if Reset:
478 484 self.isConfig = False
479 485 self.closeFile()
480 486 self.lastTime = None
481 487 self.blockIndex = 0
482 488
483 489 self.dataOut = dataOut
484 490 self.mode = mode
485 491
486 492 if not(self.isConfig):
487 493 self.setup(path=path, blocksPerFile=blocksPerFile,
488 494 metadataList=metadataList, dataList=dataList,
489 495 setType=setType, description=description,type_data=type_data,
490 496 localtime=localtime, **kwargs)
491 497
492 498 self.isConfig = True
493 499 self.setNextFile()
494 500
495 501 self.putData()
496 502 return
497 503
498 504 def setNextFile(self):
499 505
500 506 ext = self.ext
501 507 path = self.path
502 508 setFile = self.setFile
503 509
504 510 dt = self.getDateTime(self.dataOut.utctime)
505 511
506 512 if self.setType == 'weather':
507 513 subfolder = dt.strftime('%Y-%m-%dT%H-00-00')
508 514 subfolder = ''
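# Note: the hourly subfolder is discarded here; the weather branch below builds its own
# '<weather_var>_<mode>_<angle-type>_<angle>' subfolder when the filename is generated.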
509 515 else:
510 516 subfolder = dt.strftime('d%Y%j')
511 517
512 518 fullpath = os.path.join(path, subfolder)
513 519
514 520 if os.path.exists(fullpath):
515 521 filesList = os.listdir(fullpath)
516 522 filesList = [k for k in filesList if k.startswith(self.optchar)]
517 523 if len( filesList ) > 0:
518 524 filesList = sorted(filesList, key=str.lower)
519 525 filen = filesList[-1]
520 526 # the filename must have the following format
521 527 # 0 1234 567 89A BCDE (hex)
522 528 # x YYYY DDD SSS .ext
523 529 if isNumber(filen[8:11]):
524 530 setFile = int(filen[8:11]) #initialize the set counter from the set number of the last file
525 531 else:
526 532 setFile = -1
527 533 else:
528 534 setFile = -1 #initialize the set counter
529 535 else:
530 536 os.makedirs(fullpath)
531 537 setFile = -1 #initialize the set counter
532 538
533 539 if self.setType is None:
534 540 setFile += 1
535 541 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
536 542 dt.year,
537 543 int(dt.strftime('%j')),
538 544 setFile,
539 545 ext )
540 546 elif self.setType == "weather":
541 547
542 548 #SOPHY_20200505_140215_E10.0_Z.h5
543 549 #SOPHY_20200505_140215_A40.0_Z.h5
544 550 if self.dataOut.flagMode == 1: #'AZI' #PPI
545 551 ang_type = 'EL'
546 552 mode_type = 'PPI'
547 553 len_aux = int(self.dataOut.data_ele.shape[0]/4)
548 554 mean = numpy.mean(self.dataOut.data_ele[len_aux:-len_aux])
549 555 ang_ = round(mean,1)
550 556 elif self.dataOut.flagMode == 0: #'ELE' #RHI
551 557 ang_type = 'AZ'
552 558 mode_type = 'RHI'
553 559 len_aux = int(self.dataOut.data_azi.shape[0]/4)
554 560 mean = numpy.mean(self.dataOut.data_azi[len_aux:-len_aux])
555 561 ang_ = round(mean,1)
556 562
557 563 file = '%s_%2.2d%2.2d%2.2d_%2.2d%2.2d%2.2d_%s%2.1f_%s%s' % (
558 564 'SOPHY',
559 565 dt.year,
560 566 dt.month,
561 567 dt.day,
562 568 dt.hour,
563 569 dt.minute,
564 570 dt.second,
565 571 ang_type[0],
566 572 ang_,
567 573 self.weather_var,
568 574 ext )
569 575 subfolder = '{}_{}_{}_{:2.1f}'.format(self.weather_var, mode_type, ang_type, ang_)
570 576 fullpath = os.path.join(path, subfolder)
571 577 if not os.path.exists(fullpath):
572 578 os.makedirs(fullpath)
573 579 else:
574 580 setFile = dt.hour*60+dt.minute
575 581 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
576 582 dt.year,
577 583 int(dt.strftime('%j')),
578 584 setFile,
579 585 ext )
580 586
581 587 self.filename = os.path.join( path, subfolder, file )
582 588
583 589 self.fp = h5py.File(self.filename, 'w')
584 590 #write metadata
585 591 self.writeMetadata(self.fp)
586 592 #Write data
587 593 self.writeData(self.fp)
588 594
589 595 def getLabel(self, name, x=None):
590 596
591 597 if x is None:
592 598 if 'Data' in self.description:
593 599 data = self.description['Data']
594 600 if 'Metadata' in self.description:
595 601 data.update(self.description['Metadata'])
596 602 else:
597 603 data = self.description
598 604 if name in data:
599 605 if isinstance(data[name], str):
600 606 return data[name]
601 607 elif isinstance(data[name], list):
602 608 return None
603 609 elif isinstance(data[name], dict):
604 610 for key, value in data[name].items():
605 611 return key
606 612 return name
607 613 else:
608 614 if 'Data' in self.description:
609 615 data = self.description['Data']
610 616 if 'Metadata' in self.description:
611 617 data.update(self.description['Metadata'])
612 618 else:
613 619 data = self.description
614 620 if name in data:
615 621 if isinstance(data[name], list):
616 622 return data[name][x]
617 623 elif isinstance(data[name], dict):
618 624 for key, value in data[name].items():
619 625 return value[x]
620 626 if 'cspc' in name:
621 627 return 'pair{:02d}'.format(x)
622 628 else:
623 629 return 'channel{:02d}'.format(x)
624 630
625 631 def writeMetadata(self, fp):
626 632
627 633 if self.description:
628 634 if 'Metadata' in self.description:
629 635 grp = fp.create_group('Metadata')
630 636 else:
631 637 grp = fp
632 638 else:
633 639 grp = fp.create_group('Metadata')
634 640
635 641 for i in range(len(self.metadataList)):
636 642 if not hasattr(self.dataOut, self.metadataList[i]):
637 643 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
638 644 continue
639 645 value = getattr(self.dataOut, self.metadataList[i])
640 646 if isinstance(value, bool):
641 647 if value is True:
642 648 value = 1
643 649 else:
644 650 value = 0
645 651 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
646 652 return
647 653
648 654 def writeData(self, fp):
649 655
650 656 if self.description:
651 657 if 'Data' in self.description:
652 658 grp = fp.create_group('Data')
653 659 else:
654 660 grp = fp
655 661 else:
656 662 grp = fp.create_group('Data')
657 663
658 664 dtsets = []
659 665 data = []
660 666
661 667 for dsInfo in self.dsList:
662 668
663 669 if dsInfo['nDim'] == 0:
664 670 ds = grp.create_dataset(
665 671 self.getLabel(dsInfo['variable']),
666 672 (self.blocksPerFile, ),
667 673 chunks=True,
668 674 dtype=numpy.float64)
669 675 dtsets.append(ds)
670 676 data.append((dsInfo['variable'], -1))
671 677 else:
672 678 label = self.getLabel(dsInfo['variable'])
673 679 if label is not None:
674 680 sgrp = grp.create_group(label)
675 681 else:
676 682 sgrp = grp
677 683 if self.blocksPerFile == 1:
678 684 shape = dsInfo['shape'][1:]
679 685 else:
680 686 shape = (self.blocksPerFile, ) + dsInfo['shape'][1:]
681 687 for i in range(dsInfo['dsNumber']):
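# With a single dataset per variable, selecting channel 1 relabels it as channel01
# (getLabel uses the loop index) instead of the default channel00.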
688 if dsInfo['dsNumber']==1:
689 if self.setChannel==1:
690 i=1
682 691 ds = sgrp.create_dataset(
683 692 self.getLabel(dsInfo['variable'], i),
684 693 shape,
685 694 chunks=True,
686 695 dtype=dsInfo['dtype'],
687 696 compression='gzip',
688 697 )
689 698 dtsets.append(ds)
690 699 data.append((dsInfo['variable'], i))
691 700 fp.flush()
692 701
693 702 log.log('Creating file: {}'.format(fp.filename), self.name)
694 703
695 704 self.ds = dtsets
696 705 self.data = data
697 706 self.firsttime = True
698 707 self.blockIndex = 0
699 708 return
700 709
701 710 def putData(self):
702 711
703 712 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
704 713 self.closeFile()
705 714 self.setNextFile()
706 715
707 716 for i, ds in enumerate(self.ds):
708 717 attr, ch = self.data[i]
709 718 if ch == -1:
710 719 ds[self.blockIndex] = getattr(self.dataOut, attr)
711 720 else:
712 721 if self.blocksPerFile == 1:
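# Single-block files: samples whose SNR (data_param index 3) falls below the mask
# threshold are set to NaN before the selected weather variable is written.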
713 722 mask = self.dataOut.data_param[:,3,:][ch] < self.mask
714 723 tmp = getattr(self.dataOut, attr)[:,self.weather_vars[self.weather_var],:][ch]
715 724 if self.mask:
716 725 tmp[mask] = numpy.nan
717 726 ds[:] = tmp
718 727 else:
719 728 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
720 729
721 730 self.fp.flush()
722 731 self.blockIndex += 1
723 732 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
724 733
725 734 return
726 735
727 736 def closeFile(self):
728 737
729 738 if self.blockIndex != self.blocksPerFile:
730 739 for ds in self.ds:
731 740 ds.resize(self.blockIndex, axis=0)
732 741
733 742 if self.fp:
734 743 self.fp.flush()
735 744 self.fp.close()
736 745
737 746 def close(self):
738 747
739 748 self.closeFile()