@@ -71,7 +71,6 @@ class HDFReader(Reader, ProcessingUnit):
         'timeZone': 300
     }
 
-
     reader = project.addReadUnit(
         name='HDFReader',
         path='/path/to/files',
@@ -104,6 +103,7 @@ class HDFReader(Reader, ProcessingUnit):
         self.utcoffset = 0
 
     def setup(self, **kwargs):
+
         self.set_kwargs(**kwargs)
         if not self.ext.startswith('.'):
             self.ext = '.{}'.format(self.ext)
@@ -178,7 +178,6 @@ class HDFReader(Reader, ProcessingUnit):
         startTime = self.startTime
         endTime = self.endTime
         thisUtcTime = self.data['utctime'] + self.utcoffset
-
         self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
         thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
 
@@ -373,13 +372,24 @@ class HDFWriter(Operation):
         Operation.__init__(self)
         return
 
-    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
+    def set_kwargs(self, **kwargs):
+
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+    def set_kwargs_obj(self, obj, **kwargs):
+
+        for key, value in kwargs.items():
+            setattr(obj, key, value)
+
+    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None, **kwargs):
         self.path = path
         self.blocksPerFile = blocksPerFile
         self.metadataList = metadataList
         self.dataList = [s.strip() for s in dataList]
         self.setType = setType
         self.description = description
+        self.set_kwargs(**kwargs)
 
         if self.metadataList is None:
             self.metadataList = self.dataOut.metadata_list
@@ -435,13 +445,14 @@ class HDFWriter(Operation):
         return False
 
     def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
-            dataList=[], setType=None, description={}):
+            dataList=[], setType=None, description={}, **kwargs):
 
         self.dataOut = dataOut
+        self.set_kwargs_obj(self.dataOut, **kwargs)
         if not(self.isConfig):
             self.setup(path=path, blocksPerFile=blocksPerFile,
                        metadataList=metadataList, dataList=dataList,
-                       setType=setType, description=description)
+                       setType=setType, description=description, **kwargs)
 
             self.isConfig = True
         self.setNextFile()
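
Note on the HDFWriter changes above: any extra keyword argument passed to run() is now copied as an attribute onto both the writer itself (via set_kwargs, called from setup) and the dataOut object (via set_kwargs_obj) before the first file is opened. Below is a minimal, self-contained sketch of that pattern; the names WriterSketch, DataContainer and the timeZone=300 argument are illustrative assumptions, not part of the schainpy API.

    class DataContainer:
        """Stand-in for the dataOut object the writer receives."""
        pass

    class WriterSketch:

        def set_kwargs(self, **kwargs):
            # Store every extra keyword argument as an attribute of the writer.
            for key, value in kwargs.items():
                setattr(self, key, value)

        def set_kwargs_obj(self, obj, **kwargs):
            # Store every extra keyword argument as an attribute of another object.
            for key, value in kwargs.items():
                setattr(obj, key, value)

        def run(self, dataOut, path, **kwargs):
            # Mirror of the new run(): extra kwargs are copied onto dataOut
            # and onto the writer before it configures itself.
            self.dataOut = dataOut
            self.set_kwargs_obj(self.dataOut, **kwargs)
            self.set_kwargs(**kwargs)
            self.path = path

    writer = WriterSketch()
    data = DataContainer()
    writer.run(data, '/tmp/out', timeZone=300)   # hypothetical extra parameter
    print(data.timeZone, writer.timeZone)        # both objects now carry the value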