@@ -385,7 +385,7 class Voltage(JROData):
         self.flagNoData = True
         self.flagDiscontinuousBlock = False
         self.utctime = None
-        self.timeZone = None
+        self.timeZone = 0
         self.dstFlag = None
         self.errorCount = None
         self.nCohInt = None
@@ -483,6 +483,7 class Spectra(JROData):
         self.radarControllerHeaderObj = RadarControllerHeader()
         self.systemHeaderObj = SystemHeader()
         self.type = "Spectra"
+        self.timeZone = 0
         # self.data = None
         # self.dtype = None
         # self.nChannels = 0
@@ -738,7 +739,6 class Fits(JROData):
     flagDiscontinuousBlock = False
     useLocalTime = False
     utctime = None
-    timeZone = None
     # ippSeconds = None
     # timeInterval = None
     nCohInt = None
@@ -775,7 +775,7 class Fits(JROData):
         self.profileIndex = 0

         # self.utctime = None
-
+        self.timeZone = 0
         # self.ltctime = None
         # self.timeInterval = None
         # self.header = None
@@ -913,7 +913,7 class Correlation(JROData):

         self.utctime = None

-        self.timeZone = None
+        self.timeZone = 0

         self.dstFlag = None

@@ -1066,10 +1066,9 class Parameters(Spectra):
         Constructor
         '''
         self.radarControllerHeaderObj = RadarControllerHeader()
-
         self.systemHeaderObj = SystemHeader()
-
         self.type = "Parameters"
+        self.timeZone = 0

     def getTimeRange1(self, interval):

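Across Voltage, Spectra, Fits, Correlation and Parameters the timeZone attribute now defaults to 0 instead of None (or is dropped where it was a class attribute). Below is a minimal sketch, not part of the patch, of why a numeric default is convenient: it assumes the schainpy convention that timeZone is expressed in minutes (the HDFReader example further down passes 'timeZone': 300) and that local time is derived by subtracting timeZone*60 seconds from utctime.

# Illustrative sketch only; assumes timeZone is minutes offset from UTC and that
# local time is obtained as utctime - timeZone*60, as done elsewhere in schainpy.
import datetime

utctime = 1546300800.0   # hypothetical block timestamp (2019-01-01 00:00:00 UTC)
timeZone = 0             # new default: behaves as plain UTC until a header sets it

ltctime = utctime - timeZone * 60          # works; with None this arithmetic raises
print(datetime.datetime.utcfromtimestamp(ltctime))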
This diff has been collapsed as it changes many lines (1337 lines changed).
@@ -1,10 +1,10
-import numpy
-import time
-import os
-import h5py
-import re
-import datetime
-
+import os
+import time
+import datetime
+
+import numpy
+import h5py
+
 import schainpy.admin
 from schainpy.model.data.jrodata import *
 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
@@ -12,979 +12,85 from schainpy.model.io.jroIO_base import * | |||||
12 | from schainpy.utils import log |
|
12 | from schainpy.utils import log | |
13 |
|
13 | |||
14 |
|
14 | |||
15 |
class |
|
15 | class HDFReader(Reader, ProcessingUnit): | |
16 | ''' |
|
16 | """Processing unit to read HDF5 format files | |
17 | Reads HDF5 format files |
|
17 | ||
18 | path |
|
18 | This unit reads HDF5 files created with `HDFWriter` operation contains | |
19 | startDate |
|
19 | by default two groups Data and Metadata all variables would be saved as `dataOut` | |
20 | endDate |
|
20 | attributes. | |
21 | startTime |
|
21 | It is possible to read any HDF5 file by given the structure in the `description` | |
22 | endTime |
|
22 | parameter, also you can add extra values to metadata with the parameter `extras`. | |
23 | ''' |
|
23 | ||
24 |
|
24 | Parameters: | ||
25 | ext = ".hdf5" |
|
25 | ----------- | |
26 | optchar = "D" |
|
26 | path : str | |
27 | timezone = None |
|
27 | Path where files are located. | |
28 |
start |
|
28 | startDate : date | |
29 | endTime = None |
|
29 | Start date of the files | |
30 | fileIndex = None |
|
30 | endDate : list | |
31 | utcList = None #To select data in the utctime list |
|
31 | End date of the files | |
32 | blockList = None #List to blocks to be read from the file |
|
32 | startTime : time | |
33 | blocksPerFile = None #Number of blocks to be read |
|
33 | Start time of the files | |
34 | blockIndex = None |
|
34 | endTime : time | |
35 | path = None |
|
35 | End time of the files | |
36 | #List of Files |
|
36 | description : dict, optional | |
37 | filenameList = None |
|
37 | Dictionary with the description of the HDF5 file | |
38 | datetimeList = None |
|
38 | extras : dict, optional | |
39 | #Hdf5 File |
|
39 | Dictionary with extra metadata to be be added to `dataOut` | |
40 | listMetaname = None |
|
40 | ||
41 | listMeta = None |
|
41 | Examples | |
42 | listDataname = None |
|
42 | -------- | |
43 | listData = None |
|
43 | ||
44 | listShapes = None |
|
44 | desc = { | |
45 | fp = None |
|
45 | 'Data': { | |
46 | #dataOut reconstruction |
|
46 | 'data_output': ['u', 'v', 'w'], | |
47 | dataOut = None |
|
47 | 'utctime': 'timestamps', | |
48 |
|
48 | } , | ||
49 | def __init__(self):#, **kwargs): |
|
49 | 'Metadata': { | |
50 | ProcessingUnit.__init__(self) #, **kwargs) |
|
50 | 'heightList': 'heights' | |
51 | self.dataOut = Parameters() |
|
51 | } | |
52 | return |
|
52 | } | |
53 |
|
|
53 | ||
54 | def setup(self, **kwargs): |
|
54 | desc = { | |
55 |
|
55 | 'Data': { | ||
56 | path = kwargs['path'] |
|
56 | 'data_output': 'winds', | |
57 | startDate = kwargs['startDate'] |
|
57 | 'utctime': 'timestamps' | |
58 | endDate = kwargs['endDate'] |
|
58 | }, | |
59 | startTime = kwargs['startTime'] |
|
59 | 'Metadata': { | |
60 | endTime = kwargs['endTime'] |
|
60 | 'heightList': 'heights' | |
61 | walk = kwargs['walk'] |
|
61 | } | |
62 | if 'ext' in kwargs: |
|
62 | } | |
63 | ext = kwargs['ext'] |
|
63 | ||
64 | else: |
|
64 | extras = { | |
65 | ext = '.hdf5' |
|
65 | 'timeZone': 300 | |
66 | if 'timezone' in kwargs: |
|
66 | } | |
67 | self.timezone = kwargs['timezone'] |
|
67 | ||
68 | else: |
|
68 | reader = project.addReadUnit( | |
69 | self.timezone = 'lt' |
|
69 | name='HDFReader', | |
70 |
|
70 | path='/path/to/files', | ||
71 | print("[Reading] Searching files in offline mode ...") |
|
71 | startDate='2019/01/01', | |
72 | pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate, |
|
72 | endDate='2019/01/31', | |
73 | startTime=startTime, endTime=endTime, |
|
73 | startTime='00:00:00', | |
74 | ext=ext, walk=walk) |
|
74 | endTime='23:59:59', | |
75 |
|
75 | # description=json.dumps(desc), | ||
76 | if not(filenameList): |
|
76 | # extras=json.dumps(extras), | |
77 | print("There is no files into the folder: %s"%(path)) |
|
77 | ) | |
78 | sys.exit(-1) |
|
|||
79 |
|
||||
80 | self.fileIndex = -1 |
|
|||
81 | self.startTime = startTime |
|
|||
82 | self.endTime = endTime |
|
|||
83 |
|
||||
84 | self.__readMetadata() |
|
|||
85 |
|
||||
86 | self.__setNextFileOffline() |
|
|||
87 |
|
||||
88 | return |
|
|||
89 |
|
||||
90 | def searchFilesOffLine(self, |
|
|||
91 | path, |
|
|||
92 | startDate=None, |
|
|||
93 | endDate=None, |
|
|||
94 | startTime=datetime.time(0,0,0), |
|
|||
95 | endTime=datetime.time(23,59,59), |
|
|||
96 | ext='.hdf5', |
|
|||
97 | walk=True): |
|
|||
98 |
|
||||
99 | expLabel = '' |
|
|||
100 | self.filenameList = [] |
|
|||
101 | self.datetimeList = [] |
|
|||
102 |
|
||||
103 | pathList = [] |
|
|||
104 |
|
||||
105 | JRODataObj = JRODataReader() |
|
|||
106 | dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True) |
|
|||
107 |
|
||||
108 | if dateList == []: |
|
|||
109 | print("[Reading] No *%s files in %s from %s to %s)"%(ext, path, |
|
|||
110 | datetime.datetime.combine(startDate,startTime).ctime(), |
|
|||
111 | datetime.datetime.combine(endDate,endTime).ctime())) |
|
|||
112 |
|
||||
113 | return None, None |
|
|||
114 |
|
||||
115 | if len(dateList) > 1: |
|
|||
116 | print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)) |
|
|||
117 | else: |
|
|||
118 | print("[Reading] data was found for the date %s" %(dateList[0])) |
|
|||
119 |
|
||||
120 | filenameList = [] |
|
|||
121 | datetimeList = [] |
|
|||
122 |
|
||||
123 | #---------------------------------------------------------------------------------- |
|
|||
124 |
|
||||
125 | for thisPath in pathList: |
|
|||
126 |
|
||||
127 | fileList = glob.glob1(thisPath, "*%s" %ext) |
|
|||
128 | fileList.sort() |
|
|||
129 |
|
||||
130 | for file in fileList: |
|
|||
131 |
|
||||
132 | filename = os.path.join(thisPath,file) |
|
|||
133 |
|
||||
134 | if not isFileInDateRange(filename, startDate, endDate): |
|
|||
135 | continue |
|
|||
136 |
|
||||
137 | thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime) |
|
|||
138 |
|
||||
139 | if not(thisDatetime): |
|
|||
140 | continue |
|
|||
141 |
|
||||
142 | filenameList.append(filename) |
|
|||
143 | datetimeList.append(thisDatetime) |
|
|||
144 |
|
||||
145 | if not(filenameList): |
|
|||
146 | print("[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())) |
|
|||
147 | return None, None |
|
|||
148 |
|
||||
149 | print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)) |
|
|||
150 | print() |
|
|||
151 |
|
||||
152 | self.filenameList = filenameList |
|
|||
153 | self.datetimeList = datetimeList |
|
|||
154 |
|
||||
155 | return pathList, filenameList |
|
|||
156 |
|
||||
157 | def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime): |
|
|||
158 |
|
||||
159 | """ |
|
|||
160 | Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado. |
|
|||
161 |
|
||||
162 | Inputs: |
|
|||
163 | filename : nombre completo del archivo de datos en formato Jicamarca (.r) |
|
|||
164 | startDate : fecha inicial del rango seleccionado en formato datetime.date |
|
|||
165 | endDate : fecha final del rango seleccionado en formato datetime.date |
|
|||
166 | startTime : tiempo inicial del rango seleccionado en formato datetime.time |
|
|||
167 | endTime : tiempo final del rango seleccionado en formato datetime.time |
|
|||
168 |
|
||||
169 | Return: |
|
|||
170 | Boolean : Retorna True si el archivo de datos contiene datos en el rango de |
|
|||
171 | fecha especificado, de lo contrario retorna False. |
|
|||
172 |
|
||||
173 | Excepciones: |
|
|||
174 | Si el archivo no existe o no puede ser abierto |
|
|||
175 | Si la cabecera no puede ser leida. |
|
|||
176 |
|
||||
177 | """ |
|
|||
178 |
|
||||
179 | try: |
|
|||
180 | fp = h5py.File(filename,'r') |
|
|||
181 | grp1 = fp['Data'] |
|
|||
182 |
|
||||
183 | except IOError: |
|
|||
184 | traceback.print_exc() |
|
|||
185 | raise IOError("The file %s can't be opened" %(filename)) |
|
|||
186 |
|
||||
187 | #In case has utctime attribute |
|
|||
188 | grp2 = grp1['utctime'] |
|
|||
189 | # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time |
|
|||
190 | thisUtcTime = grp2.value[0] |
|
|||
191 |
|
||||
192 | fp.close() |
|
|||
193 |
|
||||
194 | if self.timezone == 'lt': |
|
|||
195 | thisUtcTime -= 5*3600 |
|
|||
196 |
|
||||
197 | thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600) |
|
|||
198 | thisDate = thisDatetime.date() |
|
|||
199 | thisTime = thisDatetime.time() |
|
|||
200 |
|
||||
201 | startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds() |
|
|||
202 | endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds() |
|
|||
203 |
|
||||
204 | #General case |
|
|||
205 | # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o |
|
|||
206 | #-----------o----------------------------o----------- |
|
|||
207 | # startTime endTime |
|
|||
208 |
|
||||
209 | if endTime >= startTime: |
|
|||
210 | thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime) |
|
|||
211 | if numpy.any(thisUtcLog): #If there is one block between the hours mentioned |
|
|||
212 | return thisDatetime |
|
|||
213 | return None |
|
|||
214 |
|
||||
215 | #If endTime < startTime then endTime belongs to the next day |
|
|||
216 | #<<<<<<<<<<<o o>>>>>>>>>>> |
|
|||
217 | #-----------o----------------------------o----------- |
|
|||
218 | # endTime startTime |
|
|||
219 |
|
||||
220 | if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime): |
|
|||
221 | return None |
|
|||
222 |
|
||||
223 | if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime): |
|
|||
224 | return None |
|
|||
225 |
|
||||
226 | if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime): |
|
|||
227 | return None |
|
|||
228 |
|
||||
229 | return thisDatetime |
|
|||
230 |
|
||||
231 | def __setNextFileOffline(self): |
|
|||
232 |
|
||||
233 | self.fileIndex += 1 |
|
|||
234 | idFile = self.fileIndex |
|
|||
235 |
|
||||
236 | if not(idFile < len(self.filenameList)): |
|
|||
237 | raise schainpy.admin.SchainError("No more Files") |
|
|||
238 | return 0 |
|
|||
239 |
|
||||
240 | filename = self.filenameList[idFile] |
|
|||
241 | filePointer = h5py.File(filename,'r') |
|
|||
242 | self.filename = filename |
|
|||
243 | self.fp = filePointer |
|
|||
244 |
|
||||
245 | print("Setting the file: %s"%self.filename) |
|
|||
246 |
|
||||
247 | self.__setBlockList() |
|
|||
248 | self.__readData() |
|
|||
249 | self.blockIndex = 0 |
|
|||
250 | return 1 |
|
|||
251 |
|
||||
252 | def __setBlockList(self): |
|
|||
253 | ''' |
|
|||
254 | Selects the data within the times defined |
|
|||
255 |
|
||||
256 | self.fp |
|
|||
257 | self.startTime |
|
|||
258 | self.endTime |
|
|||
259 |
|
||||
260 | self.blockList |
|
|||
261 | self.blocksPerFile |
|
|||
262 |
|
||||
263 | ''' |
|
|||
264 | fp = self.fp |
|
|||
265 | startTime = self.startTime |
|
|||
266 | endTime = self.endTime |
|
|||
267 |
|
||||
268 | grp = fp['Data'] |
|
|||
269 | thisUtcTime = grp['utctime'].value.astype(numpy.float)[0] |
|
|||
270 |
|
||||
271 | #ERROOOOR |
|
|||
272 | if self.timezone == 'lt': |
|
|||
273 | thisUtcTime -= 5*3600 |
|
|||
274 |
|
||||
275 | thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600) |
|
|||
276 |
|
||||
277 | thisDate = thisDatetime.date() |
|
|||
278 | thisTime = thisDatetime.time() |
|
|||
279 |
|
||||
280 | startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds() |
|
|||
281 | endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds() |
|
|||
282 |
|
||||
283 | ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0] |
|
|||
284 |
|
||||
285 | self.blockList = ind |
|
|||
286 | self.blocksPerFile = len(ind) |
|
|||
287 |
|
||||
288 | return |
|
|||
289 |
|
||||
290 | def __readMetadata(self): |
|
|||
291 | ''' |
|
|||
292 | Reads Metadata |
|
|||
293 |
|
||||
294 | self.pathMeta |
|
|||
295 | self.listShapes |
|
|||
296 | self.listMetaname |
|
|||
297 | self.listMeta |
|
|||
298 |
|
||||
299 | ''' |
|
|||
300 |
|
||||
301 | filename = self.filenameList[0] |
|
|||
302 | fp = h5py.File(filename,'r') |
|
|||
303 | gp = fp['Metadata'] |
|
|||
304 |
|
||||
305 | listMetaname = [] |
|
|||
306 | listMetadata = [] |
|
|||
307 | for item in list(gp.items()): |
|
|||
308 | name = item[0] |
|
|||
309 |
|
||||
310 | if name=='array dimensions': |
|
|||
311 | table = gp[name][:] |
|
|||
312 | listShapes = {} |
|
|||
313 | for shapes in table: |
|
|||
314 | listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4],shapes[5]]) |
|
|||
315 | else: |
|
|||
316 | data = gp[name].value |
|
|||
317 | listMetaname.append(name) |
|
|||
318 | listMetadata.append(data) |
|
|||
319 |
|
||||
320 | self.listShapes = listShapes |
|
|||
321 | self.listMetaname = listMetaname |
|
|||
322 | self.listMeta = listMetadata |
|
|||
323 |
|
||||
324 | fp.close() |
|
|||
325 | return |
|
|||
326 |
|
||||
327 | def __readData(self): |
|
|||
328 | grp = self.fp['Data'] |
|
|||
329 | listdataname = [] |
|
|||
330 | listdata = [] |
|
|||
331 |
|
||||
332 | for item in list(grp.items()): |
|
|||
333 | name = item[0] |
|
|||
334 | listdataname.append(name) |
|
|||
335 |
|
||||
336 | array = self.__setDataArray(grp[name],self.listShapes[name]) |
|
|||
337 | listdata.append(array) |
|
|||
338 |
|
||||
339 | self.listDataname = listdataname |
|
|||
340 | self.listData = listdata |
|
|||
341 | return |
|
|||
342 |
|
||||
343 | def __setDataArray(self, dataset, shapes): |
|
|||
344 |
|
||||
345 | nDims = shapes[0] |
|
|||
346 | nDim2 = shapes[1] #Dimension 0 |
|
|||
347 | nDim1 = shapes[2] #Dimension 1, number of Points or Parameters |
|
|||
348 | nDim0 = shapes[3] #Dimension 2, number of samples or ranges |
|
|||
349 | mode = shapes[4] #Mode of storing |
|
|||
350 | blockList = self.blockList |
|
|||
351 | blocksPerFile = self.blocksPerFile |
|
|||
352 |
|
||||
353 | #Depending on what mode the data was stored |
|
|||
354 | if mode == 0: #Divided in channels |
|
|||
355 | arrayData = dataset.value.astype(numpy.float)[0][blockList] |
|
|||
356 | if mode == 1: #Divided in parameter |
|
|||
357 | strds = 'table' |
|
|||
358 | nDatas = nDim1 |
|
|||
359 | newShapes = (blocksPerFile,nDim2,nDim0) |
|
|||
360 | elif mode==2: #Concatenated in a table |
|
|||
361 | strds = 'table0' |
|
|||
362 | arrayData = dataset[strds].value |
|
|||
363 | #Selecting part of the dataset |
|
|||
364 | utctime = arrayData[:,0] |
|
|||
365 | u, indices = numpy.unique(utctime, return_index=True) |
|
|||
366 |
|
||||
367 | if blockList.size != indices.size: |
|
|||
368 | indMin = indices[blockList[0]] |
|
|||
369 | if blockList[1] + 1 >= indices.size: |
|
|||
370 | arrayData = arrayData[indMin:,:] |
|
|||
371 | else: |
|
|||
372 | indMax = indices[blockList[1] + 1] |
|
|||
373 | arrayData = arrayData[indMin:indMax,:] |
|
|||
374 | return arrayData |
|
|||
375 |
|
||||
376 | # One dimension |
|
|||
377 | if nDims == 0: |
|
|||
378 | arrayData = dataset.value.astype(numpy.float)[0][blockList] |
|
|||
379 |
|
||||
380 | # Two dimensions |
|
|||
381 | elif nDims == 2: |
|
|||
382 | arrayData = numpy.zeros((blocksPerFile,nDim1,nDim0)) |
|
|||
383 | newShapes = (blocksPerFile,nDim0) |
|
|||
384 | nDatas = nDim1 |
|
|||
385 |
|
||||
386 | for i in range(nDatas): |
|
|||
387 | data = dataset[strds + str(i)].value |
|
|||
388 | arrayData[:,i,:] = data[blockList,:] |
|
|||
389 |
|
||||
390 | # Three dimensions |
|
|||
391 | else: |
|
|||
392 | arrayData = numpy.zeros((blocksPerFile,nDim2,nDim1,nDim0)) |
|
|||
393 | for i in range(nDatas): |
|
|||
394 |
|
||||
395 | data = dataset[strds + str(i)].value |
|
|||
396 |
|
||||
397 | for b in range(blockList.size): |
|
|||
398 | arrayData[b,:,i,:] = data[:,:,blockList[b]] |
|
|||
399 |
|
||||
400 | return arrayData |
|
|||
401 |
|
||||
402 | def __setDataOut(self): |
|
|||
403 | listMeta = self.listMeta |
|
|||
404 | listMetaname = self.listMetaname |
|
|||
405 | listDataname = self.listDataname |
|
|||
406 | listData = self.listData |
|
|||
407 | listShapes = self.listShapes |
|
|||
408 |
|
||||
409 | blockIndex = self.blockIndex |
|
|||
410 | # blockList = self.blockList |
|
|||
411 |
|
||||
412 | for i in range(len(listMeta)): |
|
|||
413 | setattr(self.dataOut,listMetaname[i],listMeta[i]) |
|
|||
414 |
|
||||
415 | for j in range(len(listData)): |
|
|||
416 | nShapes = listShapes[listDataname[j]][0] |
|
|||
417 | mode = listShapes[listDataname[j]][4] |
|
|||
418 | if nShapes == 1: |
|
|||
419 | setattr(self.dataOut,listDataname[j],listData[j][blockIndex]) |
|
|||
420 | elif nShapes > 1: |
|
|||
421 | setattr(self.dataOut,listDataname[j],listData[j][blockIndex,:]) |
|
|||
422 | elif mode==0: |
|
|||
423 | setattr(self.dataOut,listDataname[j],listData[j][blockIndex]) |
|
|||
424 | #Mode Meteors |
|
|||
425 | elif mode ==2: |
|
|||
426 | selectedData = self.__selectDataMode2(listData[j], blockIndex) |
|
|||
427 | setattr(self.dataOut, listDataname[j], selectedData) |
|
|||
428 | return |
|
|||
429 |
|
||||
430 | def __selectDataMode2(self, data, blockIndex): |
|
|||
431 | utctime = data[:,0] |
|
|||
432 | aux, indices = numpy.unique(utctime, return_inverse=True) |
|
|||
433 | selInd = numpy.where(indices == blockIndex)[0] |
|
|||
434 | selData = data[selInd,:] |
|
|||
435 |
|
||||
436 | return selData |
|
|||
437 |
|
||||
438 | def getData(self): |
|
|||
439 |
|
||||
440 | if self.blockIndex==self.blocksPerFile: |
|
|||
441 | if not( self.__setNextFileOffline() ): |
|
|||
442 | self.dataOut.flagNoData = True |
|
|||
443 | return 0 |
|
|||
444 |
|
||||
445 | self.__setDataOut() |
|
|||
446 | self.dataOut.flagNoData = False |
|
|||
447 |
|
||||
448 | self.blockIndex += 1 |
|
|||
449 |
|
||||
450 | return |
|
|||
451 |
|
||||
452 | def run(self, **kwargs): |
|
|||
453 |
|
||||
454 | if not(self.isConfig): |
|
|||
455 | self.setup(**kwargs) |
|
|||
456 | self.isConfig = True |
|
|||
457 |
|
||||
458 | self.getData() |
|
|||
459 |
|
||||
460 | return |
|
|||
461 |
|
||||
462 | @MPDecorator |
|
|||
463 | class ParamWriter(Operation): |
|
|||
464 | ''' |
|
|||
465 | HDF5 Writer, stores parameters data in HDF5 format files |
|
|||
466 |
|
||||
467 | path: path where the files will be stored |
|
|||
468 | blocksPerFile: number of blocks that will be saved in per HDF5 format file |
|
|||
469 | mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors) |
|
|||
470 | metadataList: list of attributes that will be stored as metadata |
|
|||
471 | dataList: list of attributes that will be stores as data |
|
|||
472 | ''' |
|
|||
473 |
|
||||
474 | ext = ".hdf5" |
|
|||
475 | optchar = "D" |
|
|||
476 | metaoptchar = "M" |
|
|||
477 | metaFile = None |
|
|||
478 | filename = None |
|
|||
479 | path = None |
|
|||
480 | setFile = None |
|
|||
481 | fp = None |
|
|||
482 | grp = None |
|
|||
483 | ds = None |
|
|||
484 | firsttime = True |
|
|||
485 | #Configurations |
|
|||
486 | blocksPerFile = None |
|
|||
487 | blockIndex = None |
|
|||
488 | dataOut = None |
|
|||
489 | #Data Arrays |
|
|||
490 | dataList = None |
|
|||
491 | metadataList = None |
|
|||
492 | dsList = None #List of dictionaries with dataset properties |
|
|||
493 | tableDim = None |
|
|||
494 | dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')] |
|
|||
495 | currentDay = None |
|
|||
496 | lastTime = None |
|
|||
497 | setType = None |
|
|||
498 |
|
||||
499 | def __init__(self): |
|
|||
500 |
|
||||
501 | Operation.__init__(self) |
|
|||
502 | return |
|
|||
503 |
|
||||
504 | def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None): |
|
|||
505 | self.path = path |
|
|||
506 | self.blocksPerFile = blocksPerFile |
|
|||
507 | self.metadataList = metadataList |
|
|||
508 | self.dataList = dataList |
|
|||
509 | self.dataOut = dataOut |
|
|||
510 | self.mode = mode |
|
|||
511 | if self.mode is not None: |
|
|||
512 | self.mode = numpy.zeros(len(self.dataList)) + mode |
|
|||
513 | else: |
|
|||
514 | self.mode = numpy.ones(len(self.dataList)) |
|
|||
515 |
|
||||
516 | self.setType = setType |
|
|||
517 |
|
||||
518 | arrayDim = numpy.zeros((len(self.dataList),5)) |
|
|||
519 |
|
||||
520 | #Table dimensions |
|
|||
521 | dtype0 = self.dtype |
|
|||
522 | tableList = [] |
|
|||
523 |
|
||||
524 | #Dictionary and list of tables |
|
|||
525 | dsList = [] |
|
|||
526 |
|
||||
527 | for i in range(len(self.dataList)): |
|
|||
528 | dsDict = {} |
|
|||
529 | dataAux = getattr(self.dataOut, self.dataList[i]) |
|
|||
530 | dsDict['variable'] = self.dataList[i] |
|
|||
531 | #--------------------- Conditionals ------------------------ |
|
|||
532 | #There is no data |
|
|||
533 |
|
||||
534 | if dataAux is None: |
|
|||
535 |
|
||||
536 | return 0 |
|
|||
537 |
|
||||
538 | if isinstance(dataAux, (int, float, numpy.integer, numpy.float)): |
|
|||
539 | dsDict['mode'] = 0 |
|
|||
540 | dsDict['nDim'] = 0 |
|
|||
541 | arrayDim[i,0] = 0 |
|
|||
542 | dsList.append(dsDict) |
|
|||
543 |
|
||||
544 | #Mode 2: meteors |
|
|||
545 | elif self.mode[i] == 2: |
|
|||
546 | dsDict['dsName'] = 'table0' |
|
|||
547 | dsDict['mode'] = 2 # Mode meteors |
|
|||
548 | dsDict['shape'] = dataAux.shape[-1] |
|
|||
549 | dsDict['nDim'] = 0 |
|
|||
550 | dsDict['dsNumber'] = 1 |
|
|||
551 | arrayDim[i,3] = dataAux.shape[-1] |
|
|||
552 | arrayDim[i,4] = self.mode[i] #Mode the data was stored |
|
|||
553 | dsList.append(dsDict) |
|
|||
554 |
|
||||
555 | #Mode 1 |
|
|||
556 | else: |
|
|||
557 | arrayDim0 = dataAux.shape #Data dimensions |
|
|||
558 | arrayDim[i,0] = len(arrayDim0) #Number of array dimensions |
|
|||
559 | arrayDim[i,4] = self.mode[i] #Mode the data was stored |
|
|||
560 | strtable = 'table' |
|
|||
561 | dsDict['mode'] = 1 # Mode parameters |
|
|||
562 |
|
||||
563 | # Three-dimension arrays |
|
|||
564 | if len(arrayDim0) == 3: |
|
|||
565 | arrayDim[i,1:-1] = numpy.array(arrayDim0) |
|
|||
566 | nTables = int(arrayDim[i,2]) |
|
|||
567 | dsDict['dsNumber'] = nTables |
|
|||
568 | dsDict['shape'] = arrayDim[i,2:4] |
|
|||
569 | dsDict['nDim'] = 3 |
|
|||
570 |
|
||||
571 | for j in range(nTables): |
|
|||
572 | dsDict = dsDict.copy() |
|
|||
573 | dsDict['dsName'] = strtable + str(j) |
|
|||
574 | dsList.append(dsDict) |
|
|||
575 |
|
||||
576 | # Two-dimension arrays |
|
|||
577 | elif len(arrayDim0) == 2: |
|
|||
578 | arrayDim[i,2:-1] = numpy.array(arrayDim0) |
|
|||
579 | nTables = int(arrayDim[i,2]) |
|
|||
580 | dsDict['dsNumber'] = nTables |
|
|||
581 | dsDict['shape'] = arrayDim[i,3] |
|
|||
582 | dsDict['nDim'] = 2 |
|
|||
583 |
|
||||
584 | for j in range(nTables): |
|
|||
585 | dsDict = dsDict.copy() |
|
|||
586 | dsDict['dsName'] = strtable + str(j) |
|
|||
587 | dsList.append(dsDict) |
|
|||
588 |
|
||||
589 | # One-dimension arrays |
|
|||
590 | elif len(arrayDim0) == 1: |
|
|||
591 | arrayDim[i,3] = arrayDim0[0] |
|
|||
592 | dsDict['shape'] = arrayDim0[0] |
|
|||
593 | dsDict['dsNumber'] = 1 |
|
|||
594 | dsDict['dsName'] = strtable + str(0) |
|
|||
595 | dsDict['nDim'] = 1 |
|
|||
596 | dsList.append(dsDict) |
|
|||
597 |
|
||||
598 | table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0) |
|
|||
599 | tableList.append(table) |
|
|||
600 |
|
||||
601 | self.dsList = dsList |
|
|||
602 | self.tableDim = numpy.array(tableList, dtype = dtype0) |
|
|||
603 | self.blockIndex = 0 |
|
|||
604 | timeTuple = time.localtime(dataOut.utctime) |
|
|||
605 | self.currentDay = timeTuple.tm_yday |
|
|||
606 |
|
||||
607 | def putMetadata(self): |
|
|||
608 |
|
||||
609 | fp = self.createMetadataFile() |
|
|||
610 | self.writeMetadata(fp) |
|
|||
611 | fp.close() |
|
|||
612 | return |
|
|||
613 |
|
||||
614 | def createMetadataFile(self): |
|
|||
615 | ext = self.ext |
|
|||
616 | path = self.path |
|
|||
617 | setFile = self.setFile |
|
|||
618 |
|
||||
619 | timeTuple = time.localtime(self.dataOut.utctime) |
|
|||
620 |
|
||||
621 | subfolder = '' |
|
|||
622 | fullpath = os.path.join( path, subfolder ) |
|
|||
623 |
|
||||
624 | if not( os.path.exists(fullpath) ): |
|
|||
625 | os.mkdir(fullpath) |
|
|||
626 | setFile = -1 #inicializo mi contador de seteo |
|
|||
627 |
|
||||
628 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) |
|
|||
629 | fullpath = os.path.join( path, subfolder ) |
|
|||
630 |
|
||||
631 | if not( os.path.exists(fullpath) ): |
|
|||
632 | os.mkdir(fullpath) |
|
|||
633 | setFile = -1 #inicializo mi contador de seteo |
|
|||
634 |
|
||||
635 | else: |
|
|||
636 | filesList = os.listdir( fullpath ) |
|
|||
637 | filesList = sorted( filesList, key=str.lower ) |
|
|||
638 | if len( filesList ) > 0: |
|
|||
639 | filesList = [k for k in filesList if k.startswith(self.metaoptchar)] |
|
|||
640 | filen = filesList[-1] |
|
|||
641 | # el filename debera tener el siguiente formato |
|
|||
642 | # 0 1234 567 89A BCDE (hex) |
|
|||
643 | # x YYYY DDD SSS .ext |
|
|||
644 | if isNumber( filen[8:11] ): |
|
|||
645 | setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file |
|
|||
646 | else: |
|
|||
647 | setFile = -1 |
|
|||
648 | else: |
|
|||
649 | setFile = -1 #inicializo mi contador de seteo |
|
|||
650 |
|
||||
651 | if self.setType is None: |
|
|||
652 | setFile += 1 |
|
|||
653 | file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar, |
|
|||
654 | timeTuple.tm_year, |
|
|||
655 | timeTuple.tm_yday, |
|
|||
656 | setFile, |
|
|||
657 | ext ) |
|
|||
658 | else: |
|
|||
659 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min |
|
|||
660 | file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar, |
|
|||
661 | timeTuple.tm_year, |
|
|||
662 | timeTuple.tm_yday, |
|
|||
663 | setFile, |
|
|||
664 | ext ) |
|
|||
665 |
|
||||
666 | filename = os.path.join( path, subfolder, file ) |
|
|||
667 | self.metaFile = file |
|
|||
668 | #Setting HDF5 File |
|
|||
669 | fp = h5py.File(filename,'w') |
|
|||
670 |
|
||||
671 | return fp |
|
|||
672 |
|
||||
673 | def writeMetadata(self, fp): |
|
|||
674 |
|
||||
675 | grp = fp.create_group("Metadata") |
|
|||
676 | grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype) |
|
|||
677 |
|
||||
678 | for i in range(len(self.metadataList)): |
|
|||
679 | grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i])) |
|
|||
680 | return |
|
|||
681 |
|
||||
682 | def timeFlag(self): |
|
|||
683 | currentTime = self.dataOut.utctime |
|
|||
684 |
|
||||
685 | if self.lastTime is None: |
|
|||
686 | self.lastTime = currentTime |
|
|||
687 |
|
||||
688 | #Day |
|
|||
689 | timeTuple = time.localtime(currentTime) |
|
|||
690 | dataDay = timeTuple.tm_yday |
|
|||
691 |
|
||||
692 | #Time |
|
|||
693 | timeDiff = currentTime - self.lastTime |
|
|||
694 |
|
||||
695 | #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora |
|
|||
696 | if dataDay != self.currentDay: |
|
|||
697 | self.currentDay = dataDay |
|
|||
698 | return True |
|
|||
699 | elif timeDiff > 3*60*60: |
|
|||
700 | self.lastTime = currentTime |
|
|||
701 | return True |
|
|||
702 | else: |
|
|||
703 | self.lastTime = currentTime |
|
|||
704 | return False |
|
|||
705 |
|
||||
706 | def setNextFile(self): |
|
|||
707 |
|
||||
708 | ext = self.ext |
|
|||
709 | path = self.path |
|
|||
710 | setFile = self.setFile |
|
|||
711 | mode = self.mode |
|
|||
712 |
|
||||
713 | timeTuple = time.localtime(self.dataOut.utctime) |
|
|||
714 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) |
|
|||
715 |
|
||||
716 | fullpath = os.path.join( path, subfolder ) |
|
|||
717 |
|
||||
718 | if os.path.exists(fullpath): |
|
|||
719 | filesList = os.listdir( fullpath ) |
|
|||
720 | filesList = [k for k in filesList if 'M' in k] |
|
|||
721 | if len( filesList ) > 0: |
|
|||
722 | filesList = sorted( filesList, key=str.lower ) |
|
|||
723 | filen = filesList[-1] |
|
|||
724 | # el filename debera tener el siguiente formato |
|
|||
725 | # 0 1234 567 89A BCDE (hex) |
|
|||
726 | # x YYYY DDD SSS .ext |
|
|||
727 | if isNumber( filen[8:11] ): |
|
|||
728 | setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file |
|
|||
729 | else: |
|
|||
730 | setFile = -1 |
|
|||
731 | else: |
|
|||
732 | setFile = -1 #inicializo mi contador de seteo |
|
|||
733 | else: |
|
|||
734 | os.makedirs(fullpath) |
|
|||
735 | setFile = -1 #inicializo mi contador de seteo |
|
|||
736 |
|
||||
737 | if self.setType is None: |
|
|||
738 | setFile += 1 |
|
|||
739 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, |
|
|||
740 | timeTuple.tm_year, |
|
|||
741 | timeTuple.tm_yday, |
|
|||
742 | setFile, |
|
|||
743 | ext ) |
|
|||
744 | else: |
|
|||
745 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min |
|
|||
746 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, |
|
|||
747 | timeTuple.tm_year, |
|
|||
748 | timeTuple.tm_yday, |
|
|||
749 | setFile, |
|
|||
750 | ext ) |
|
|||
751 |
|
||||
752 | filename = os.path.join( path, subfolder, file ) |
|
|||
753 |
|
||||
754 | #Setting HDF5 File |
|
|||
755 | fp = h5py.File(filename,'w') |
|
|||
756 | #write metadata |
|
|||
757 | self.writeMetadata(fp) |
|
|||
758 | #Write data |
|
|||
759 | grp = fp.create_group("Data") |
|
|||
760 | ds = [] |
|
|||
761 | data = [] |
|
|||
762 | dsList = self.dsList |
|
|||
763 | i = 0 |
|
|||
764 | while i < len(dsList): |
|
|||
765 | dsInfo = dsList[i] |
|
|||
766 | #One-dimension data |
|
|||
767 | if dsInfo['mode'] == 0: |
|
|||
768 | ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64) |
|
|||
769 | ds.append(ds0) |
|
|||
770 | data.append([]) |
|
|||
771 | i += 1 |
|
|||
772 | continue |
|
|||
773 |
|
||||
774 | elif dsInfo['mode'] == 2: |
|
|||
775 | grp0 = grp.create_group(dsInfo['variable']) |
|
|||
776 | ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data = numpy.zeros((1,dsInfo['shape'])) , maxshape=(None,dsInfo['shape']), chunks=True) |
|
|||
777 | ds.append(ds0) |
|
|||
778 | data.append([]) |
|
|||
779 | i += 1 |
|
|||
780 | continue |
|
|||
781 |
|
||||
782 | elif dsInfo['mode'] == 1: |
|
|||
783 | grp0 = grp.create_group(dsInfo['variable']) |
|
|||
784 |
|
||||
785 | for j in range(dsInfo['dsNumber']): |
|
|||
786 | dsInfo = dsList[i] |
|
|||
787 | tableName = dsInfo['dsName'] |
|
|||
788 |
|
||||
789 |
|
||||
790 | if dsInfo['nDim'] == 3: |
|
|||
791 | shape = dsInfo['shape'].astype(int) |
|
|||
792 | ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1) , data = numpy.zeros((shape[0],shape[1],1)), maxshape = (None,shape[1],None), chunks=True) |
|
|||
793 | else: |
|
|||
794 | shape = int(dsInfo['shape']) |
|
|||
795 | ds0 = grp0.create_dataset(tableName, (1,shape), data = numpy.zeros((1,shape)) , maxshape=(None,shape), chunks=True) |
|
|||
796 |
|
||||
797 | ds.append(ds0) |
|
|||
798 | data.append([]) |
|
|||
799 | i += 1 |
|
|||
800 |
|
||||
801 | fp.flush() |
|
|||
802 | fp.close() |
|
|||
803 |
|
||||
804 | log.log('creating file: {}'.format(filename), 'Writing') |
|
|||
805 | self.filename = filename |
|
|||
806 | self.ds = ds |
|
|||
807 | self.data = data |
|
|||
808 | self.firsttime = True |
|
|||
809 | self.blockIndex = 0 |
|
|||
810 | return |
|
|||
811 |
|
||||
812 | def putData(self): |
|
|||
813 |
|
||||
814 | if self.blockIndex == self.blocksPerFile or self.timeFlag(): |
|
|||
815 | self.setNextFile() |
|
|||
816 |
|
||||
817 | self.readBlock() |
|
|||
818 | self.setBlock() #Prepare data to be written |
|
|||
819 | self.writeBlock() #Write data |
|
|||
820 |
|
||||
821 | return |
|
|||
822 |
|
||||
823 | def readBlock(self): |
|
|||
824 |
|
||||
825 | ''' |
|
|||
826 | data Array configured |
|
|||
827 |
|
||||
828 |
|
||||
829 | self.data |
|
|||
830 | ''' |
|
|||
831 | dsList = self.dsList |
|
|||
832 | ds = self.ds |
|
|||
833 | #Setting HDF5 File |
|
|||
834 | fp = h5py.File(self.filename,'r+') |
|
|||
835 | grp = fp["Data"] |
|
|||
836 | ind = 0 |
|
|||
837 |
|
||||
838 | while ind < len(dsList): |
|
|||
839 | dsInfo = dsList[ind] |
|
|||
840 |
|
||||
841 | if dsInfo['mode'] == 0: |
|
|||
842 | ds0 = grp[dsInfo['variable']] |
|
|||
843 | ds[ind] = ds0 |
|
|||
844 | ind += 1 |
|
|||
845 | else: |
|
|||
846 |
|
||||
847 | grp0 = grp[dsInfo['variable']] |
|
|||
848 |
|
||||
849 | for j in range(dsInfo['dsNumber']): |
|
|||
850 | dsInfo = dsList[ind] |
|
|||
851 | ds0 = grp0[dsInfo['dsName']] |
|
|||
852 | ds[ind] = ds0 |
|
|||
853 | ind += 1 |
|
|||
854 |
|
||||
855 | self.fp = fp |
|
|||
856 | self.grp = grp |
|
|||
857 | self.ds = ds |
|
|||
858 |
|
||||
859 | return |
|
|||
860 |
|
||||
861 | def setBlock(self): |
|
|||
862 | ''' |
|
|||
863 | data Array configured |
|
|||
864 |
|
||||
865 |
|
||||
866 | self.data |
|
|||
867 | ''' |
|
|||
868 | #Creating Arrays |
|
|||
869 | dsList = self.dsList |
|
|||
870 | data = self.data |
|
|||
871 | ind = 0 |
|
|||
872 |
|
||||
873 | while ind < len(dsList): |
|
|||
874 | dsInfo = dsList[ind] |
|
|||
875 | dataAux = getattr(self.dataOut, dsInfo['variable']) |
|
|||
876 |
|
||||
877 | mode = dsInfo['mode'] |
|
|||
878 | nDim = dsInfo['nDim'] |
|
|||
879 |
|
||||
880 | if mode == 0 or mode == 2 or nDim == 1: |
|
|||
881 | data[ind] = dataAux |
|
|||
882 | ind += 1 |
|
|||
883 | # elif nDim == 1: |
|
|||
884 | # data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1)) |
|
|||
885 | # ind += 1 |
|
|||
886 | elif nDim == 2: |
|
|||
887 | for j in range(dsInfo['dsNumber']): |
|
|||
888 | data[ind] = dataAux[j,:] |
|
|||
889 | ind += 1 |
|
|||
890 | elif nDim == 3: |
|
|||
891 | for j in range(dsInfo['dsNumber']): |
|
|||
892 | data[ind] = dataAux[:,j,:] |
|
|||
893 | ind += 1 |
|
|||
894 |
|
||||
895 | self.data = data |
|
|||
896 | return |
|
|||
897 |
|
||||
898 | def writeBlock(self): |
|
|||
899 | ''' |
|
|||
900 | Saves the block in the HDF5 file |
|
|||
901 | ''' |
|
|||
902 | dsList = self.dsList |
|
|||
903 |
|
||||
904 | for i in range(len(self.ds)): |
|
|||
905 | dsInfo = dsList[i] |
|
|||
906 | nDim = dsInfo['nDim'] |
|
|||
907 | mode = dsInfo['mode'] |
|
|||
908 |
|
||||
909 | # First time |
|
|||
910 | if self.firsttime: |
|
|||
911 | if type(self.data[i]) == numpy.ndarray: |
|
|||
912 |
|
||||
913 | if nDim == 3: |
|
|||
914 | self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1)) |
|
|||
915 | self.ds[i].resize(self.data[i].shape) |
|
|||
916 | if mode == 2: |
|
|||
917 | self.ds[i].resize(self.data[i].shape) |
|
|||
918 | self.ds[i][:] = self.data[i] |
|
|||
919 | else: |
|
|||
920 |
|
||||
921 | # From second time |
|
|||
922 | # Meteors! |
|
|||
923 | if mode == 2: |
|
|||
924 | dataShape = self.data[i].shape |
|
|||
925 | dsShape = self.ds[i].shape |
|
|||
926 | self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1])) |
|
|||
927 | self.ds[i][dsShape[0]:,:] = self.data[i] |
|
|||
928 | # No dimension |
|
|||
929 | elif mode == 0: |
|
|||
930 | self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1)) |
|
|||
931 | self.ds[i][0,-1] = self.data[i] |
|
|||
932 | # One dimension |
|
|||
933 | elif nDim == 1: |
|
|||
934 | self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1])) |
|
|||
935 | self.ds[i][-1,:] = self.data[i] |
|
|||
936 | # Two dimension |
|
|||
937 | elif nDim == 2: |
|
|||
938 | self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1])) |
|
|||
939 | self.ds[i][self.blockIndex,:] = self.data[i] |
|
|||
940 | # Three dimensions |
|
|||
941 | elif nDim == 3: |
|
|||
942 | self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1)) |
|
|||
943 | self.ds[i][:,:,-1] = self.data[i] |
|
|||
944 |
|
||||
945 | self.firsttime = False |
|
|||
946 | self.blockIndex += 1 |
|
|||
947 |
|
||||
948 | #Close to save changes |
|
|||
949 | self.fp.flush() |
|
|||
950 | self.fp.close() |
|
|||
951 | return |
|
|||
952 |
|
||||
953 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, mode=None, setType=None): |
|
|||
954 |
|
||||
955 | self.dataOut = dataOut |
|
|||
956 | if not(self.isConfig): |
|
|||
957 | self.setup(dataOut, path=path, blocksPerFile=blocksPerFile, |
|
|||
958 | metadataList=metadataList, dataList=dataList, mode=mode, |
|
|||
959 | setType=setType) |
|
|||
960 |
|
||||
961 | self.isConfig = True |
|
|||
962 | self.setNextFile() |
|
|||
963 |
|
||||
964 | self.putData() |
|
|||
965 | return |
|
|||
966 |
|
||||
967 |
|
|
78 | ||
|
79 | """ | |||
968 |
|
80 | |||
969 | class ParameterReader(Reader, ProcessingUnit): |
|
81 | __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras'] | |
970 | ''' |
|
|||
971 | Reads HDF5 format files |
|
|||
972 | ''' |
|
|||
973 |
|
82 | |||
974 | def __init__(self): |
|
83 | def __init__(self): | |
975 | ProcessingUnit.__init__(self) |
|
84 | ProcessingUnit.__init__(self) | |
976 | self.dataOut = Parameters() |
|
85 | self.dataOut = Parameters() | |
977 | self.ext = ".hdf5" |
|
86 | self.ext = ".hdf5" | |
978 | self.optchar = "D" |
|
87 | self.optchar = "D" | |
979 |
self. |
|
88 | self.meta = {} | |
980 |
self. |
|
89 | self.data = {} | |
981 | self.listMeta = [] |
|
|||
982 | self.listDataname = [] |
|
|||
983 | self.listData = [] |
|
|||
984 | self.listShapes = [] |
|
|||
985 | self.open_file = h5py.File |
|
90 | self.open_file = h5py.File | |
986 | self.open_mode = 'r' |
|
91 | self.open_mode = 'r' | |
987 |
self. |
|
92 | self.description = {} | |
|
93 | self.extras = {} | |||
988 | self.filefmt = "*%Y%j***" |
|
94 | self.filefmt = "*%Y%j***" | |
989 | self.folderfmt = "*%Y%j" |
|
95 | self.folderfmt = "*%Y%j" | |
990 |
|
96 | |||
@@ -1001,7 +107,6 class ParameterReader(Reader, ProcessingUnit):
                 fullpath = self.searchFilesOnLine(self.path, self.startDate,
                     self.endDate, self.expLabel, self.ext, self.walk,
                     self.filefmt, self.folderfmt)
-
                 try:
                     fullpath = next(fullpath)
                 except:
@@ -1039,6 +144,13 class ParameterReader(Reader, ProcessingUnit):
         self.__readMetadata()
         self.__readData()
         self.__setBlockList()
+
+        if 'type' in self.meta:
+            self.dataOut = eval(self.meta['type'])()
+
+        for attr in self.meta:
+            setattr(self.dataOut, attr, self.meta[attr])
+
         self.blockIndex = 0

         return
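The added block restores dataOut from the file's metadata: when a `type` entry was saved, an object of that class is instantiated and every metadata entry is copied onto it as an attribute. A self-contained sketch of the same pattern (the class and the meta dict below are stand-ins, not the schainpy ones):

# Stand-in sketch of the restore-from-metadata pattern used above.
class Parameters:            # placeholder for the schainpy data class
    pass

meta = {'type': 'Parameters', 'timeZone': 300, 'useLocalTime': True}

dataOut = eval(meta['type'])()          # instantiate the class named in the file
for attr, value in meta.items():
    setattr(dataOut, attr, value)       # copy every metadata entry as an attribute

assert isinstance(dataOut, Parameters) and dataOut.timeZone == 300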
@@ -1058,20 +170,16 class ParameterReader(Reader, ProcessingUnit):
         startTime = self.startTime
         endTime = self.endTime

-        index = self.listDataname.index('utctime')
-        thisUtcTime = self.listData[index]
+        thisUtcTime = self.data['utctime']
         self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])

-        if self.timezone == 'lt':
-            thisUtcTime -= 5*3600
-
-        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
+        thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])

         thisDate = thisDatetime.date()
         thisTime = thisDatetime.time()

-        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
-        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
+        startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
+        endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

         ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

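__setBlockList keeps only the blocks whose timestamps fall inside the requested time-of-day window. A standalone sketch of that numpy selection, not part of the patch, with made-up hourly timestamps:

# Sketch of the block selection above (timestamps are hypothetical).
import datetime
import numpy

thisUtcTime = numpy.array([1546300800.0 + 3600 * i for i in range(6)])  # hourly blocks

thisDate = datetime.datetime.utcfromtimestamp(thisUtcTime[0]).date()
startTime, endTime = datetime.time(1, 0, 0), datetime.time(4, 0, 0)

epoch = datetime.datetime(1970, 1, 1)
startUtcTime = (datetime.datetime.combine(thisDate, startTime) - epoch).total_seconds()
endUtcTime = (datetime.datetime.combine(thisDate, endTime) - epoch).total_seconds()

ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
print(ind)   # -> [1 2 3], the blocks between 01:00 and 04:00 UTC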
@@ -1084,99 +192,80 class ParameterReader(Reader, ProcessingUnit): | |||||
1084 | Reads Metadata |
|
192 | Reads Metadata | |
1085 | ''' |
|
193 | ''' | |
1086 |
|
194 | |||
1087 |
|
|
195 | meta = {} | |
1088 | listMetadata = [] |
|
196 | ||
1089 | if 'Metadata' in self.fp: |
|
197 | if self.description: | |
1090 |
|
|
198 | for key, value in self.description['Metadata'].items(): | |
1091 | for item in list(gp.items()): |
|
199 | meta[key] = self.fp[value].value | |
1092 | name = item[0] |
|
|||
1093 |
|
||||
1094 | if name=='variables': |
|
|||
1095 | table = gp[name][:] |
|
|||
1096 | listShapes = {} |
|
|||
1097 | for shapes in table: |
|
|||
1098 | listShapes[shapes[0].decode()] = numpy.array([shapes[1]]) |
|
|||
1099 | else: |
|
|||
1100 | data = gp[name].value |
|
|||
1101 | listMetaname.append(name) |
|
|||
1102 | listMetadata.append(data) |
|
|||
1103 | elif self.metadata: |
|
|||
1104 | metadata = json.loads(self.metadata) |
|
|||
1105 | listShapes = {} |
|
|||
1106 | for tup in metadata: |
|
|||
1107 | name, values, dim = tup |
|
|||
1108 | if dim == -1: |
|
|||
1109 | listMetaname.append(name) |
|
|||
1110 | listMetadata.append(self.fp[values].value) |
|
|||
1111 | else: |
|
|||
1112 | listShapes[name] = numpy.array([dim]) |
|
|||
1113 | else: |
|
200 | else: | |
1114 | raise IOError('Missing Metadata group in file or metadata info') |
|
201 | grp = self.fp['Metadata'] | |
|
202 | for name in grp: | |||
|
203 | meta[name] = grp[name].value | |||
1115 |
|
204 | |||
1116 | self.listShapes = listShapes |
|
205 | if self.extras: | |
1117 | self.listMetaname = listMetaname |
|
206 | for key, value in self.extras.items(): | |
1118 | self.listMeta = listMetadata |
|
207 | meta[key] = value | |
|
208 | self.meta = meta | |||
1119 |
|
209 | |||
1120 | return |
|
210 | return | |
1121 |
|
211 | |||
1122 | def __readData(self): |
|
212 | def __readData(self): | |
1123 |
|
213 | |||
1124 |
|
|
214 | data = {} | |
1125 | listdata = [] |
|
|||
1126 |
|
215 | |||
1127 |
if |
|
216 | if self.description: | |
|
217 | for key, value in self.description['Data'].items(): | |||
|
218 | if isinstance(value, str): | |||
|
219 | if isinstance(self.fp[value], h5py.Dataset): | |||
|
220 | data[key] = self.fp[value].value | |||
|
221 | elif isinstance(self.fp[value], h5py.Group): | |||
|
222 | array = [] | |||
|
223 | for ch in self.fp[value]: | |||
|
224 | array.append(self.fp[value][ch].value) | |||
|
225 | data[key] = numpy.array(array) | |||
|
226 | elif isinstance(value, list): | |||
|
227 | array = [] | |||
|
228 | for ch in value: | |||
|
229 | array.append(self.fp[ch].value) | |||
|
230 | data[key] = numpy.array(array) | |||
|
231 | else: | |||
1128 | grp = self.fp['Data'] |
|
232 | grp = self.fp['Data'] | |
1129 |
for |
|
233 | for name in grp: | |
1130 | name = item[0] |
|
234 | if isinstance(grp[name], h5py.Dataset): | |
1131 | listdataname.append(name) |
|
|||
1132 | dim = self.listShapes[name][0] |
|
|||
1133 | if dim == 0: |
|
|||
1134 | array = grp[name].value |
|
235 | array = grp[name].value | |
1135 | else: |
|
236 | elif isinstance(grp[name], h5py.Group): | |
1136 | array = [] |
|
237 | array = [] | |
1137 |
for |
|
238 | for ch in grp[name]: | |
1138 |
array.append(grp[name][ |
|
239 | array.append(grp[name][ch].value) | |
1139 | array = numpy.array(array) |
|
240 | array = numpy.array(array) | |
1140 |
|
||||
1141 | listdata.append(array) |
|
|||
1142 | elif self.metadata: |
|
|||
1143 | metadata = json.loads(self.metadata) |
|
|||
1144 | for tup in metadata: |
|
|||
1145 | name, values, dim = tup |
|
|||
1146 | listdataname.append(name) |
|
|||
1147 | if dim == -1: |
|
|||
1148 | continue |
|
|||
1149 | elif dim == 0: |
|
|||
1150 | array = self.fp[values].value |
|
|||
1151 | else: |
|
241 | else: | |
1152 | array = [] |
|
242 | log.warning('Unknown type: {}'.format(name)) | |
1153 | for var in values: |
|
243 | ||
1154 | array.append(self.fp[var].value) |
|
244 | if name in self.description: | |
1155 | array = numpy.array(array) |
|
245 | key = self.description[name] | |
1156 |
|
|
246 | else: | |
1157 | else: |
|
247 | key = name | |
1158 | raise IOError('Missing Data group in file or metadata info') |
|
248 | data[key] = array | |
1159 |
|
249 | |||
1160 | self.listDataname = listdataname |
|
250 | self.data = data | |
1161 | self.listData = listdata |
|
|||
1162 | return |
|
251 | return | |
1163 |
|
252 | |||
1164 | def getData(self): |
|
253 | def getData(self): | |
1165 |
|
254 | |||
1166 |
for |
|
255 | for attr in self.data: | |
1167 | setattr(self.dataOut, self.listMetaname[i], self.listMeta[i]) |
|
256 | if self.data[attr].ndim == 1: | |
1168 |
|
257 | setattr(self.dataOut, attr, self.data[attr][self.blockIndex]) | ||
1169 | for j in range(len(self.listData)): |
|
|||
1170 | dim = self.listShapes[self.listDataname[j]][0] |
|
|||
1171 | if dim == 0: |
|
|||
1172 | setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex]) |
|
|||
1173 | else: |
|
258 | else: | |
1174 |
setattr(self.dataOut, |
|
259 | setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex]) | |
1175 |
|
260 | |||
1176 | self.dataOut.paramInterval = self.interval |
|
|||
1177 | self.dataOut.flagNoData = False |
|
261 | self.dataOut.flagNoData = False | |
1178 | self.blockIndex += 1 |
|
262 | self.blockIndex += 1 | |
1179 |
|
263 | |||
|
264 | log.log("Block No. {}/{} -> {}".format( | |||
|
265 | self.blockIndex, | |||
|
266 | self.blocksPerFile, | |||
|
267 | self.dataOut.datatime.ctime()), self.name) | |||
|
268 | ||||
1180 | return |
|
269 | return | |
1181 |
|
270 | |||
1182 | def run(self, **kwargs): |
|
271 | def run(self, **kwargs): | |
@@ -1193,28 +282,69 class ParameterReader(Reader, ProcessingUnit):
         return

 @MPDecorator
-class ParameterWriter(Operation):
-    '''
-    HDF5 Writer, stores parameters data in HDF5 format files
-
-    path: path where the files will be stored
-    blocksPerFile: number of blocks that will be saved in per HDF5 format file
-    mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors)
-    metadataList: list of attributes that will be stored as metadata
-    dataList: list of attributes that will be stores as data
-    '''
+class HDFWriter(Operation):
+    """Operation to write HDF5 files.
+
+    The HDF5 file contains by default two groups Data and Metadata where
+    you can save any `dataOut` attribute specified by `dataList` and `metadataList`
+    parameters, data attributes are normaly time dependent where the metadata
+    are not.
+    It is possible to customize the structure of the HDF5 file with the
+    optional description parameter see the examples.
+
+    Parameters:
+    -----------
+    path : str
+        Path where files will be saved.
+    blocksPerFile : int
+        Number of blocks per file
+    metadataList : list
+        List of the dataOut attributes that will be saved as metadata
+    dataList : int
+        List of the dataOut attributes that will be saved as data
+    setType : bool
+        If True the name of the files corresponds to the timestamp of the data
+    description : dict, optional
+        Dictionary with the desired description of the HDF5 file
+
+    Examples
+    --------
+
+    desc = {
+        'data_output': {'winds': ['z', 'w', 'v']},
+        'utctime': 'timestamps',
+        'heightList': 'heights'
+    }
+    desc = {
+        'data_output': ['z', 'w', 'v'],
+        'utctime': 'timestamps',
+        'heightList': 'heights'
+    }
+    desc = {
+        'Data': {
+            'data_output': 'winds',
+            'utctime': 'timestamps'
+        },
+        'Metadata': {
+            'heightList': 'heights'
+        }
+    }
+
+    writer = proc_unit.addOperation(name='HDFWriter')
+    writer.addParameter(name='path', value='/path/to/file')
+    writer.addParameter(name='blocksPerFile', value='32')
+    writer.addParameter(name='metadataList', value='heightList,timeZone')
+    writer.addParameter(name='dataList',value='data_output,utctime')
+    # writer.addParameter(name='description',value=json.dumps(desc))

+    """

     ext = ".hdf5"
     optchar = "D"
-    metaoptchar = "M"
-    metaFile = None
     filename = None
     path = None
     setFile = None
     fp = None
-    grp = None
-    ds = None
     firsttime = True
     #Configurations
     blocksPerFile = None
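For orientation, a hedged sketch of the kind of file layout the third `desc` example above should produce. The file name and path are hypothetical; the group and dataset names follow that description:

# Sketch only: inspect a file written with the third `desc` example above,
# i.e. description={'Data': {...}, 'Metadata': {...}}. File name is hypothetical.
import h5py

with h5py.File('/path/to/file/d2019001/D2019001000.hdf5', 'r') as fp:
    print(list(fp['Data']))        # e.g. ['timestamps', 'winds']
    print(list(fp['Metadata']))    # e.g. ['heights']
    timestamps = fp['Data/timestamps'][:]   # one entry per written block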
@@ -1223,9 +353,6 class ParameterWriter(Operation):
     #Data Arrays
     dataList = None
     metadataList = None
-    dsList = None #List of dictionaries with dataset properties
-    tableDim = None
-    dtype = [('name', 'S20'),('nDim', 'i')]
     currentDay = None
     lastTime = None

@@ -1234,12 +361,17 class ParameterWriter(Operation):
         Operation.__init__(self)
         return

-    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
+    def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
         self.path = path
         self.blocksPerFile = blocksPerFile
         self.metadataList = metadataList
         self.dataList = dataList
         self.setType = setType
+        self.description = description
+
+        for s in ['type', 'timeZone', 'useLocalTime']:
+            if s not in self.metadataList:
+                self.metadataList.append(s)

         tableList = []
         dsList = []
@@ -1257,12 +389,11 class ParameterWriter(Operation):
                 dsDict['nDim'] = len(dataAux.shape)
                 dsDict['shape'] = dataAux.shape
                 dsDict['dsNumber'] = dataAux.shape[0]
+                dsDict['dtype'] = dataAux.dtype

             dsList.append(dsDict)
-            tableList.append((self.dataList[i], dsDict['nDim']))

         self.dsList = dsList
-        self.tableDim = numpy.array(tableList, dtype=self.dtype)
         self.currentDay = self.dataOut.datatime.date()

     def timeFlag(self):
@@ -1288,13 +419,14 class ParameterWriter(Operation):
             self.lastTime = currentTime
             return False

-    def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, setType=None):
+    def run(self, dataOut, path, blocksPerFile=10, metadataList=[],
+            dataList=[], setType=None, description={}):

         self.dataOut = dataOut
         if not(self.isConfig):
             self.setup(path=path, blocksPerFile=blocksPerFile,
                        metadataList=metadataList, dataList=dataList,
-                       setType=setType)
+                       setType=setType, description=description)

             self.isConfig = True
             self.setNextFile()
@@ -1355,41 +487,94 class ParameterWriter(Operation):
         #Write data
         self.writeData(self.fp)

+    def getLabel(self, name, x=None):
+
+        if x is None:
+            if 'Data' in self.description:
+                data = self.description['Data']
+                if 'Metadata' in self.description:
+                    data.update(self.description['Metadata'])
+            else:
+                data = self.description
+            if name in data:
+                if isinstance(data[name], str):
+                    return data[name]
+                elif isinstance(data[name], list):
+                    return None
+                elif isinstance(data[name], dict):
+                    for key, value in data[name].items():
+                        return key
+            return name
+        else:
+            if 'Metadata' in self.description:
+                meta = self.description['Metadata']
+            else:
+                meta = self.description
+            if name in meta:
+                if isinstance(meta[name], list):
+                    return meta[name][x]
+                elif isinstance(meta[name], dict):
+                    for key, value in meta[name].items():
+                        return value[x]
+            return 'channel{:02d}'.format(x)
+
     def writeMetadata(self, fp):

-        grp = fp.create_group("Metadata")
-        grp.create_dataset('variables', data=self.tableDim, dtype=self.dtype)
+        if self.description:
+            if 'Metadata' in self.description:
+                grp = fp.create_group('Metadata')
+            else:
+                grp = fp
+        else:
+            grp = fp.create_group('Metadata')

         for i in range(len(self.metadataList)):
             if not hasattr(self.dataOut, self.metadataList[i]):
                 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
                 continue
             value = getattr(self.dataOut, self.metadataList[i])
-            grp.create_dataset(self.metadataList[i], data=value)
+            if isinstance(value, bool):
+                if value is True:
+                    value = 1
+                else:
+                    value = 0
+            grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
         return

     def writeData(self, fp):

-        grp = fp.create_group("Data")
+        if self.description:
+            if 'Data' in self.description:
+                grp = fp.create_group('Data')
+            else:
+                grp = fp
+        else:
+            grp = fp.create_group('Data')
+
         dtsets = []
         data = []

         for dsInfo in self.dsList:
             if dsInfo['nDim'] == 0:
                 ds = grp.create_dataset(
-                    dsInfo['variable'],
+                    self.getLabel(dsInfo['variable']),
                     (self.blocksPerFile, ),
                     chunks=True,
                     dtype=numpy.float64)
                 dtsets.append(ds)
                 data.append((dsInfo['variable'], -1))
             else:
-                sgrp = grp.create_group(dsInfo['variable'])
+                label = self.getLabel(dsInfo['variable'])
+                if label is not None:
+                    sgrp = grp.create_group(label)
+                else:
+                    sgrp = grp
                 for i in range(dsInfo['dsNumber']):
                     ds = sgrp.create_dataset(
-                        'channel{:02d}'.format(i),
+                        self.getLabel(dsInfo['variable'], i),
                         (self.blocksPerFile, ) + dsInfo['shape'][1:],
-                        chunks=True)
+                        chunks=True,
+                        dtype=dsInfo['dtype'])
                     dtsets.append(ds)
                     data.append((dsInfo['variable'], i))
         fp.flush()
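getLabel decides the dataset or group name used for each variable, optionally per channel. Below is a simplified, standalone restatement of those naming rules, for illustration only (the real method also merges the Data and Metadata sections of the description); the example values mirror the docstring examples above:

# Simplified restatement of the naming rules getLabel implements (sketch only).
def label_for(description, name, x=None):
    if x is None:
        value = description.get(name, name)
        if isinstance(value, str):        # 'utctime': 'timestamps' -> dataset 'timestamps'
            return value
        if isinstance(value, dict):       # 'data_output': {'winds': [...]} -> group 'winds'
            return next(iter(value))
        return None                       # a plain list means: no wrapping group
    value = description.get(name)
    if isinstance(value, list):           # ['z', 'w', 'v'] -> per-channel dataset names
        return value[x]
    if isinstance(value, dict):
        return next(iter(value.values()))[x]
    return 'channel{:02d}'.format(x)      # fallback when nothing is declared

desc = {'data_output': ['z', 'w', 'v'], 'utctime': 'timestamps'}
print(label_for(desc, 'utctime'))          # -> 'timestamps'
print(label_for(desc, 'data_output', 1))   # -> 'w'
print(label_for(desc, 'noiseLevel', 0))    # -> 'channel00'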