@@ -1234,11 +1234,11 class PlotterData(object): | |||
|
1234 | 1234 | if plot == 'snr': |
|
1235 | 1235 | buffer = 10*numpy.log10(dataOut.data_SNR) |
|
1236 | 1236 | if plot == 'dop': |
|
1237 | buffer = … | |
|
1238 | if plot == '… | |
|
1239 | buffer = dataOut.data_… | |
|
1240 | if plot == '… | |
|
1241 | buffer = dataOut.data_… | |
|
1237 | buffer = dataOut.data_DOP | |
|
1238 | if plot == 'pow': | |
|
1239 | buffer = 10*numpy.log10(dataOut.data_POW) | |
|
1240 | if plot == 'width': | |
|
1241 | buffer = dataOut.data_WIDTH | |
|
1242 | 1242 | if plot == 'coh': |
|
1243 | 1243 | buffer = dataOut.getCoherence() |
|
1244 | 1244 | if plot == 'phase': |
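For context, the hunk above maps each plot key to a `dataOut` attribute before plotting; a minimal sketch of that dispatch, assuming only the attribute names visible in these lines (SNR and power are converted to dB):

```python
import numpy

def get_buffer(dataOut, plot):
    """Pick the array to plot for a given plot key (sketch of the lines above)."""
    if plot == 'snr':
        return 10 * numpy.log10(dataOut.data_SNR)   # SNR in dB
    if plot == 'dop':
        return dataOut.data_DOP                     # Doppler
    if plot == 'pow':
        return 10 * numpy.log10(dataOut.data_POW)   # power in dB
    if plot == 'width':
        return dataOut.data_WIDTH                   # spectral width
    if plot == 'coh':
        return dataOut.getCoherence()
    raise ValueError('unknown plot key: %r' % plot)
```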
@@ -1205,7 +1205,7 class JRODataReader(JRODataIO): | |||
|
1205 | 1205 | |
|
1206 | 1206 | thisDate = getDateFromRadarFile(thisFile) |
|
1207 | 1207 | |
|
1208 | if thisDate in dateList: | |
|
1208 | if thisDate in dateList or single_path in pathList: | |
|
1209 | 1209 | continue |
|
1210 | 1210 | |
|
1211 | 1211 | dateList.append(thisDate) |
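The one-line change above also skips files whose folder is already in `pathList`, not only files whose date is already in `dateList`; a small sketch of the surrounding collection loop (the pairing of folder and file is an assumption, only the membership test comes from the hunk):

```python
def collect_dates(radar_files, get_date_from_radar_file, pathList, dateList):
    """Append each new (date, folder) pair once; skip duplicates (sketch)."""
    for single_path, thisFile in radar_files:          # (folder, filename) pairs
        thisDate = get_date_from_radar_file(thisFile)
        if thisDate in dateList or single_path in pathList:
            continue                                    # already collected
        dateList.append(thisDate)
        pathList.append(single_path)
    return dateList, pathList
```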
@@ -1599,10 +1599,10 class JRODataWriter(JRODataIO): | |||
|
1599 | 1599 | self.basicHeaderObj.size = self.basicHeaderSize # bytes |
|
1600 | 1600 | self.basicHeaderObj.version = self.versionFile |
|
1601 | 1601 | self.basicHeaderObj.dataBlock = self.nTotalBlocks |
|
1602 | ||
|
1602 | log.warning(datetime.datetime.fromtimestamp(self.dataOut.utctime)) | |
|
1603 | 1603 | utc = numpy.floor(self.dataOut.utctime) |
|
1604 | 1604 | milisecond = (self.dataOut.utctime - utc) * 1000.0 |
|
1605 | ||
|
1605 | log.warning(milisecond) | |
|
1606 | 1606 | self.basicHeaderObj.utc = utc |
|
1607 | 1607 | self.basicHeaderObj.miliSecond = milisecond |
|
1608 | 1608 | self.basicHeaderObj.timeZone = self.dataOut.timeZone |
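The unchanged lines above split the float timestamp into whole seconds and milliseconds before filling the basic header; a minimal illustration with an invented timestamp:

```python
import numpy

utctime = 1588892163.4375                # example float timestamp in seconds
utc = numpy.floor(utctime)               # whole seconds   -> basicHeaderObj.utc
milisecond = (utctime - utc) * 1000.0    # fractional part -> basicHeaderObj.miliSecond
print(int(utc), milisecond)              # 1588892163 437.5
```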
This diff has been collapsed as it changes many lines (655 lines changed).
@@ -7,74 +7,44 import datetime | |||
|
7 | 7 | |
|
8 | 8 | from schainpy.model.data.jrodata import * |
|
9 | 9 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator |
|
10 | # from .jroIO_base import * | |
|
11 | 10 | from schainpy.model.io.jroIO_base import * |
|
12 | import schainpy | |
|
13 | 11 | from schainpy.utils import log |
|
14 | 12 | |
|
15 | 13 | @MPDecorator |
|
16 | 14 | class ParamReader(JRODataReader,ProcessingUnit): |
|
17 | 15 | ''' |
|
18 | 16 | Reads HDF5 format files |
|
19 | ||
|
20 | 17 | path |
|
21 | ||
|
22 | 18 | startDate |
|
23 | ||
|
24 | 19 | endDate |
|
25 | ||
|
26 | 20 | startTime |
|
27 | ||
|
28 | 21 | endTime |
|
29 | 22 | ''' |
|
30 | 23 | |
|
31 | 24 | ext = ".hdf5" |
|
32 | ||
|
33 | 25 | optchar = "D" |
|
34 | ||
|
35 | 26 | timezone = None |
|
36 | ||
|
37 | 27 | startTime = None |
|
38 | ||
|
39 | 28 | endTime = None |
|
40 | ||
|
41 | 29 | fileIndex = None |
|
42 | ||
|
43 | 30 | utcList = None #To select data in the utctime list |
|
44 | ||
|
45 | 31 | blockList = None #List of blocks to be read from the file |
|
46 | ||
|
47 | 32 | blocksPerFile = None #Number of blocks to be read |
|
48 | ||
|
49 | 33 | blockIndex = None |
|
50 | ||
|
51 | 34 | path = None |
|
52 | ||
|
53 | 35 | #List of Files |
|
54 | ||
|
55 | 36 | filenameList = None |
|
56 | ||
|
57 | 37 | datetimeList = None |
|
58 | ||
|
59 | 38 | #Hdf5 File |
|
60 | ||
|
61 | 39 | listMetaname = None |
|
62 | ||
|
63 | 40 | listMeta = None |
|
64 | ||
|
65 | 41 | listDataname = None |
|
66 | ||
|
67 | 42 | listData = None |
|
68 | ||
|
69 | 43 | listShapes = None |
|
70 | ||
|
71 | 44 | fp = None |
|
72 | ||
|
73 | 45 | #dataOut reconstruction |
|
74 | ||
|
75 | 46 | dataOut = None |
|
76 | 47 | |
|
77 | ||
|
78 | 48 | def __init__(self):#, **kwargs): |
|
79 | 49 | ProcessingUnit.__init__(self) #, **kwargs) |
|
80 | 50 | self.dataOut = Parameters() |
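The reader above is driven entirely by keyword arguments; a hedged configuration sketch using the keyword names handled by the `setup()` methods later in this diff (the path and dates are invented):

```python
import datetime

# Hypothetical configuration; only the keyword names come from this diff.
kwargs = dict(
    path='/data/experiment',                 # invented data location
    startDate=datetime.date(2018, 1, 1),
    endDate=datetime.date(2018, 1, 2),
    startTime=datetime.time(0, 0, 0),
    endTime=datetime.time(23, 59, 59),
    walk=True,                               # descend into dated subfolders
    ext='.hdf5',
    timezone='lt',                           # local time (UTC-5) as assumed by the reader
)
# reader = ParamReader(); reader.run(**kwargs)   # wiring shown for illustration only
```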
@@ -152,7 +122,6 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
152 | 122 | #---------------------------------------------------------------------------------- |
|
153 | 123 | |
|
154 | 124 | for thisPath in pathList: |
|
155 | # thisPath = pathList[pathDict[file]] | |
|
156 | 125 | |
|
157 | 126 | fileList = glob.glob1(thisPath, "*%s" %ext) |
|
158 | 127 | fileList.sort() |
@@ -191,13 +160,9 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
191 | 160 | |
|
192 | 161 | Inputs: |
|
193 | 162 | filename : full path of the data file in Jicamarca format (.r) |
|
194 ||
|
195 | 163 | startDate : start date of the selected range, as a datetime.date |
|
196 ||
|
197 | 164 | endDate : end date of the selected range, as a datetime.date |
|
198 ||
|
199 | 165 | startTime : start time of the selected range, as a datetime.time |
|
200 ||
|
201 | 166 | endTime : end time of the selected range, as a datetime.time |
|
202 | 167 | |
|
203 | 168 | Return: |
@@ -217,7 +182,7 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
217 | 182 | except IOError: |
|
218 | 183 | traceback.print_exc() |
|
219 | 184 | raise IOError("The file %s can't be opened" %(filename)) |
|
220 | #chino rata | |
|
185 | ||
|
221 | 186 | #In case has utctime attribute |
|
222 | 187 | grp2 = grp1['utctime'] |
|
223 | 188 | # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time |
@@ -229,7 +194,6 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
229 | 194 | thisUtcTime -= 5*3600 |
|
230 | 195 | |
|
231 | 196 | thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600) |
|
232 | # thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0]) | |
|
233 | 197 | thisDate = thisDatetime.date() |
|
234 | 198 | thisTime = thisDatetime.time() |
|
235 | 199 | |
@@ -269,24 +233,18 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
269 | 233 | idFile = self.fileIndex |
|
270 | 234 | |
|
271 | 235 | if not(idFile < len(self.filenameList)): |
|
272 | … | |
|
236 | self.dataOut.error = "No more Files" | |
|
273 | 237 | return 0 |
|
274 | 238 | |
|
275 | 239 | filename = self.filenameList[idFile] |
|
276 | ||
|
277 | 240 | filePointer = h5py.File(filename,'r') |
|
278 | ||
|
279 | 241 | self.filename = filename |
|
280 | ||
|
281 | 242 | self.fp = filePointer |
|
282 | 243 | |
|
283 | 244 | print("Setting the file: %s"%self.filename) |
|
284 | 245 | |
|
285 | # self.__readMetadata() | |
|
286 | 246 | self.__setBlockList() |
|
287 | 247 | self.__readData() |
|
288 | # self.nRecords = self.fp['Data'].attrs['blocksPerFile'] | |
|
289 | # self.nRecords = self.fp['Data'].attrs['nRecords'] | |
|
290 | 248 | self.blockIndex = 0 |
|
291 | 249 | return 1 |
|
292 | 250 | |
@@ -333,28 +291,14 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
333 | 291 | Reads Metadata |
|
334 | 292 | |
|
335 | 293 | self.pathMeta |
|
336 | ||
|
337 | 294 | self.listShapes |
|
338 | 295 | self.listMetaname |
|
339 | 296 | self.listMeta |
|
340 | 297 | |
|
341 | 298 | ''' |
|
342 | 299 | |
|
343 | # grp = self.fp['Data'] | |
|
344 | # pathMeta = os.path.join(self.path, grp.attrs['metadata']) | |
|
345 | # | |
|
346 | # if pathMeta == self.pathMeta: | |
|
347 | # return | |
|
348 | # else: | |
|
349 | # self.pathMeta = pathMeta | |
|
350 | # | |
|
351 | # filePointer = h5py.File(self.pathMeta,'r') | |
|
352 | # groupPointer = filePointer['Metadata'] | |
|
353 | ||
|
354 | 300 | filename = self.filenameList[0] |
|
355 | ||
|
356 | 301 | fp = h5py.File(filename,'r') |
|
357 | ||
|
358 | 302 | gp = fp['Metadata'] |
|
359 | 303 | |
|
360 | 304 | listMetaname = [] |
@@ -372,9 +316,6 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
372 | 316 | listMetaname.append(name) |
|
373 | 317 | listMetadata.append(data) |
|
374 | 318 | |
|
375 | # if name=='type': | |
|
376 | # self.__initDataOut(data) | |
|
377 | ||
|
378 | 319 | self.listShapes = listShapes |
|
379 | 320 | self.listMetaname = listMetaname |
|
380 | 321 | self.listMeta = listMetadata |
@@ -401,17 +342,11 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
401 | 342 | def __setDataArray(self, dataset, shapes): |
|
402 | 343 | |
|
403 | 344 | nDims = shapes[0] |
|
404 | ||
|
405 | 345 | nDim2 = shapes[1] #Dimension 0 |
|
406 | ||
|
407 | 346 | nDim1 = shapes[2] #Dimension 1, number of Points or Parameters |
|
408 | ||
|
409 | 347 | nDim0 = shapes[3] #Dimension 2, number of samples or ranges |
|
410 | ||
|
411 | 348 | mode = shapes[4] #Mode of storing |
|
412 | ||
|
413 | 349 | blockList = self.blockList |
|
414 | ||
|
415 | 350 | blocksPerFile = self.blocksPerFile |
|
416 | 351 | |
|
417 | 352 | #Depending on what mode the data was stored |
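For reference, the `shapes` row unpacked above encodes the stored geometry and the stacking mode of each dataset; a tiny sketch of the same unpacking with an invented metadata row:

```python
# Invented example of a metadata row, following the comments above.
shapes = [2, 4, 30, 100, 1]

nDims = shapes[0]    # number of dimensions of the stored array
nDim2 = shapes[1]    # dimension 0
nDim1 = shapes[2]    # dimension 1: number of points or parameters
nDim0 = shapes[3]    # dimension 2: number of samples or ranges
mode  = shapes[4]    # how the blocks were stacked when written

print(nDims, nDim2, nDim1, nDim0, mode)
```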
@@ -517,7 +452,6 class ParamReader(JRODataReader,ProcessingUnit): | |||
|
517 | 452 | |
|
518 | 453 | if not(self.isConfig): |
|
519 | 454 | self.setup(**kwargs) |
|
520 | # self.setObjProperties() | |
|
521 | 455 | self.isConfig = True |
|
522 | 456 | |
|
523 | 457 | self.getData() |
@@ -530,18 +464,12 class ParamWriter(Operation): | |||
|
530 | 464 | HDF5 Writer, stores parameters data in HDF5 format files |
|
531 | 465 | |
|
532 | 466 | path: path where the files will be stored |
|
533 | ||
|
534 | 467 | blocksPerFile: number of blocks that will be saved per HDF5 format file |
|
535 | ||
|
536 | 468 | mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors) |
|
537 | ||
|
538 | 469 | metadataList: list of attributes that will be stored as metadata |
|
539 | ||
|
540 | 470 | dataList: list of attributes that will be stored as data |
|
541 | ||
|
542 | 471 | ''' |
|
543 | 472 | |
|
544 | ||
|
545 | 473 | ext = ".hdf5" |
|
546 | 474 | optchar = "D" |
|
547 | 475 | metaoptchar = "M" |
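A hedged example of invoking the writer described by this docstring, using the parameter names of `ParameterWriter.run()` that appears later in the diff; the attribute names in the lists are placeholders, not a prescribed set:

```python
# Hypothetical call; dataOut is the Parameters object produced upstream.
params = dict(
    path='/data/hdf5_out',                    # output folder (invented)
    blocksPerFile=10,                         # blocks stored per .hdf5 file
    metadataList=['timeZone', 'heightList'],  # saved once per file as metadata
    dataList=['utctime', 'data_SNR'],         # saved per block as data
)
# writer = ParameterWriter()
# writer.run(dataOut, **params)
```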
@@ -1035,3 +963,582 class ParamWriter(Operation): | |||
|
1035 | 963 | self.putData() |
|
1036 | 964 | return |
|
1037 | 965 | |
|
966 | ||
|
967 | @MPDecorator | |
|
968 | class ParameterReader(JRODataReader,ProcessingUnit): | |
|
969 | ''' | |
|
970 | Reads HDF5 format files | |
|
971 | ''' | |
|
972 | ||
|
973 | ext = ".hdf5" | |
|
974 | optchar = "D" | |
|
975 | timezone = None | |
|
976 | startTime = None | |
|
977 | endTime = None | |
|
978 | fileIndex = None | |
|
979 | blockList = None #List of blocks to be read from the file | |
|
980 | blocksPerFile = None #Number of blocks to be read | |
|
981 | blockIndex = None | |
|
982 | path = None | |
|
983 | #List of Files | |
|
984 | filenameList = None | |
|
985 | datetimeList = None | |
|
986 | #Hdf5 File | |
|
987 | listMetaname = None | |
|
988 | listMeta = None | |
|
989 | listDataname = None | |
|
990 | listData = None | |
|
991 | listShapes = None | |
|
992 | fp = None | |
|
993 | #dataOut reconstruction | |
|
994 | dataOut = None | |
|
995 | ||
|
996 | def __init__(self): | |
|
997 | ProcessingUnit.__init__(self) | |
|
998 | self.dataOut = Parameters() | |
|
999 | return | |
|
1000 | ||
|
1001 | def setup(self, **kwargs): | |
|
1002 | ||
|
1003 | path = kwargs['path'] | |
|
1004 | startDate = kwargs['startDate'] | |
|
1005 | endDate = kwargs['endDate'] | |
|
1006 | startTime = kwargs['startTime'] | |
|
1007 | endTime = kwargs['endTime'] | |
|
1008 | walk = kwargs['walk'] | |
|
1009 | if 'ext' in kwargs: | |
|
1010 | ext = kwargs['ext'] | |
|
1011 | else: | |
|
1012 | ext = '.hdf5' | |
|
1013 | if 'timezone' in kwargs: | |
|
1014 | self.timezone = kwargs['timezone'] | |
|
1015 | else: | |
|
1016 | self.timezone = 'lt' | |
|
1017 | ||
|
1018 | print("[Reading] Searching files in offline mode ...") | |
|
1019 | pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate, | |
|
1020 | startTime=startTime, endTime=endTime, | |
|
1021 | ext=ext, walk=walk) | |
|
1022 | ||
|
1023 | if not(filenameList): | |
|
1024 | print("There are no files in the folder: %s"%(path)) | |
|
1025 | sys.exit(-1) | |
|
1026 | ||
|
1027 | self.fileIndex = -1 | |
|
1028 | self.startTime = startTime | |
|
1029 | self.endTime = endTime | |
|
1030 | self.__readMetadata() | |
|
1031 | self.__setNextFileOffline() | |
|
1032 | ||
|
1033 | return | |
|
1034 | ||
|
1035 | def searchFilesOffLine(self, path, startDate=None, endDate=None, startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59), ext='.hdf5', walk=True): | |
|
1036 | ||
|
1037 | expLabel = '' | |
|
1038 | self.filenameList = [] | |
|
1039 | self.datetimeList = [] | |
|
1040 | pathList = [] | |
|
1041 | dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True) | |
|
1042 | ||
|
1043 | if dateList == []: | |
|
1044 | print("[Reading] No *%s files in %s from %s to %s)"%(ext, path, | |
|
1045 | datetime.datetime.combine(startDate,startTime).ctime(), | |
|
1046 | datetime.datetime.combine(endDate,endTime).ctime())) | |
|
1047 | ||
|
1048 | return None, None | |
|
1049 | ||
|
1050 | if len(dateList) > 1: | |
|
1051 | print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)) | |
|
1052 | else: | |
|
1053 | print("[Reading] data was found for the date %s" %(dateList[0])) | |
|
1054 | ||
|
1055 | filenameList = [] | |
|
1056 | datetimeList = [] | |
|
1057 | ||
|
1058 | for thisPath in pathList: | |
|
1059 | ||
|
1060 | fileList = glob.glob1(thisPath, "*%s" %ext) | |
|
1061 | fileList.sort() | |
|
1062 | ||
|
1063 | for file in fileList: | |
|
1064 | ||
|
1065 | filename = os.path.join(thisPath,file) | |
|
1066 | ||
|
1067 | if not isFileInDateRange(filename, startDate, endDate): | |
|
1068 | continue | |
|
1069 | ||
|
1070 | thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime) | |
|
1071 | ||
|
1072 | if not(thisDatetime): | |
|
1073 | continue | |
|
1074 | ||
|
1075 | filenameList.append(filename) | |
|
1076 | datetimeList.append(thisDatetime) | |
|
1077 | ||
|
1078 | if not(filenameList): | |
|
1079 | print("[Reading] No file was found in time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())) | |
|
1080 | return None, None | |
|
1081 | ||
|
1082 | print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)) | |
|
1083 | print() | |
|
1084 | ||
|
1085 | self.filenameList = filenameList | |
|
1086 | self.datetimeList = datetimeList | |
|
1087 | ||
|
1088 | return pathList, filenameList | |
|
1089 | ||
|
1090 | def __isFileInTimeRange(self,filename, startDate, endDate, startTime, endTime): | |
|
1091 | ||
|
1092 | """ | |
|
1093 | Returns 1 if the data file falls within the specified time range. | |
|
1094 ||
|
1095 | Inputs: | |
|
1096 | filename : full path of the data file in Jicamarca format (.r) | |
|
1097 | startDate : start date of the selected range, as a datetime.date | |
|
1098 | endDate : end date of the selected range, as a datetime.date | |
|
1099 | startTime : start time of the selected range, as a datetime.time | |
|
1100 | endTime : end time of the selected range, as a datetime.time | |
|
1101 ||
|
1102 | Return: | |
|
1103 | Boolean : returns True if the data file contains data within the | |
|
1104 | specified date range, otherwise returns False. | |
|
1105 ||
|
1106 | Exceptions: | |
|
1107 | If the file does not exist or cannot be opened. | |
|
1108 | If the header cannot be read. | |
|
1109 | ||
|
1110 | """ | |
|
1111 | ||
|
1112 | try: | |
|
1113 | fp = h5py.File(filename, 'r') | |
|
1114 | grp1 = fp['Data'] | |
|
1115 | ||
|
1116 | except IOError: | |
|
1117 | traceback.print_exc() | |
|
1118 | raise IOError("The file %s can't be opened" %(filename)) | |
|
1119 | #In case has utctime attribute | |
|
1120 | grp2 = grp1['utctime'] | |
|
1121 | thisUtcTime = grp2.value[0] | |
|
1122 | ||
|
1123 | fp.close() | |
|
1124 | ||
|
1125 | if self.timezone == 'lt': | |
|
1126 | thisUtcTime -= 5*3600 | |
|
1127 | ||
|
1128 | thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime + 5*3600) | |
|
1129 | thisDate = thisDatetime.date() | |
|
1130 | thisTime = thisDatetime.time() | |
|
1131 | ||
|
1132 | startUtcTime = (datetime.datetime.combine(thisDate,startTime)- datetime.datetime(1970, 1, 1)).total_seconds() | |
|
1133 | endUtcTime = (datetime.datetime.combine(thisDate,endTime)- datetime.datetime(1970, 1, 1)).total_seconds() | |
|
1134 | ||
|
1135 | #General case | |
|
1136 | # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o | |
|
1137 | #-----------o----------------------------o----------- | |
|
1138 | # startTime endTime | |
|
1139 | ||
|
1140 | if endTime >= startTime: | |
|
1141 | thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime) | |
|
1142 | if numpy.any(thisUtcLog): #If there is one block between the hours mentioned | |
|
1143 | return thisDatetime | |
|
1144 | return None | |
|
1145 | ||
|
1146 | #If endTime < startTime then endTime belongs to the next day | |
|
1147 | #<<<<<<<<<<<o o>>>>>>>>>>> | |
|
1148 | #-----------o----------------------------o----------- | |
|
1149 | # endTime startTime | |
|
1150 | ||
|
1151 | if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime): | |
|
1152 | return None | |
|
1153 | ||
|
1154 | if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime): | |
|
1155 | return None | |
|
1156 | ||
|
1157 | if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime): | |
|
1158 | return None | |
|
1159 | ||
|
1160 | return thisDatetime | |
|
1161 | ||
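The ASCII diagrams in the docstring above distinguish a same-day window from one that wraps past midnight; a simplified standalone sketch of that membership test on plain UTC timestamps:

```python
import numpy

def file_in_time_range(utc_times, start_utc, end_utc, wraps=False):
    """True if any sample falls inside the window (simplified sketch)."""
    utc_times = numpy.asarray(utc_times, dtype=float)
    if not wraps:
        # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o   startTime ... endTime, same day
        return bool(numpy.any((utc_times > start_utc) & (utc_times < end_utc)))
    # <<<<<<<<<<<o          o>>>>>>>>>>>   endTime ... startTime, wraps midnight:
    # reject only when every sample sits strictly between endTime and startTime
    return not bool(numpy.all((utc_times > end_utc) & (utc_times < start_utc)))
```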
|
1162 | def __setNextFileOffline(self): | |
|
1163 | ||
|
1164 | self.fileIndex += 1 | |
|
1165 | idFile = self.fileIndex | |
|
1166 | ||
|
1167 | if not(idFile < len(self.filenameList)): | |
|
1168 | self.dataOut.error = 'No more files' | |
|
1169 | return 0 | |
|
1170 | ||
|
1171 | filename = self.filenameList[idFile] | |
|
1172 | self.fp = h5py.File(filename, 'r') | |
|
1173 | self.filename = filename | |
|
1174 | ||
|
1175 | print("Setting the file: %s"%self.filename) | |
|
1176 | ||
|
1177 | self.__setBlockList() | |
|
1178 | self.__readData() | |
|
1179 | self.blockIndex = 0 | |
|
1180 | return 1 | |
|
1181 | ||
|
1182 | def __setBlockList(self): | |
|
1183 | ''' | |
|
1184 | Selects the data within the times defined | |
|
1185 | ||
|
1186 | self.fp | |
|
1187 | self.startTime | |
|
1188 | self.endTime | |
|
1189 | self.blockList | |
|
1190 | self.blocksPerFile | |
|
1191 | ||
|
1192 | ''' | |
|
1193 | fp = self.fp | |
|
1194 | startTime = self.startTime | |
|
1195 | endTime = self.endTime | |
|
1196 | ||
|
1197 | grp = fp['Data'] | |
|
1198 | thisUtcTime = grp['utctime'].value.astype(numpy.float)[0] | |
|
1199 | ||
|
1200 | if self.timezone == 'lt': | |
|
1201 | thisUtcTime -= 5*3600 | |
|
1202 | ||
|
1203 | thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime + 5*3600) | |
|
1204 | ||
|
1205 | thisDate = thisDatetime.date() | |
|
1206 | thisTime = thisDatetime.time() | |
|
1207 | ||
|
1208 | startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds() | |
|
1209 | endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds() | |
|
1210 | ||
|
1211 | ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0] | |
|
1212 | ||
|
1213 | self.blockList = ind | |
|
1214 | self.blocksPerFile = len(ind) | |
|
1215 | ||
|
1216 | return | |
|
1217 | ||
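The block selection above reduces to a `numpy.where` over the per-block timestamps; a compact illustration with invented values:

```python
import numpy

thisUtcTime = numpy.array([100.0, 200.0, 300.0, 400.0])   # per-block timestamps (invented)
startUtcTime, endUtcTime = 150.0, 350.0

ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime,
                                    thisUtcTime < endUtcTime))[0]
blockList = ind              # array([1, 2]): blocks inside the window
blocksPerFile = len(ind)     # 2 blocks will be read from this file
```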
|
1218 | def __readMetadata(self): | |
|
1219 | ''' | |
|
1220 | Reads Metadata | |
|
1221 | ''' | |
|
1222 | ||
|
1223 | filename = self.filenameList[0] | |
|
1224 | fp = h5py.File(filename, 'r') | |
|
1225 | gp = fp['Metadata'] | |
|
1226 | listMetaname = [] | |
|
1227 | listMetadata = [] | |
|
1228 | ||
|
1229 | for item in list(gp.items()): | |
|
1230 | name = item[0] | |
|
1231 | ||
|
1232 | if name=='variables': | |
|
1233 | table = gp[name][:] | |
|
1234 | listShapes = {} | |
|
1235 | for shapes in table: | |
|
1236 | listShapes[shapes[0].decode()] = numpy.array([shapes[1]]) | |
|
1237 | else: | |
|
1238 | data = gp[name].value | |
|
1239 | listMetaname.append(name) | |
|
1240 | listMetadata.append(data) | |
|
1241 | ||
|
1242 | self.listShapes = listShapes | |
|
1243 | self.listMetaname = listMetaname | |
|
1244 | self.listMeta = listMetadata | |
|
1245 | ||
|
1246 | fp.close() | |
|
1247 | return | |
|
1248 | ||
|
1249 | def __readData(self): | |
|
1250 | ||
|
1251 | grp = self.fp['Data'] | |
|
1252 | listdataname = [] | |
|
1253 | listdata = [] | |
|
1254 | ||
|
1255 | for item in list(grp.items()): | |
|
1256 | name = item[0] | |
|
1257 | listdataname.append(name) | |
|
1258 | dim = self.listShapes[name][0] | |
|
1259 | if dim == 0: | |
|
1260 | array = grp[name].value | |
|
1261 | else: | |
|
1262 | array = [] | |
|
1263 | for i in range(dim): | |
|
1264 | array.append(grp[name]['table{:02d}'.format(i)].value) | |
|
1265 | array = numpy.array(array) | |
|
1266 | ||
|
1267 | listdata.append(array) | |
|
1268 | ||
|
1269 | self.listDataname = listdataname | |
|
1270 | self.listData = listdata | |
|
1271 | return | |
|
1272 | ||
|
1273 | def getData(self): | |
|
1274 | ||
|
1275 | for i in range(len(self.listMeta)): | |
|
1276 | setattr(self.dataOut, self.listMetaname[i], self.listMeta[i]) | |
|
1277 | ||
|
1278 | for j in range(len(self.listData)): | |
|
1279 | dim = self.listShapes[self.listDataname[j]][0] | |
|
1280 | if dim == 0: | |
|
1281 | setattr(self.dataOut, self.listDataname[j], self.listData[j][self.blockIndex]) | |
|
1282 | else: | |
|
1283 | setattr(self.dataOut, self.listDataname[j], self.listData[j][:,self.blockIndex]) | |
|
1284 | ||
|
1285 | self.dataOut.flagNoData = False | |
|
1286 | self.blockIndex += 1 | |
|
1287 | ||
|
1288 | return | |
|
1289 | ||
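getData() above hands one block per call to `dataOut`: zero-dimensional variables are indexed by block, table variables by their second axis; a tiny sketch of that indexing:

```python
import numpy

blockIndex = 1
scalar_per_block = numpy.array([10.0, 11.0, 12.0])   # nDim == 0: one value per block
tables_per_block = numpy.ones((4, 3, 100))           # nDim > 0: (tables, blocks, samples)

value = scalar_per_block[blockIndex]                  # scalar for this block
array = tables_per_block[:, blockIndex]               # shape (4, 100): every table, this block
```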
|
1290 | def run(self, **kwargs): | |
|
1291 | ||
|
1292 | if not(self.isConfig): | |
|
1293 | self.setup(**kwargs) | |
|
1294 | self.isConfig = True | |
|
1295 | ||
|
1296 | if self.blockIndex == self.blocksPerFile: | |
|
1297 | if not(self.__setNextFileOffline()): | |
|
1298 | self.dataOut.flagNoData = True | |
|
1299 | return 0 | |
|
1300 | ||
|
1301 | self.getData() | |
|
1302 | ||
|
1303 | return | |
|
1304 | ||
|
1305 | @MPDecorator | |
|
1306 | class ParameterWriter(Operation): | |
|
1307 | ''' | |
|
1308 | HDF5 Writer, stores parameters data in HDF5 format files | |
|
1309 | ||
|
1310 | path: path where the files will be stored | |
|
1311 | blocksPerFile: number of blocks that will be saved per HDF5 format file | |
|
1312 | mode: selects the data stacking mode: '0' channels, '1' parameters, '3' table (for meteors) | |
|
1313 | metadataList: list of attributes that will be stored as metadata | |
|
1314 | dataList: list of attributes that will be stored as data | |
|
1315 | ''' | |
|
1316 | ||
|
1317 | ||
|
1318 | ext = ".hdf5" | |
|
1319 | optchar = "D" | |
|
1320 | metaoptchar = "M" | |
|
1321 | metaFile = None | |
|
1322 | filename = None | |
|
1323 | path = None | |
|
1324 | setFile = None | |
|
1325 | fp = None | |
|
1326 | grp = None | |
|
1327 | ds = None | |
|
1328 | firsttime = True | |
|
1329 | #Configurations | |
|
1330 | blocksPerFile = None | |
|
1331 | blockIndex = None | |
|
1332 | dataOut = None | |
|
1333 | #Data Arrays | |
|
1334 | dataList = None | |
|
1335 | metadataList = None | |
|
1336 | dsList = None #List of dictionaries with dataset properties | |
|
1337 | tableDim = None | |
|
1338 | dtype = [('name', 'S20'),('nDim', 'i')] | |
|
1339 | currentDay = None | |
|
1340 | lastTime = None | |
|
1341 | ||
|
1342 | def __init__(self): | |
|
1343 | ||
|
1344 | Operation.__init__(self) | |
|
1345 | return | |
|
1346 | ||
|
1347 | def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None): | |
|
1348 | self.path = path | |
|
1349 | self.blocksPerFile = blocksPerFile | |
|
1350 | self.metadataList = metadataList | |
|
1351 | self.dataList = dataList | |
|
1352 | self.setType = setType | |
|
1353 | ||
|
1354 | tableList = [] | |
|
1355 | dsList = [] | |
|
1356 | ||
|
1357 | for i in range(len(self.dataList)): | |
|
1358 | dsDict = {} | |
|
1359 | dataAux = getattr(self.dataOut, self.dataList[i]) | |
|
1360 | dsDict['variable'] = self.dataList[i] | |
|
1361 | ||
|
1362 | if dataAux is None: | |
|
1363 | continue | |
|
1364 | elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)): | |
|
1365 | dsDict['nDim'] = 0 | |
|
1366 | else: | |
|
1367 | dsDict['nDim'] = len(dataAux.shape) | |
|
1368 | dsDict['shape'] = dataAux.shape | |
|
1369 | dsDict['dsNumber'] = dataAux.shape[0] | |
|
1370 | ||
|
1371 | dsList.append(dsDict) | |
|
1372 | tableList.append((self.dataList[i], dsDict['nDim'])) | |
|
1373 | ||
|
1374 | self.dsList = dsList | |
|
1375 | self.tableDim = numpy.array(tableList, dtype=self.dtype) | |
|
1376 | self.currentDay = self.dataOut.datatime.date() | |
|
1377 | ||
|
1378 | def timeFlag(self): | |
|
1379 | currentTime = self.dataOut.utctime | |
|
1380 | timeTuple = time.localtime(currentTime) | |
|
1381 | dataDay = timeTuple.tm_yday | |
|
1382 | ||
|
1383 | if self.lastTime is None: | |
|
1384 | self.lastTime = currentTime | |
|
1385 | self.currentDay = dataDay | |
|
1386 | return False | |
|
1387 | ||
|
1388 | timeDiff = currentTime - self.lastTime | |
|
1389 | ||
|
1390 | #If the day is different or the gap between one sample and the next is too large | |
|
1391 | if dataDay != self.currentDay: | |
|
1392 | self.currentDay = dataDay | |
|
1393 | return True | |
|
1394 | elif timeDiff > 3*60*60: | |
|
1395 | self.lastTime = currentTime | |
|
1396 | return True | |
|
1397 | else: | |
|
1398 | self.lastTime = currentTime | |
|
1399 | return False | |
|
1400 | ||
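timeFlag() above asks for a new output file when the day of year changes or when more than three hours pass between consecutive blocks; a compact sketch of the same decision as a pure function:

```python
import time

def should_rotate(current_time, last_time, current_day):
    """Return (rotate, last_time, current_day), mirroring timeFlag() above (sketch)."""
    data_day = time.localtime(current_time).tm_yday
    if last_time is None:                       # first block ever seen
        return False, current_time, data_day
    if data_day != current_day:                 # day of year changed
        return True, last_time, data_day
    if current_time - last_time > 3 * 60 * 60:  # gap longer than 3 hours
        return True, current_time, current_day
    return False, current_time, current_day
```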
|
1401 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, dataList=None, setType=None): | |
|
1402 | ||
|
1403 | self.dataOut = dataOut | |
|
1404 | if not(self.isConfig): | |
|
1405 | self.setup(path=path, blocksPerFile=blocksPerFile, | |
|
1406 | metadataList=metadataList, dataList=dataList, | |
|
1407 | setType=setType) | |
|
1408 | ||
|
1409 | self.isConfig = True | |
|
1410 | self.setNextFile() | |
|
1411 | ||
|
1412 | self.putData() | |
|
1413 | return | |
|
1414 | ||
|
1415 | def setNextFile(self): | |
|
1416 | ||
|
1417 | ext = self.ext | |
|
1418 | path = self.path | |
|
1419 | setFile = self.setFile | |
|
1420 | ||
|
1421 | timeTuple = time.localtime(self.dataOut.utctime) | |
|
1422 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
|
1423 | fullpath = os.path.join(path, subfolder) | |
|
1424 | ||
|
1425 | if os.path.exists(fullpath): | |
|
1426 | filesList = os.listdir(fullpath) | |
|
1427 | filesList = [k for k in filesList if k.startswith(self.optchar)] | |
|
1428 | if len( filesList ) > 0: | |
|
1429 | filesList = sorted(filesList, key=str.lower) | |
|
1430 | filen = filesList[-1] | |
|
1431 | # the filename must have the following format | |
|
1432 | # 0 1234 567 89A BCDE (hex) | |
|
1433 | # x YYYY DDD SSS .ext | |
|
1434 | if isNumber(filen[8:11]): | |
|
1435 | setFile = int(filen[8:11]) #initialize the set counter to the last file's set number | |
|
1436 | else: | |
|
1437 | setFile = -1 | |
|
1438 | else: | |
|
1439 | setFile = -1 #initialize the set counter | |
|
1440 | else: | |
|
1441 | os.makedirs(fullpath) | |
|
1442 | setFile = -1 #initialize the set counter | |
|
1443 | ||
|
1444 | if self.setType is None: | |
|
1445 | setFile += 1 | |
|
1446 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, | |
|
1447 | timeTuple.tm_year, | |
|
1448 | timeTuple.tm_yday, | |
|
1449 | setFile, | |
|
1450 | ext ) | |
|
1451 | else: | |
|
1452 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min | |
|
1453 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, | |
|
1454 | timeTuple.tm_year, | |
|
1455 | timeTuple.tm_yday, | |
|
1456 | setFile, | |
|
1457 | ext ) | |
|
1458 | ||
|
1459 | self.filename = os.path.join( path, subfolder, file ) | |
|
1460 | ||
|
1461 | #Setting HDF5 File | |
|
1462 | self.fp = h5py.File(self.filename, 'w') | |
|
1463 | #write metadata | |
|
1464 | self.writeMetadata(self.fp) | |
|
1465 | #Write data | |
|
1466 | self.writeData(self.fp) | |
|
1467 | ||
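The comment block inside setNextFile() documents the output file name layout (`x YYYY DDD SSS.ext`, with the set number in characters 8:11); a short sketch of building such a name and reading the set number back:

```python
import datetime

optchar, ext = 'D', '.hdf5'
when = datetime.date(2018, 2, 1)                     # invented example date
year, doy, setFile = when.year, when.timetuple().tm_yday, 3

name = '%s%4.4d%3.3d%03d%s' % (optchar, year, doy, setFile, ext)
assert name == 'D2018032003.hdf5'
assert int(name[8:11]) == setFile                    # set number lives at chars 8:11
```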
|
1468 | def writeMetadata(self, fp): | |
|
1469 | ||
|
1470 | grp = fp.create_group("Metadata") | |
|
1471 | grp.create_dataset('variables', data=self.tableDim, dtype=self.dtype) | |
|
1472 | ||
|
1473 | for i in range(len(self.metadataList)): | |
|
1474 | if not hasattr(self.dataOut, self.metadataList[i]): | |
|
1475 | log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name) | |
|
1476 | continue | |
|
1477 | value = getattr(self.dataOut, self.metadataList[i]) | |
|
1478 | grp.create_dataset(self.metadataList[i], data=value) | |
|
1479 | return | |
|
1480 | ||
|
1481 | def writeData(self, fp): | |
|
1482 | ||
|
1483 | grp = fp.create_group("Data") | |
|
1484 | dtsets = [] | |
|
1485 | data = [] | |
|
1486 | ||
|
1487 | for dsInfo in self.dsList: | |
|
1488 | if dsInfo['nDim'] == 0: | |
|
1489 | ds = grp.create_dataset( | |
|
1490 | dsInfo['variable'], | |
|
1491 | (self.blocksPerFile, ), | |
|
1492 | chunks=True, | |
|
1493 | dtype=numpy.float64) | |
|
1494 | dtsets.append(ds) | |
|
1495 | data.append((dsInfo['variable'], -1)) | |
|
1496 | else: | |
|
1497 | sgrp = grp.create_group(dsInfo['variable']) | |
|
1498 | for i in range(dsInfo['dsNumber']): | |
|
1499 | ds = sgrp.create_dataset( | |
|
1500 | 'table{:02d}'.format(i), | |
|
1501 | (self.blocksPerFile, ) + dsInfo['shape'][1:], | |
|
1502 | chunks=True) | |
|
1503 | dtsets.append(ds) | |
|
1504 | data.append((dsInfo['variable'], i)) | |
|
1505 | fp.flush() | |
|
1506 | ||
|
1507 | log.log('creating file: {}'.format(fp.filename), 'Writing') | |
|
1508 | ||
|
1509 | self.ds = dtsets | |
|
1510 | self.data = data | |
|
1511 | self.firsttime = True | |
|
1512 | self.blockIndex = 0 | |
|
1513 | return | |
|
1514 | ||
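writeData() above pre-creates one fixed-size dataset per scalar variable and a `tableNN` dataset per channel of each array variable. A sketch of the resulting HDF5 layout, written with h5py as in the class (file name, variables, and sizes are invented):

```python
import h5py
import numpy

blocksPerFile = 10
with h5py.File('example_D2018032003.hdf5', 'w') as fp:          # invented name
    meta = fp.create_group('Metadata')
    meta.create_dataset('variables', data=numpy.array(
        [(b'utctime', 0), (b'data_SNR', 2)],
        dtype=[('name', 'S20'), ('nDim', 'i')]))
    data = fp.create_group('Data')
    data.create_dataset('utctime', (blocksPerFile,), chunks=True,
                        dtype=numpy.float64)                     # nDim == 0 variable
    snr = data.create_group('data_SNR')                          # nDim > 0 variable
    for i in range(2):                                           # one table per channel
        snr.create_dataset('table{:02d}'.format(i),
                           (blocksPerFile, 100), chunks=True)
```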
|
1515 | def putData(self): | |
|
1516 | ||
|
1517 | if (self.blockIndex == self.blocksPerFile) or self.timeFlag(): | |
|
1518 | self.closeFile() | |
|
1519 | self.setNextFile() | |
|
1520 | ||
|
1521 | for i, ds in enumerate(self.ds): | |
|
1522 | attr, ch = self.data[i] | |
|
1523 | if ch == -1: | |
|
1524 | ds[self.blockIndex] = getattr(self.dataOut, attr) | |
|
1525 | else: | |
|
1526 | ds[self.blockIndex] = getattr(self.dataOut, attr)[ch] | |
|
1527 | ||
|
1528 | self.fp.flush() | |
|
1529 | self.blockIndex += 1 | |
|
1530 | ||
|
1531 | return | |
|
1532 | ||
|
1533 | def closeFile(self): | |
|
1534 | ||
|
1535 | if self.blockIndex != self.blocksPerFile: | |
|
1536 | for ds in self.ds: | |
|
1537 | ds.resize(self.blockIndex, axis=0) | |
|
1538 | ||
|
1539 | self.fp.flush() | |
|
1540 | self.fp.close() | |
|
1541 | ||
|
1542 | def close(self): | |
|
1543 | ||
|
1544 | self.closeFile() |