@@ -1,659 +1,659 @@
'''
Created on Sep 9, 2015

@author: roj-idl71 Karim Kuyeng

@update: 2021, Joab Apaza
'''

import os
import sys
import glob
import fnmatch
import datetime
import time
import re
import h5py
import numpy
import schainpy.admin  # needed for the SchainWarning/SchainError raised below

try:
    from gevent import sleep
except:
    from time import sleep

from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
from schainpy.model.data.jrodata import Voltage
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
from numpy import imag


class AMISRReader(ProcessingUnit):
    '''
    classdocs
    '''

    def __init__(self):
        '''
        Constructor
        '''

        ProcessingUnit.__init__(self)

        self.set = None
        self.subset = None
        self.extension_file = '.h5'
        self.dtc_str = 'dtc'
        self.dtc_id = 0
        self.status = True
        self.isConfig = False
        self.dirnameList = []
        self.filenameList = []
        self.fileIndex = None
        self.flagNoMoreFiles = False
        self.flagIsNewFile = 0
        self.filename = ''
        self.amisrFilePointer = None
        self.realBeamCode = []
        self.beamCodeMap = None
        self.azimuthList = []
        self.elevationList = []
        self.dataShape = None



        self.profileIndex = 0


        self.beamCodeByFrame = None
        self.radacTimeByFrame = None

        self.dataset = None

        self.__firstFile = True

        self.buffer = None

        self.timezone = 'ut'

        self.__waitForNewFile = 20
        self.__filename_online = None
        # Is it really necessary to create the output object in the initializer?
        self.dataOut = Voltage()
        self.dataOut.error = False


    def setup(self,path=None,
              startDate=None,
              endDate=None,
              startTime=None,
              endTime=None,
              walk=True,
              timezone='ut',
              all=0,
              code = None,
              nCode = 0,
              nBaud = 0,
              online=False):



        self.timezone = timezone
        self.all = all
        self.online = online

        self.code = code
        self.nCode = int(nCode)
        self.nBaud = int(nBaud)



        #self.findFiles()
        if not(online):
            # offline file search
            self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
        else:
            self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)

        if not(self.filenameList):
            raise schainpy.admin.SchainWarning("There is no files into the folder: %s"%(path))
            sys.exit()

        self.fileIndex = 0

        self.readNextFile(online)

        '''
        Add code
        '''
        self.isConfig = True
        # print("Setup Done")
        pass


    def readAMISRHeader(self,fp):

        if self.isConfig and (not self.flagNoMoreFiles):
            newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
            if self.dataShape != newShape and newShape != None:
                raise schainpy.admin.SchainError("NEW FILE HAS A DIFFERENT SHAPE: ")
                print(self.dataShape,newShape,"\n")
                return 0
        else:
            self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]


        header = 'Raw11/Data/RadacHeader'
        self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
        if (self.startDate> datetime.date(2021, 7, 15)): # the beam-pointing extraction format changed on the 17th
            self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
            self.trueBeams = self.beamcodeFile.split("\n")
            self.trueBeams.pop() # remove last
            [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
            self.beamCode = [int(x, 16) for x in self.realBeamCode]
        else:
            _beamCode= fp.get('Raw11/Data/Beamcodes') # use the beam-pointing layout prior to the change
            self.beamCode = _beamCode[0,:]

        if self.beamCodeMap == None:
            self.beamCodeMap = fp['Setup/BeamcodeMap']
            for beam in self.beamCode:
                beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
                beamAziElev = beamAziElev[0].squeeze()
                self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
                self.elevationList.append(self.beamCodeMap[beamAziElev,2])
                #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
        #print(self.beamCode)
        #self.code = fp.get(header+'/Code') # NOT USED FOR THIS
        self.frameCount = fp.get(header+'/FrameCount') # NOT USED FOR THIS
        self.modeGroup = fp.get(header+'/ModeGroup') # NOT USED FOR THIS
        self.nsamplesPulse = fp.get(header+'/NSamplesPulse') # TO GET NSA OR USING DATA FOR THAT
        self.pulseCount = fp.get(header+'/PulseCount') # NOT USED FOR THIS
        self.radacTime = fp.get(header+'/RadacTime') # 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
        self.timeCount = fp.get(header+'/TimeCount') # NOT USED FOR THIS
        self.timeStatus = fp.get(header+'/TimeStatus') # NOT USED FOR THIS
        self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
        self.frequency = fp.get('Rx/Frequency')
        txAus = fp.get('Raw11/Data/Pulsewidth')


        self.nblocks = self.pulseCount.shape[0] #nblocks

        self.nprofiles = self.pulseCount.shape[1] #nprofile
        self.nsa = self.nsamplesPulse[0,0] #ngates
        self.nchannels = len(self.beamCode)
        self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
        #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
        self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created

        #filling radar controller header parameters
        self.__ippKm = self.ippSeconds *.15*1e6 # in km
        self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
        self.__txB = 0
        nWindows=1
        self.__nSamples = self.nsa
        self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
        self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000

        #for now until understand why the code saved is different (code included even though code not in tuf file)
        #self.__codeType = 0
        # self.__nCode = None
        # self.__nBaud = None
        self.__code = self.code
        self.__codeType = 0
        if self.code != None:
            self.__codeType = 1
            self.__nCode = self.nCode
            self.__nBaud = self.nBaud
        #self.__code = 0

        #filling system header parameters
        self.__nSamples = self.nsa
        self.newProfiles = self.nprofiles/self.nchannels
        self.__channelList = list(range(self.nchannels))

        self.__frequency = self.frequency[0][0]


        return 1


    def createBuffers(self):

        pass

    def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.walk = walk

    def __checkPath(self):
        if os.path.exists(self.path):
            self.status = 1
        else:
            self.status = 0
            print('Path:%s does not exists'%self.path)

        return


    def __selDates(self, amisr_dirname_format):
        try:
            year = int(amisr_dirname_format[0:4])
            month = int(amisr_dirname_format[4:6])
            dom = int(amisr_dirname_format[6:8])
            thisDate = datetime.date(year,month,dom)
            # allow one extra day of margin; results are filtered by date and time afterwards
            if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
                return amisr_dirname_format
        except:
            return None


    def __findDataForDates(self,online=False):

        if not(self.status):
            return None

        pat = '\d+.\d+'
        dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
        dirnameList = [x for x in dirnameList if x!=None]
        dirnameList = [x.string for x in dirnameList]
        if not(online):
            dirnameList = [self.__selDates(x) for x in dirnameList]
            dirnameList = [x for x in dirnameList if x!=None]
        if len(dirnameList)>0:
            self.status = 1
            self.dirnameList = dirnameList
            self.dirnameList.sort()
        else:
            self.status = 0
            return None

    def __getTimeFromData(self):
        startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
        endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)

        print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
        print('........................................')
        filter_filenameList = []
        self.filenameList.sort()
        #for i in range(len(self.filenameList)-1):
        for i in range(len(self.filenameList)):
            filename = self.filenameList[i]
            fp = h5py.File(filename,'r')
            time_str = fp.get('Time/RadacTimeString')

            startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
            #startDateTimeStr_File = "2019-12-16 09:21:11"
            junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
            startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)

            #endDateTimeStr_File = "2019-12-16 11:10:11"
            endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
            junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
            endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)

            fp.close()

            #print("check time", startDateTime_File)
            if self.timezone == 'lt':
                startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
                endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
            if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
                filter_filenameList.append(filename)

            if (startDateTime_File>endDateTime_Reader):
                break


        filter_filenameList.sort()
        self.filenameList = filter_filenameList

        return 1

    def __filterByGlob1(self, dirName):
        filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
        filter_files.sort()
        filterDict = {}
        filterDict.setdefault(dirName)
        filterDict[dirName] = filter_files
        return filterDict

    def __getFilenameList(self, fileListInKeys, dirList):
        for value in fileListInKeys:
            dirName = list(value.keys())[0]
            for file in value[dirName]:
                filename = os.path.join(dirName, file)
                self.filenameList.append(filename)


    def __selectDataForTimes(self, online=False):
        # the time filter is not implemented here yet
        if not(self.status):
            return None

        dirList = [os.path.join(self.path,x) for x in self.dirnameList]
        fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
        self.__getFilenameList(fileListInKeys, dirList)
        if not(online):
            # filter by time
            if not(self.all):
                self.__getTimeFromData()

            if len(self.filenameList)>0:
                self.status = 1
                self.filenameList.sort()
            else:
                self.status = 0
                return None

        else:
            #get the last file - 1
            self.filenameList = [self.filenameList[-2]]
            new_dirnameList = []
            for dirname in self.dirnameList:
                junk = numpy.array([dirname in x for x in self.filenameList])
                junk_sum = junk.sum()
                if junk_sum > 0:
                    new_dirnameList.append(dirname)
            self.dirnameList = new_dirnameList
        return 1

    def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
                          endTime=datetime.time(23,59,59),walk=True):

        if endDate ==None:
            startDate = datetime.datetime.utcnow().date()
            endDate = datetime.datetime.utcnow().date()

        self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)

        self.__checkPath()

        self.__findDataForDates(online=True)

        self.dirnameList = [self.dirnameList[-1]]

        self.__selectDataForTimes(online=True)

        return


    def searchFilesOffLine(self,
                           path,
                           startDate,
                           endDate,
                           startTime=datetime.time(0,0,0),
                           endTime=datetime.time(23,59,59),
                           walk=True):

        self.__setParameters(path, startDate, endDate, startTime, endTime, walk)

        self.__checkPath()

        self.__findDataForDates()

        self.__selectDataForTimes()

        for i in range(len(self.filenameList)):
            print("%s" %(self.filenameList[i]))

        return

    def __setNextFileOffline(self):

        try:
            self.filename = self.filenameList[self.fileIndex]
            self.amisrFilePointer = h5py.File(self.filename,'r')
            self.fileIndex += 1
        except:
            self.flagNoMoreFiles = 1
-            print("No more Files")
+            raise schainpy.admin.SchainError('No more files to read')
            return 0

        self.flagIsNewFile = 1
        print("Setting the file: %s"%self.filename)

        return 1


    def __setNextFileOnline(self):
        filename = self.filenameList[0]
        if self.__filename_online != None:
            self.__selectDataForTimes(online=True)
            filename = self.filenameList[0]
        wait = 0
        self.__waitForNewFile=300 ## DEBUG:
        while self.__filename_online == filename:
            print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
            if wait == 5:
                self.flagNoMoreFiles = 1
                return 0
            sleep(self.__waitForNewFile)
            self.__selectDataForTimes(online=True)
            filename = self.filenameList[0]
            wait += 1

        self.__filename_online = filename

        self.amisrFilePointer = h5py.File(filename,'r')
        self.flagIsNewFile = 1
        self.filename = filename
        print("Setting the file: %s"%self.filename)
        return 1


    def readData(self):
        buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
        re = buffer[:,:,:,0]
        im = buffer[:,:,:,1]
        dataset = re + im*1j

        self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
        timeset = self.radacTime[:,0]

        return dataset,timeset

    def reshapeData(self):
        #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
        channels = self.beamCodeByPulse[0,:]
        nchan = self.nchannels
        #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
        nblocks = self.nblocks
        nsamples = self.nsa

        #Dimensions : nChannels, nProfiles, nSamples
        new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
        ############################################

        for thisChannel in range(nchan):
            new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]


        new_block = numpy.transpose(new_block, (1,0,2,3))
        new_block = numpy.reshape(new_block, (nchan,-1, nsamples))

        return new_block

    def updateIndexes(self):

        pass

    def fillJROHeader(self):

        #fill radar controller header
        self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
                                                                      txA=self.__txA,
                                                                      txB=0,
                                                                      nWindows=1,
                                                                      nHeights=self.__nSamples,
                                                                      firstHeight=self.__firstHeight,
                                                                      deltaHeight=self.__deltaHeight,
                                                                      codeType=self.__codeType,
                                                                      nCode=self.__nCode, nBaud=self.__nBaud,
                                                                      code = self.__code,
                                                                      fClock=1)

        #fill system header
        self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                    nProfiles=self.newProfiles,
                                                    nChannels=len(self.__channelList),
                                                    adcResolution=14,
                                                    pciDioBusWidth=32)

        self.dataOut.type = "Voltage"
        self.dataOut.data = None
        self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
        # self.dataOut.nChannels = 0

        # self.dataOut.nHeights = 0

        self.dataOut.nProfiles = self.newProfiles*self.nblocks
        #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
        ranges = numpy.reshape(self.rangeFromFile.value,(-1))
        self.dataOut.heightList = ranges/1000.0 #km
        self.dataOut.channelList = self.__channelList
        self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights

        # self.dataOut.channelIndexList = None


        self.dataOut.azimuthList = numpy.array(self.azimuthList)
        self.dataOut.elevationList = numpy.array(self.elevationList)
        self.dataOut.codeList = numpy.array(self.beamCode)
        #print(self.dataOut.elevationList)
        self.dataOut.flagNoData = True

        #Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
        if self.timezone == 'lt':
            self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
        else:
            self.dataOut.timeZone = 0 #by default time is UTC

        self.dataOut.dstFlag = 0
        self.dataOut.errorCount = 0
        self.dataOut.nCohInt = 1
        self.dataOut.flagDecodeData = False # assume the data is already decoded
        self.dataOut.flagDeflipData = False # assume the data has not been flipped
        self.dataOut.flagShiftFFT = False
        self.dataOut.ippSeconds = self.ippSeconds

        #Time interval between profiles
        #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt

        self.dataOut.frequency = self.__frequency
        self.dataOut.realtime = self.online
        pass

    def readNextFile(self,online=False):

        if not(online):
            newFile = self.__setNextFileOffline()
        else:
            newFile = self.__setNextFileOnline()

        if not(newFile):
            self.dataOut.error = True
            return 0

        if not self.readAMISRHeader(self.amisrFilePointer):
            self.dataOut.error = True
            return 0

        self.createBuffers()
        self.fillJROHeader()

        #self.__firstFile = False



        self.dataset,self.timeset = self.readData()

        if self.endDate!=None:
            endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
            time_str = self.amisrFilePointer.get('Time/RadacTimeString')
            startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
            junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
            startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
            if self.timezone == 'lt':
                startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
            if (startDateTime_File>endDateTime_Reader):
                return 0

        self.jrodataset = self.reshapeData()
        #----self.updateIndexes()
        self.profileIndex = 0

        return 1


    def __hasNotDataInBuffer(self):
        if self.profileIndex >= (self.newProfiles*self.nblocks):
            return 1
        return 0


    def getData(self):

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return 0

        if self.__hasNotDataInBuffer():
            if not (self.readNextFile(self.online)):
                return 0


        if self.dataset is None: # set this condition when there is no data left to read
            self.dataOut.flagNoData = True
            return 0

        #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))

        self.dataOut.data = self.jrodataset[:,self.profileIndex,:]

        #print("R_t",self.timeset)

        #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
        # check the JRO data basic header and verify that it is compatible with this value
        #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
        indexprof = numpy.mod(self.profileIndex, self.newProfiles)
        indexblock = self.profileIndex/self.newProfiles
        #print (indexblock, indexprof)
        diffUTC = 0
        t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC

        #print("utc :",indexblock," __ ",t_comp)
        #print(numpy.shape(self.timeset))
        self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
        #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp

        self.dataOut.profileIndex = self.profileIndex
        #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
        self.dataOut.flagNoData = False
        # if indexprof == 0:
        # print("kamisr: ",self.dataOut.utctime)

        self.profileIndex += 1

        return self.dataOut.data


    def run(self, **kwargs):
        '''
        This method will be called many times so here you should put all your code
        '''
        #print("running kamisr")
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
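
For orientation, here is a minimal standalone sketch of the AMISR HDF5 layout that AMISRReader relies on, using only h5py and the dataset paths that appear in readAMISRHeader and readData above. The file path is a placeholder, and the shape comments only restate what the reader's own indexing implies.

    import h5py

    # Placeholder file name; real files sit inside dated experiment directories.
    with h5py.File('/path/to/amisr/file.h5', 'r') as fp:
        samples = fp['Raw11/Data/Samples/Data']                # (nblocks, nprofiles, nsamples, 2): real/imag pairs
        beam_by_pulse = fp['Raw11/Data/RadacHeader/BeamCode']  # beam code of every profile, used to rearrange channels
        radac_time = fp['Raw11/Data/RadacHeader/RadacTime']    # per-block timestamps; column 0 is the block start time
        ranges = fp['Raw11/Data/Samples/Range']                # gate ranges in metres (divided by 1000 to get km)
        time_str = fp['Time/RadacTimeString']                  # human-readable start/end time strings
        print(samples.shape, beam_by_pulse.shape, ranges[0, :5])
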
@@ -1,652 +1,660 @@
import os
import time
import datetime

import numpy
import h5py

import schainpy.admin
from schainpy.model.data.jrodata import *
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
from schainpy.model.io.jroIO_base import *
from schainpy.utils import log


class HDFReader(Reader, ProcessingUnit):
    """Processing unit to read HDF5 format files

    This unit reads HDF5 files created with the `HDFWriter` operation, which by
    default contain two groups, Data and Metadata; all of their variables are
    loaded as `dataOut` attributes.
    It is also possible to read any HDF5 file by giving its structure in the
    `description` parameter, and extra values can be added to the metadata with
    the `extras` parameter.

    Parameters:
    -----------
    path : str
        Path where files are located.
    startDate : date
        Start date of the files
    endDate : date
        End date of the files
    startTime : time
        Start time of the files
    endTime : time
        End time of the files
    description : dict, optional
        Dictionary with the description of the HDF5 file
    extras : dict, optional
        Dictionary with extra metadata to be added to `dataOut`

    Examples
    --------

    desc = {
        'Data': {
            'data_output': ['u', 'v', 'w'],
            'utctime': 'timestamps',
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    desc = {
        'Data': {
            'data_output': 'winds',
            'utctime': 'timestamps'
        },
        'Metadata': {
            'heightList': 'heights'
        }
    }

    extras = {
        'timeZone': 300
    }

    reader = project.addReadUnit(
        name='HDFReader',
        path='/path/to/files',
        startDate='2019/01/01',
        endDate='2019/01/31',
        startTime='00:00:00',
        endTime='23:59:59',
        # description=json.dumps(desc),
        # extras=json.dumps(extras),
        )

    """

    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']

    def __init__(self):
        ProcessingUnit.__init__(self)
        self.dataOut = Parameters()
        self.ext = ".hdf5"
        self.optchar = "D"
        self.meta = {}
        self.data = {}
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.description = {}
        self.extras = {}
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"
        self.utcoffset = 0

    def setup(self, **kwargs):

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.online:
            log.log("Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk,
                    self.filefmt, self.folderfmt)
                pathname, filename = os.path.split(fullpath)
                #print(pathname,filename)
                try:
                    fullpath = next(fullpath)

                except:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not(fullpath):
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            pathname, filename = os.path.split(fullpath)
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return


    def readFirstHeader(self):
        '''Read metadata and data'''

        self.__readMetadata()
        self.__readData()
        self.__setBlockList()

        if 'type' in self.meta:
            self.dataOut = eval(self.meta['type'])()

        for attr in self.meta:
            #print("attr: ", attr)
            setattr(self.dataOut, attr, self.meta[attr])


        self.blockIndex = 0

        return

    def __setBlockList(self):
        '''
        Selects the data within the times defined

        self.fp
        self.startTime
        self.endTime
        self.blockList
        self.blocksPerFile

        '''

        startTime = self.startTime
        endTime = self.endTime
        thisUtcTime = self.data['utctime'] + self.utcoffset
        self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
        thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
        self.startFileDatetime = thisDatetime
        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)
        self.blocksPerFile = len(thisUtcTime)
        return

    def __readMetadata(self):
        '''
        Reads Metadata
        '''

        meta = {}

        if self.description:
            for key, value in self.description['Metadata'].items():
|
204 | for key, value in self.description['Metadata'].items(): | |
205 | meta[key] = self.fp[value][()] |
|
205 | meta[key] = self.fp[value][()] | |
206 | else: |
|
206 | else: | |
207 | grp = self.fp['Metadata'] |
|
207 | grp = self.fp['Metadata'] | |
208 | for name in grp: |
|
208 | for name in grp: | |
209 | meta[name] = grp[name][()] |
|
209 | meta[name] = grp[name][()] | |
210 |
|
210 | |||
211 | if self.extras: |
|
211 | if self.extras: | |
212 | for key, value in self.extras.items(): |
|
212 | for key, value in self.extras.items(): | |
213 | meta[key] = value |
|
213 | meta[key] = value | |
214 | self.meta = meta |
|
214 | self.meta = meta | |
215 |
|
215 | |||
216 | return |
|
216 | return | |
217 |
|
217 | |||
218 |
|
218 | |||
219 |
|
219 | |||
220 | def checkForRealPath(self, nextFile, nextDay): |
|
220 | def checkForRealPath(self, nextFile, nextDay): | |
221 |
|
221 | |||
222 | # print("check FRP") |
|
222 | # print("check FRP") | |
223 | # dt = self.startFileDatetime + datetime.timedelta(1) |
|
223 | # dt = self.startFileDatetime + datetime.timedelta(1) | |
224 | # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext) |
|
224 | # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext) | |
225 | # fullfilename = os.path.join(self.path, filename) |
|
225 | # fullfilename = os.path.join(self.path, filename) | |
226 | # print("check Path ",fullfilename,filename) |
|
226 | # print("check Path ",fullfilename,filename) | |
227 | # if os.path.exists(fullfilename): |
|
227 | # if os.path.exists(fullfilename): | |
228 | # return fullfilename, filename |
|
228 | # return fullfilename, filename | |
229 | # return None, filename |
|
229 | # return None, filename | |
230 | return None,None |
|
230 | return None,None | |
231 |
|
231 | |||
232 | def __readData(self): |
|
232 | def __readData(self): | |
233 |
|
233 | |||
234 | data = {} |
|
234 | data = {} | |
235 |
|
235 | |||
236 | if self.description: |
|
236 | if self.description: | |
237 | for key, value in self.description['Data'].items(): |
|
237 | for key, value in self.description['Data'].items(): | |
238 | if isinstance(value, str): |
|
238 | if isinstance(value, str): | |
239 | if isinstance(self.fp[value], h5py.Dataset): |
|
239 | if isinstance(self.fp[value], h5py.Dataset): | |
240 | data[key] = self.fp[value][()] |
|
240 | data[key] = self.fp[value][()] | |
241 | elif isinstance(self.fp[value], h5py.Group): |
|
241 | elif isinstance(self.fp[value], h5py.Group): | |
242 | array = [] |
|
242 | array = [] | |
243 | for ch in self.fp[value]: |
|
243 | for ch in self.fp[value]: | |
244 | array.append(self.fp[value][ch][()]) |
|
244 | array.append(self.fp[value][ch][()]) | |
245 | data[key] = numpy.array(array) |
|
245 | data[key] = numpy.array(array) | |
246 | elif isinstance(value, list): |
|
246 | elif isinstance(value, list): | |
247 | array = [] |
|
247 | array = [] | |
248 | for ch in value: |
|
248 | for ch in value: | |
249 | array.append(self.fp[ch][()]) |
|
249 | array.append(self.fp[ch][()]) | |
250 | data[key] = numpy.array(array) |
|
250 | data[key] = numpy.array(array) | |
251 | else: |
|
251 | else: | |
252 | grp = self.fp['Data'] |
|
252 | grp = self.fp['Data'] | |
253 | for name in grp: |
|
253 | for name in grp: | |
254 | if isinstance(grp[name], h5py.Dataset): |
|
254 | if isinstance(grp[name], h5py.Dataset): | |
255 | array = grp[name][()] |
|
255 | array = grp[name][()] | |
256 | elif isinstance(grp[name], h5py.Group): |
|
256 | elif isinstance(grp[name], h5py.Group): | |
257 | array = [] |
|
257 | array = [] | |
258 | for ch in grp[name]: |
|
258 | for ch in grp[name]: | |
259 | array.append(grp[name][ch][()]) |
|
259 | array.append(grp[name][ch][()]) | |
260 | array = numpy.array(array) |
|
260 | array = numpy.array(array) | |
261 | else: |
|
261 | else: | |
262 | log.warning('Unknown type: {}'.format(name)) |
|
262 | log.warning('Unknown type: {}'.format(name)) | |
263 |
|
263 | |||
264 | if name in self.description: |
|
264 | if name in self.description: | |
265 | key = self.description[name] |
|
265 | key = self.description[name] | |
266 | else: |
|
266 | else: | |
267 | key = name |
|
267 | key = name | |
268 | data[key] = array |
|
268 | data[key] = array | |
269 |
|
269 | |||
270 | self.data = data |
|
270 | self.data = data | |
271 | return |
|
271 | return | |
272 |
|
272 | |||
273 | def getData(self): |
|
273 | def getData(self): | |
274 | if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime): |
|
274 | if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime): | |
275 | self.dataOut.flagNoData = True |
|
275 | self.dataOut.flagNoData = True | |
276 | self.blockIndex = self.blocksPerFile |
|
276 | self.blockIndex = self.blocksPerFile | |
277 | #self.dataOut.error = True # this would terminate the program; removed |
|
277 | #self.dataOut.error = True # this would terminate the program; removed | |
278 | return |
|
278 | return | |
279 | for attr in self.data: |
|
279 | for attr in self.data: | |
280 | if self.data[attr].ndim == 1: |
|
280 | if self.data[attr].ndim == 1: | |
281 | setattr(self.dataOut, attr, self.data[attr][self.blockIndex]) |
|
281 | setattr(self.dataOut, attr, self.data[attr][self.blockIndex]) | |
282 | else: |
|
282 | else: | |
283 | setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex]) |
|
283 | setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex]) | |
284 |
|
284 | |||
285 | self.dataOut.flagNoData = False |
|
285 | self.dataOut.flagNoData = False | |
286 | self.blockIndex += 1 |
|
286 | self.blockIndex += 1 | |
287 |
|
287 | |||
|
288 | if self.blockIndex == 1: | |||
288 | log.log("Block No. {}/{} -> {}".format( |
|
289 | log.log("Block No. {}/{} -> {}".format( | |
289 | self.blockIndex, |
|
290 | self.blockIndex, | |
290 | self.blocksPerFile, |
|
291 | self.blocksPerFile, | |
291 | self.dataOut.datatime.ctime()), self.name) |
|
292 | self.dataOut.datatime.ctime()), self.name) | |
|
293 | else: | |||
|
294 | log.log("Block No. {}/{} ".format( | |||
|
295 | self.blockIndex, | |||
|
296 | self.blocksPerFile), self.name) | |
|
297 | ||||
292 |
|
298 | |||
293 | return |
|
299 | return | |
294 |
|
300 | |||
295 | def run(self, **kwargs): |
|
301 | def run(self, **kwargs): | |
296 |
|
302 | |||
297 | if not(self.isConfig): |
|
303 | if not(self.isConfig): | |
298 | self.setup(**kwargs) |
|
304 | self.setup(**kwargs) | |
299 | self.isConfig = True |
|
305 | self.isConfig = True | |
300 |
|
306 | |||
301 | if self.blockIndex == self.blocksPerFile: |
|
307 | if self.blockIndex == self.blocksPerFile: | |
302 | self.setNextFile() |
|
308 | self.setNextFile() | |
303 |
|
309 | |||
304 | self.getData() |
|
310 | self.getData() | |
305 |
|
311 | |||
306 | return |
|
312 | return | |
307 |
|
313 | |||
308 | @MPDecorator |
|
314 | @MPDecorator | |
309 | class HDFWriter(Operation): |
|
315 | class HDFWriter(Operation): | |
310 | """Operation to write HDF5 files. |
|
316 | """Operation to write HDF5 files. | |
311 |
|
317 | |||
312 | The HDF5 file contains by default two groups, Data and Metadata, where |
|
318 | The HDF5 file contains by default two groups, Data and Metadata, where | |
313 | you can save any `dataOut` attribute specified by `dataList` and `metadataList` |
|
319 | you can save any `dataOut` attribute specified by `dataList` and `metadataList` | |
314 | parameters; data attributes are normally time dependent whereas the metadata |
|
320 | parameters; data attributes are normally time dependent whereas the metadata | |
315 | are not. |
|
321 | are not. | |
316 | It is possible to customize the structure of the HDF5 file with the |
|
322 | It is possible to customize the structure of the HDF5 file with the | |
317 | optional description parameter; see the examples below. |
|
323 | optional description parameter; see the examples below. | |
318 |
|
324 | |||
319 | Parameters: |
|
325 | Parameters: | |
320 | ----------- |
|
326 | ----------- | |
321 | path : str |
|
327 | path : str | |
322 | Path where files will be saved. |
|
328 | Path where files will be saved. | |
323 | blocksPerFile : int |
|
329 | blocksPerFile : int | |
324 | Number of blocks per file |
|
330 | Number of blocks per file | |
325 | metadataList : list |
|
331 | metadataList : list | |
326 | List of the dataOut attributes that will be saved as metadata |
|
332 | List of the dataOut attributes that will be saved as metadata | |
327 | dataList : list |
|
333 | dataList : list | |
328 | List of the dataOut attributes that will be saved as data |
|
334 | List of the dataOut attributes that will be saved as data | |
329 | setType : bool |
|
335 | setType : bool | |
330 | If True the name of the files corresponds to the timestamp of the data |
|
336 | If True the name of the files corresponds to the timestamp of the data | |
331 | description : dict, optional |
|
337 | description : dict, optional | |
332 | Dictionary with the desired description of the HDF5 file |
|
338 | Dictionary with the desired description of the HDF5 file | |
333 |
|
339 | |||
334 | Examples |
|
340 | Examples | |
335 | -------- |
|
341 | -------- | |
336 |
|
342 | |||
337 | desc = { |
|
343 | desc = { | |
338 | 'data_output': {'winds': ['z', 'w', 'v']}, |
|
344 | 'data_output': {'winds': ['z', 'w', 'v']}, | |
339 | 'utctime': 'timestamps', |
|
345 | 'utctime': 'timestamps', | |
340 | 'heightList': 'heights' |
|
346 | 'heightList': 'heights' | |
341 | } |
|
347 | } | |
342 | desc = { |
|
348 | desc = { | |
343 | 'data_output': ['z', 'w', 'v'], |
|
349 | 'data_output': ['z', 'w', 'v'], | |
344 | 'utctime': 'timestamps', |
|
350 | 'utctime': 'timestamps', | |
345 | 'heightList': 'heights' |
|
351 | 'heightList': 'heights' | |
346 | } |
|
352 | } | |
347 | desc = { |
|
353 | desc = { | |
348 | 'Data': { |
|
354 | 'Data': { | |
349 | 'data_output': 'winds', |
|
355 | 'data_output': 'winds', | |
350 | 'utctime': 'timestamps' |
|
356 | 'utctime': 'timestamps' | |
351 | }, |
|
357 | }, | |
352 | 'Metadata': { |
|
358 | 'Metadata': { | |
353 | 'heightList': 'heights' |
|
359 | 'heightList': 'heights' | |
354 | } |
|
360 | } | |
355 | } |
|
361 | } | |
356 |
|
362 | |||
357 | writer = proc_unit.addOperation(name='HDFWriter') |
|
363 | writer = proc_unit.addOperation(name='HDFWriter') | |
358 | writer.addParameter(name='path', value='/path/to/file') |
|
364 | writer.addParameter(name='path', value='/path/to/file') | |
359 | writer.addParameter(name='blocksPerFile', value='32') |
|
365 | writer.addParameter(name='blocksPerFile', value='32') | |
360 | writer.addParameter(name='metadataList', value='heightList,timeZone') |
|
366 | writer.addParameter(name='metadataList', value='heightList,timeZone') | |
361 | writer.addParameter(name='dataList',value='data_output,utctime') |
|
367 | writer.addParameter(name='dataList',value='data_output,utctime') | |
362 | # writer.addParameter(name='description',value=json.dumps(desc)) |
|
368 | # writer.addParameter(name='description',value=json.dumps(desc)) | |
363 |
|
369 | |||
364 | """ |
|
370 | """ | |
365 |
|
371 | |||
366 | ext = ".hdf5" |
|
372 | ext = ".hdf5" | |
367 | optchar = "D" |
|
373 | optchar = "D" | |
368 | filename = None |
|
374 | filename = None | |
369 | path = None |
|
375 | path = None | |
370 | setFile = None |
|
376 | setFile = None | |
371 | fp = None |
|
377 | fp = None | |
372 | firsttime = True |
|
378 | firsttime = True | |
373 | #Configurations |
|
379 | #Configurations | |
374 | blocksPerFile = None |
|
380 | blocksPerFile = None | |
375 | blockIndex = None |
|
381 | blockIndex = None | |
376 | dataOut = None |
|
382 | dataOut = None | |
377 | #Data Arrays |
|
383 | #Data Arrays | |
378 | dataList = None |
|
384 | dataList = None | |
379 | metadataList = None |
|
385 | metadataList = None | |
380 | currentDay = None |
|
386 | currentDay = None | |
381 | lastTime = None |
|
387 | lastTime = None | |
382 |
|
388 | |||
383 | def __init__(self): |
|
389 | def __init__(self): | |
384 |
|
390 | |||
385 | Operation.__init__(self) |
|
391 | Operation.__init__(self) | |
386 | return |
|
392 | return | |
387 |
|
393 | |||
388 | def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None): |
|
394 | def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None): | |
389 | self.path = path |
|
395 | self.path = path | |
390 | self.blocksPerFile = blocksPerFile |
|
396 | self.blocksPerFile = blocksPerFile | |
391 | self.metadataList = metadataList |
|
397 | self.metadataList = metadataList | |
392 | self.dataList = [s.strip() for s in dataList] |
|
398 | self.dataList = [s.strip() for s in dataList] | |
393 | self.setType = setType |
|
399 | self.setType = setType | |
394 | self.description = description |
|
400 | self.description = description | |
395 |
|
401 | |||
396 | if self.metadataList is None: |
|
402 | if self.metadataList is None: | |
397 | self.metadataList = self.dataOut.metadata_list |
|
403 | self.metadataList = self.dataOut.metadata_list | |
398 |
|
404 | |||
399 | tableList = [] |
|
405 | tableList = [] | |
400 | dsList = [] |
|
406 | dsList = [] | |
401 |
|
407 | |||
402 | for i in range(len(self.dataList)): |
|
408 | for i in range(len(self.dataList)): | |
403 | dsDict = {} |
|
409 | dsDict = {} | |
404 | if hasattr(self.dataOut, self.dataList[i]): |
|
410 | if hasattr(self.dataOut, self.dataList[i]): | |
405 | dataAux = getattr(self.dataOut, self.dataList[i]) |
|
411 | dataAux = getattr(self.dataOut, self.dataList[i]) | |
406 | dsDict['variable'] = self.dataList[i] |
|
412 | dsDict['variable'] = self.dataList[i] | |
407 | else: |
|
413 | else: | |
408 | log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name) |
|
414 | log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name) | |
409 | continue |
|
415 | continue | |
410 |
|
416 | |||
411 | if dataAux is None: |
|
417 | if dataAux is None: | |
412 | continue |
|
418 | continue | |
413 | elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)): |
|
419 | elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)): | |
414 | dsDict['nDim'] = 0 |
|
420 | dsDict['nDim'] = 0 | |
415 | else: |
|
421 | else: | |
416 | dsDict['nDim'] = len(dataAux.shape) |
|
422 | dsDict['nDim'] = len(dataAux.shape) | |
417 | dsDict['shape'] = dataAux.shape |
|
423 | dsDict['shape'] = dataAux.shape | |
418 | dsDict['dsNumber'] = dataAux.shape[0] |
|
424 | dsDict['dsNumber'] = dataAux.shape[0] | |
419 | dsDict['dtype'] = dataAux.dtype |
|
425 | dsDict['dtype'] = dataAux.dtype | |
420 |
|
426 | |||
421 | dsList.append(dsDict) |
|
427 | dsList.append(dsDict) | |
422 |
|
428 | |||
423 | self.dsList = dsList |
|
429 | self.dsList = dsList | |
424 | self.currentDay = self.dataOut.datatime.date() |
|
430 | self.currentDay = self.dataOut.datatime.date() | |
425 |
|
431 | |||
426 | def timeFlag(self): |
|
432 | def timeFlag(self): | |
427 | currentTime = self.dataOut.utctime |
|
433 | currentTime = self.dataOut.utctime | |
428 | timeTuple = time.localtime(currentTime) |
|
434 | timeTuple = time.localtime(currentTime) | |
429 | dataDay = timeTuple.tm_yday |
|
435 | dataDay = timeTuple.tm_yday | |
430 | #print("time UTC: ",currentTime, self.dataOut.datatime) |
|
436 | #print("time UTC: ",currentTime, self.dataOut.datatime) | |
431 | if self.lastTime is None: |
|
437 | if self.lastTime is None: | |
432 | self.lastTime = currentTime |
|
438 | self.lastTime = currentTime | |
433 | self.currentDay = dataDay |
|
439 | self.currentDay = dataDay | |
434 | return False |
|
440 | return False | |
435 |
|
441 | |||
436 | timeDiff = currentTime - self.lastTime |
|
442 | timeDiff = currentTime - self.lastTime | |
437 |
|
443 | |||
438 | #If the day changed or the time between consecutive samples exceeds the limit |
|
444 | #If the day changed or the time between consecutive samples exceeds the limit | |
439 | if dataDay != self.currentDay: |
|
445 | if dataDay != self.currentDay: | |
440 | self.currentDay = dataDay |
|
446 | self.currentDay = dataDay | |
441 | return True |
|
447 | return True | |
442 | elif timeDiff > 3*60*60: |
|
448 | elif timeDiff > 3*60*60: | |
443 | self.lastTime = currentTime |
|
449 | self.lastTime = currentTime | |
444 | return True |
|
450 | return True | |
445 | else: |
|
451 | else: | |
446 | self.lastTime = currentTime |
|
452 | self.lastTime = currentTime | |
447 | return False |
|
453 | return False | |
448 |
|
454 | |||
449 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, |
|
455 | def run(self, dataOut, path, blocksPerFile=10, metadataList=None, | |
450 | dataList=[], setType=None, description={}): |
|
456 | dataList=[], setType=None, description={}): | |
451 |
|
457 | |||
452 | self.dataOut = dataOut |
|
458 | self.dataOut = dataOut | |
453 | if not(self.isConfig): |
|
459 | if not(self.isConfig): | |
454 | self.setup(path=path, blocksPerFile=blocksPerFile, |
|
460 | self.setup(path=path, blocksPerFile=blocksPerFile, | |
455 | metadataList=metadataList, dataList=dataList, |
|
461 | metadataList=metadataList, dataList=dataList, | |
456 | setType=setType, description=description) |
|
462 | setType=setType, description=description) | |
457 |
|
463 | |||
458 | self.isConfig = True |
|
464 | self.isConfig = True | |
459 | self.setNextFile() |
|
465 | self.setNextFile() | |
460 |
|
466 | |||
461 | self.putData() |
|
467 | self.putData() | |
462 | return |
|
468 | return | |
463 |
|
469 | |||
464 | def setNextFile(self): |
|
470 | def setNextFile(self): | |
465 |
|
471 | |||
466 | ext = self.ext |
|
472 | ext = self.ext | |
467 | path = self.path |
|
473 | path = self.path | |
468 | setFile = self.setFile |
|
474 | setFile = self.setFile | |
469 |
|
475 | |||
470 | timeTuple = time.gmtime(self.dataOut.utctime) |
|
476 | timeTuple = time.gmtime(self.dataOut.utctime) | |
471 | #print("path: ",timeTuple) |
|
477 | #print("path: ",timeTuple) | |
472 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) |
|
478 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
473 | fullpath = os.path.join(path, subfolder) |
|
479 | fullpath = os.path.join(path, subfolder) | |
474 |
|
480 | |||
475 | if os.path.exists(fullpath): |
|
481 | if os.path.exists(fullpath): | |
476 | filesList = os.listdir(fullpath) |
|
482 | filesList = os.listdir(fullpath) | |
477 | filesList = [k for k in filesList if k.startswith(self.optchar)] |
|
483 | filesList = [k for k in filesList if k.startswith(self.optchar)] | |
478 | if len( filesList ) > 0: |
|
484 | if len( filesList ) > 0: | |
479 | filesList = sorted(filesList, key=str.lower) |
|
485 | filesList = sorted(filesList, key=str.lower) | |
480 | filen = filesList[-1] |
|
486 | filen = filesList[-1] | |
481 | # the filename should have the following format |
|
487 | # the filename should have the following format | |
482 | # 0 1234 567 89A BCDE (hex) |
|
488 | # 0 1234 567 89A BCDE (hex) | |
483 | # x YYYY DDD SSS .ext |
|
489 | # x YYYY DDD SSS .ext | |
484 | if isNumber(filen[8:11]): |
|
490 | if isNumber(filen[8:11]): | |
485 | setFile = int(filen[8:11]) # initialize the set counter with the set number of the last file |
|
491 | setFile = int(filen[8:11]) # initialize the set counter with the set number of the last file | |
486 | else: |
|
492 | else: | |
487 | setFile = -1 |
|
493 | setFile = -1 | |
488 | else: |
|
494 | else: | |
489 | setFile = -1 # initialize the set counter |
|
495 | setFile = -1 # initialize the set counter | |
490 | else: |
|
496 | else: | |
491 | os.makedirs(fullpath) |
|
497 | os.makedirs(fullpath) | |
492 | setFile = -1 # initialize the set counter |
|
498 | setFile = -1 # initialize the set counter | |
493 |
|
499 | |||
494 | if self.setType is None: |
|
500 | if self.setType is None: | |
495 | setFile += 1 |
|
501 | setFile += 1 | |
496 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, |
|
502 | file = '%s%4.4d%3.3d%03d%s' % (self.optchar, | |
497 | timeTuple.tm_year, |
|
503 | timeTuple.tm_year, | |
498 | timeTuple.tm_yday, |
|
504 | timeTuple.tm_yday, | |
499 | setFile, |
|
505 | setFile, | |
500 | ext ) |
|
506 | ext ) | |
501 | else: |
|
507 | else: | |
502 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min |
|
508 | setFile = timeTuple.tm_hour*60+timeTuple.tm_min | |
503 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, |
|
509 | file = '%s%4.4d%3.3d%04d%s' % (self.optchar, | |
504 | timeTuple.tm_year, |
|
510 | timeTuple.tm_year, | |
505 | timeTuple.tm_yday, |
|
511 | timeTuple.tm_yday, | |
506 | setFile, |
|
512 | setFile, | |
507 | ext ) |
|
513 | ext ) | |
508 |
|
514 | |||
509 | self.filename = os.path.join( path, subfolder, file ) |
|
515 | self.filename = os.path.join( path, subfolder, file ) | |
510 |
|
516 | |||
511 | #Setting HDF5 File |
|
517 | #Setting HDF5 File | |
512 | self.fp = h5py.File(self.filename, 'w') |
|
518 | self.fp = h5py.File(self.filename, 'w') | |
513 | #write metadata |
|
519 | #write metadata | |
514 | self.writeMetadata(self.fp) |
|
520 | self.writeMetadata(self.fp) | |
515 | #Write data |
|
521 | #Write data | |
516 | self.writeData(self.fp) |
|
522 | self.writeData(self.fp) | |
517 |
|
523 | |||
518 | def getLabel(self, name, x=None): |
|
524 | def getLabel(self, name, x=None): | |
519 |
|
525 | |||
520 | if x is None: |
|
526 | if x is None: | |
521 | if 'Data' in self.description: |
|
527 | if 'Data' in self.description: | |
522 | data = self.description['Data'] |
|
528 | data = self.description['Data'] | |
523 | if 'Metadata' in self.description: |
|
529 | if 'Metadata' in self.description: | |
524 | data.update(self.description['Metadata']) |
|
530 | data.update(self.description['Metadata']) | |
525 | else: |
|
531 | else: | |
526 | data = self.description |
|
532 | data = self.description | |
527 | if name in data: |
|
533 | if name in data: | |
528 | if isinstance(data[name], str): |
|
534 | if isinstance(data[name], str): | |
529 | return data[name] |
|
535 | return data[name] | |
530 | elif isinstance(data[name], list): |
|
536 | elif isinstance(data[name], list): | |
531 | return None |
|
537 | return None | |
532 | elif isinstance(data[name], dict): |
|
538 | elif isinstance(data[name], dict): | |
533 | for key, value in data[name].items(): |
|
539 | for key, value in data[name].items(): | |
534 | return key |
|
540 | return key | |
535 | return name |
|
541 | return name | |
536 | else: |
|
542 | else: | |
537 | if 'Metadata' in self.description: |
|
543 | if 'Metadata' in self.description: | |
538 | meta = self.description['Metadata'] |
|
544 | meta = self.description['Metadata'] | |
539 | else: |
|
545 | else: | |
540 | meta = self.description |
|
546 | meta = self.description | |
541 | if name in meta: |
|
547 | if name in meta: | |
542 | if isinstance(meta[name], list): |
|
548 | if isinstance(meta[name], list): | |
543 | return meta[name][x] |
|
549 | return meta[name][x] | |
544 | elif isinstance(meta[name], dict): |
|
550 | elif isinstance(meta[name], dict): | |
545 | for key, value in meta[name].items(): |
|
551 | for key, value in meta[name].items(): | |
546 | return value[x] |
|
552 | return value[x] | |
547 | if 'cspc' in name: |
|
553 | if 'cspc' in name: | |
548 | return 'pair{:02d}'.format(x) |
|
554 | return 'pair{:02d}'.format(x) | |
549 | else: |
|
555 | else: | |
550 | return 'channel{:02d}'.format(x) |
|
556 | return 'channel{:02d}'.format(x) | |
551 |
|
557 | |||
552 | def writeMetadata(self, fp): |
|
558 | def writeMetadata(self, fp): | |
553 |
|
559 | |||
554 | if self.description: |
|
560 | if self.description: | |
555 | if 'Metadata' in self.description: |
|
561 | if 'Metadata' in self.description: | |
556 | grp = fp.create_group('Metadata') |
|
562 | grp = fp.create_group('Metadata') | |
557 | else: |
|
563 | else: | |
558 | grp = fp |
|
564 | grp = fp | |
559 | else: |
|
565 | else: | |
560 | grp = fp.create_group('Metadata') |
|
566 | grp = fp.create_group('Metadata') | |
561 |
|
567 | |||
562 | for i in range(len(self.metadataList)): |
|
568 | for i in range(len(self.metadataList)): | |
563 | if not hasattr(self.dataOut, self.metadataList[i]): |
|
569 | if not hasattr(self.dataOut, self.metadataList[i]): | |
564 | log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name) |
|
570 | log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name) | |
565 | continue |
|
571 | continue | |
566 | value = getattr(self.dataOut, self.metadataList[i]) |
|
572 | value = getattr(self.dataOut, self.metadataList[i]) | |
567 | if isinstance(value, bool): |
|
573 | if isinstance(value, bool): | |
568 | if value is True: |
|
574 | if value is True: | |
569 | value = 1 |
|
575 | value = 1 | |
570 | else: |
|
576 | else: | |
571 | value = 0 |
|
577 | value = 0 | |
572 | grp.create_dataset(self.getLabel(self.metadataList[i]), data=value) |
|
578 | grp.create_dataset(self.getLabel(self.metadataList[i]), data=value) | |
573 | return |
|
579 | return | |
574 |
|
580 | |||
575 | def writeData(self, fp): |
|
581 | def writeData(self, fp): | |
576 |
|
582 | |||
577 | if self.description: |
|
583 | if self.description: | |
578 | if 'Data' in self.description: |
|
584 | if 'Data' in self.description: | |
579 | grp = fp.create_group('Data') |
|
585 | grp = fp.create_group('Data') | |
580 | else: |
|
586 | else: | |
581 | grp = fp |
|
587 | grp = fp | |
582 | else: |
|
588 | else: | |
583 | grp = fp.create_group('Data') |
|
589 | grp = fp.create_group('Data') | |
584 |
|
590 | |||
585 | dtsets = [] |
|
591 | dtsets = [] | |
586 | data = [] |
|
592 | data = [] | |
587 |
|
593 | |||
588 | for dsInfo in self.dsList: |
|
594 | for dsInfo in self.dsList: | |
589 | if dsInfo['nDim'] == 0: |
|
595 | if dsInfo['nDim'] == 0: | |
590 | ds = grp.create_dataset( |
|
596 | ds = grp.create_dataset( | |
591 | self.getLabel(dsInfo['variable']), |
|
597 | self.getLabel(dsInfo['variable']), | |
592 | (self.blocksPerFile, ), |
|
598 | (self.blocksPerFile, ), | |
593 | chunks=True, |
|
599 | chunks=True, | |
594 | dtype=numpy.float64) |
|
600 | dtype=numpy.float64) | |
595 | dtsets.append(ds) |
|
601 | dtsets.append(ds) | |
596 | data.append((dsInfo['variable'], -1)) |
|
602 | data.append((dsInfo['variable'], -1)) | |
597 | else: |
|
603 | else: | |
598 | label = self.getLabel(dsInfo['variable']) |
|
604 | label = self.getLabel(dsInfo['variable']) | |
599 | if label is not None: |
|
605 | if label is not None: | |
600 | sgrp = grp.create_group(label) |
|
606 | sgrp = grp.create_group(label) | |
601 | else: |
|
607 | else: | |
602 | sgrp = grp |
|
608 | sgrp = grp | |
603 | for i in range(dsInfo['dsNumber']): |
|
609 | for i in range(dsInfo['dsNumber']): | |
604 | ds = sgrp.create_dataset( |
|
610 | ds = sgrp.create_dataset( | |
605 | self.getLabel(dsInfo['variable'], i), |
|
611 | self.getLabel(dsInfo['variable'], i), | |
606 | (self.blocksPerFile, ) + dsInfo['shape'][1:], |
|
612 | (self.blocksPerFile, ) + dsInfo['shape'][1:], | |
607 | chunks=True, |
|
613 | chunks=True, | |
608 | dtype=dsInfo['dtype']) |
|
614 | dtype=dsInfo['dtype']) | |
609 | dtsets.append(ds) |
|
615 | dtsets.append(ds) | |
610 | data.append((dsInfo['variable'], i)) |
|
616 | data.append((dsInfo['variable'], i)) | |
611 | fp.flush() |
|
617 | fp.flush() | |
612 |
|
618 | |||
613 | log.log('Creating file: {}'.format(fp.filename), self.name) |
|
619 | log.log('Creating file: {}'.format(fp.filename), self.name) | |
614 |
|
620 | |||
615 | self.ds = dtsets |
|
621 | self.ds = dtsets | |
616 | self.data = data |
|
622 | self.data = data | |
617 | self.firsttime = True |
|
623 | self.firsttime = True | |
618 | self.blockIndex = 0 |
|
624 | self.blockIndex = 0 | |
619 | return |
|
625 | return | |
620 |
|
626 | |||
621 | def putData(self): |
|
627 | def putData(self): | |
622 |
|
628 | |||
623 | if (self.blockIndex == self.blocksPerFile) or self.timeFlag(): |
|
629 | if (self.blockIndex == self.blocksPerFile) or self.timeFlag(): | |
624 | self.closeFile() |
|
630 | self.closeFile() | |
625 | self.setNextFile() |
|
631 | self.setNextFile() | |
626 |
|
632 | |||
627 | for i, ds in enumerate(self.ds): |
|
633 | for i, ds in enumerate(self.ds): | |
628 | attr, ch = self.data[i] |
|
634 | attr, ch = self.data[i] | |
629 | if ch == -1: |
|
635 | if ch == -1: | |
630 | ds[self.blockIndex] = getattr(self.dataOut, attr) |
|
636 | ds[self.blockIndex] = getattr(self.dataOut, attr) | |
631 | else: |
|
637 | else: | |
632 | ds[self.blockIndex] = getattr(self.dataOut, attr)[ch] |
|
638 | ds[self.blockIndex] = getattr(self.dataOut, attr)[ch] | |
633 |
|
639 | |||
634 | self.fp.flush() |
|
640 | self.fp.flush() | |
635 | self.blockIndex += 1 |
|
641 | self.blockIndex += 1 | |
|
642 | if self.blockIndex == 1: | |||
|
643 | log.log('Block No. {}/{} --> {}'.format(self.blockIndex, self.blocksPerFile, self.dataOut.datatime.ctime()), self.name) | |
|
644 | else: | |||
636 | log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name) |
|
645 | log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name) | |
637 |
|
||||
638 | return |
|
646 | return | |
639 |
|
647 | |||
640 | def closeFile(self): |
|
648 | def closeFile(self): | |
641 |
|
649 | |||
642 | if self.blockIndex != self.blocksPerFile: |
|
650 | if self.blockIndex != self.blocksPerFile: | |
643 | for ds in self.ds: |
|
651 | for ds in self.ds: | |
644 | ds.resize(self.blockIndex, axis=0) |
|
652 | ds.resize(self.blockIndex, axis=0) | |
645 |
|
653 | |||
646 | if self.fp: |
|
654 | if self.fp: | |
647 | self.fp.flush() |
|
655 | self.fp.flush() | |
648 | self.fp.close() |
|
656 | self.fp.close() | |
649 |
|
657 | |||
650 | def close(self): |
|
658 | def close(self): | |
651 |
|
659 | |||
652 | self.closeFile() |
|
660 | self.closeFile() |
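
For context on how the output of HDFWriter can be consumed, here is a minimal reading sketch assuming the default layout produced by writeMetadata/writeData above: a Metadata group and a Data group, with multi-channel variables stored as sub-groups whose datasets getLabel names channel00, channel01, and so on. The path and filename below are hypothetical and not taken from this patch.

# Minimal sketch (not part of the patch): inspect a file written by HDFWriter.
import h5py

with h5py.File('d2021001/D2021001000.hdf5', 'r') as fp:   # hypothetical file
    # Metadata datasets are written once per file
    for name, item in fp['Metadata'].items():
        print('Metadata/{} = {}'.format(name, item[()]))
    # Data holds one dataset per scalar variable, or a sub-group per
    # multi-channel variable with one dataset per channel
    for name, item in fp['Data'].items():
        if isinstance(item, h5py.Dataset):
            print('Data/{} shape {}'.format(name, item.shape))
        else:
            for ch in item:
                print('Data/{}/{} shape {}'.format(name, ch, item[ch].shape))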
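
The time-window selection in __setBlockList combines the date of the file's first block with the configured startTime and endTime and keeps the indices of the blocks whose utctime falls inside that window. A self-contained sketch of the same numpy logic; every value here is illustrative, not from the patch.

# Minimal sketch of the block selection done in __setBlockList
import datetime
import numpy

utctime = numpy.array([1609502400 + 60 * i for i in range(10)], dtype=float)  # 2021-01-01 12:00 to 12:09 UTC
startTime = datetime.time(12, 0, 0)
endTime = datetime.time(12, 5, 0)

thisDate = datetime.datetime.utcfromtimestamp(utctime[0]).date()
epoch = datetime.datetime(1970, 1, 1)
startUtc = (datetime.datetime.combine(thisDate, startTime) - epoch).total_seconds()
endUtc = (datetime.datetime.combine(thisDate, endTime) - epoch).total_seconds()

ind = numpy.where(numpy.logical_and(utctime >= startUtc, utctime < endUtc))[0]
print(ind)  # [0 1 2 3 4] -> blocks inside the requested time window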
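
HDFWriter.setNextFile builds output names from the optchar 'D', the four-digit year, the three-digit day of year and either an incrementing three-digit set number (when setType is None) or the four-digit minute of the day, inside a d<year><doy> subfolder. A small sketch of that naming with a made-up timestamp:

# Minimal sketch of the file naming used in HDFWriter.setNextFile
import time

utctime = 1609502400.0                      # hypothetical: 2021-01-01 12:00:00 UTC
optchar, ext = 'D', '.hdf5'
tt = time.gmtime(utctime)

subfolder = 'd%4.4d%3.3d' % (tt.tm_year, tt.tm_yday)          # d2021001
set_number = 0                              # last set found in the folder + 1
name_by_set = '%s%4.4d%3.3d%03d%s' % (optchar, tt.tm_year, tt.tm_yday, set_number, ext)
minute_of_day = tt.tm_hour * 60 + tt.tm_min                   # used when setType is given
name_by_time = '%s%4.4d%3.3d%04d%s' % (optchar, tt.tm_year, tt.tm_yday, minute_of_day, ext)

print(subfolder, name_by_set, name_by_time)  # d2021001 D2021001000.hdf5 D20210010720.hdf5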