##// END OF EJS Templates
example correction
joabAM -
r1580:400c11b1ac3f
parent child
Show More
@@ -0,0 +1,222
1 import os, sys, json
2 import time
3 import datetime
4 from multiprocessing import Process
5 from shutil import rmtree
6 from schainpy.controller import Project
7 from multiprocessing.connection import wait
8 from schainpy.model.io.utilsIO import MergeH5
9 ###########################################################################
10 ###########################################################################
# ----------------------------------------------------------------------
# Run-time configuration for the multiprocessing AMISR merge example.
# ----------------------------------------------------------------------

# Reference dates; the hard-coded campaign dates below take precedence.
dty = datetime.date.today()
str1 = dty + datetime.timedelta(days=1)   # tomorrow
str2 = dty - datetime.timedelta(days=1)   # yesterday
today = dty.strftime("%Y/%m/%d")
tomorrow = str1.strftime("%Y/%m/%d")
yesterday = str2.strftime("%Y/%m/%d")

# Input raw-data path and output path for the merged HDF5 products.
path = '/media/soporte/Elements/DATA_AMISR/2022'
#path= '/media/soporte/Elements/DATA_AMISR/2022/'
outPath = '/home/soporte/Data/data2Fabiano/POWER_H5_AMISR/2022/'

# Plot limits (hours) and local-time flag.
xmin = 7
xmax = 18
localtime = 1

startDate = today
endDate = tomorrow

# Campaign override (perpendicular and oblique pointing experiment).
startDate = '2022/09/24'
endDate = '2022/09/25'

startTime = '18:00:00'
endTime = '07:00:00'

# Radar / decoding parameters.
nChannels = 10
IPPms = 10
code = [1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,1,-1,1,1,-1,1]
nCode = 1
nBaud = 28
nOsamp = 2  # oversample factor
dataList = ['data_pow','utctime']

# Profiles per channel in 10 seconds (informational only).
nipp = (1000 / IPPms) / nChannels
ippP10sec = nipp * 10
print("{} profiles in 10 seconds".format(ippP10sec))

# Derive the day-of-year so the output folder is named ESF<year><doy>.
l = startDate.split('/')
datelist = datetime.date(int(l[0]), int(l[1]), int(l[2]))
DOY = datelist.timetuple().tm_yday
year = l[0]
month = l[1].zfill(2)
day = l[2].zfill(2)
doy = str(DOY).zfill(3)

fpathOut = outPath + "ESF" + l[0] + doy

# os.makedirs(exist_ok=True) also creates missing parents; the original
# os.mkdir raised when outPath itself did not exist yet.
os.makedirs(fpathOut, exist_ok=True)
print("outPath 1: ", fpathOut)

# One sub-directory per channel for the per-channel schain outputs.
outPaths = [fpathOut + "/CH{}".format(ch) for ch in range(nChannels)]
75
76
def schainByChannels(channel, Outdata):
    '''Run the schain processing chain for a single AMISR channel.

    Reads raw voltages, selects the channel, decodes, computes spectra,
    applies incoherent integration and writes power + timestamps to HDF5.

    Parameters:
        channel (int): channel number to process (also used as project id).
        Outdata (str): output directory for this channel's HDF5 files.
    '''
    # Create the channel output directory (and any missing parents);
    # the original os.mkdir failed when the parent did not exist.
    os.makedirs(Outdata, exist_ok=True)
    print("Outdata {}: ".format(channel), Outdata)

    controllerObj = Project()
    controllerObj.setup(id=channel, name='amisr_test', description='desc')
    readUnitConfObj = controllerObj.addReadUnit(datatype='AMISRReader',
                                                path=path,
                                                startDate=startDate,
                                                endDate=endDate,
                                                startTime=startTime,
                                                endTime=endTime,
                                                walk='1',
                                                code=code,
                                                nCode=nCode,
                                                nBaud=nBaud,
                                                nOsamp=nOsamp,
                                                nFFT=10,
                                                timezone='lt',
                                                margin_days=5,
                                                online=0)

    proc_volts = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())

    # Keep only the requested channel.
    opObj03 = proc_volts.addOperation(name='selectChannels', optype='other')
    opObj03.addParameter(name='channelList', value=[channel], format='list')

    # Pulse-code decoding.
    opObj01 = proc_volts.addOperation(name='Decoder', optype='other')
    opObj01.addParameter(name='code', value=code, format='floatlist')
    opObj01.addParameter(name='nCode', value=nCode, format='int')
    opObj01.addParameter(name='nBaud', value=nBaud, format='int')
    opObj01.addParameter(name='osamp', value=nOsamp, format='int')

    proc_spc = controllerObj.addProcUnit(datatype='SpectraProc', inputId=proc_volts.getId())
    proc_spc.addParameter(name='nFFTPoints', value=10, format='int')

    # 20-second incoherent integration.
    opObj13 = proc_spc.addOperation(name='IncohInt', optype='other')
    opObj13.addParameter(name='timeInterval', value='20', format='int')

    procParam = controllerObj.addProcUnit(datatype='ParametersProc', inputId=proc_spc.getId())
    moments = procParam.addOperation(name='SpectralMoments', optype='other')
    opObj12 = procParam.addOperation(name='setAttribute')
    opObj12.addParameter(name='type', value='Spectra')

    writer = procParam.addOperation(name='HDFWriter', optype='other')
    writer.addParameter(name='path', value=Outdata)
    writer.addParameter(name='timeZone', value="ut")
    writer.addParameter(name='hourLimit', value=14)
    writer.addParameter(name='breakDays', value=False)
    writer.addParameter(name='blocksPerFile', value='2340', format='int')
    writer.addParameter(name='metadataList', value='timeZone,type,unitsDescription,\
radarControllerHeaderObj.dtype,radarControllerHeaderObj.ipp,radarControllerHeaderObj.txA,radarControllerHeaderObj.frequency,\
radarControllerHeaderObj.sampleRate,radarControllerHeaderObj.heightList,radarControllerHeaderObj.elevationList,\
radarControllerHeaderObj.azimuthList,radarControllerHeaderObj.channelList,radarControllerHeaderObj.heightResolution,\
radarControllerHeaderObj.code,radarControllerHeaderObj.nCode,radarControllerHeaderObj.nBaud,radarControllerHeaderObj.nOsamp,\
processingHeaderObj.dtype,processingHeaderObj.ipp,processingHeaderObj.nCohInt,processingHeaderObj.nSamplesFFT,\
processingHeaderObj.nFFTPoints,processingHeaderObj.timeIncohInt,processingHeaderObj.heightList,processingHeaderObj.channelList,processingHeaderObj.elevationList,\
processingHeaderObj.azimuthList,processingHeaderObj.heightResolution', format='list')
    writer.addParameter(name='dataList', value='data_pow,utctime', format='list')

    controllerObj.start()
150
def plotMerged(mergedPath):
    '''Plot the merged power data found in *mergedPath* and save the figures.

    After plotting, the temporary figure folder(s) created by the plotter
    are removed.

    Parameters:
        mergedPath (str): directory holding the merged HDF5 files; figures
            are saved there too.
    '''
    controllerObj = Project()
    controllerObj.setup(id='22164', name='esf_proc', description="plot merged power")

    readerUnit = controllerObj.addReadUnit(datatype='HDFReader',
                                           path=mergedPath,
                                           startDate=startDate,
                                           endDate=endDate,
                                           startTime=startTime,
                                           endTime=endTime,
                                           walk=0,
                                           timezone='lt',
                                           online=0)

    opObj21 = readerUnit.addOperation(name='PowerPlot', optype='external')
    opObj21.addParameter(name='showprofile', value='1', format='int')
    opObj21.addParameter(name='xmin', value=xmin, format='int')
    opObj21.addParameter(name='xmax', value=xmax, format='int')
    opObj21.addParameter(name='zmin', value=100, format='int')
    opObj21.addParameter(name='zmax', value=120, format='int')
    opObj21.addParameter(name='show', value=0, format='int')
    opObj21.addParameter(name='localtime', value=1, format='int')
    opObj21.addParameter(name='save', value=mergedPath, format='str')
    opObj21.addParameter(name='t_units', value='h', format='str')

    controllerObj.start()
    controllerObj.join()

    # Remove intermediate figure folders; guard against the plot step
    # having produced nothing (folder absent) so rmtree cannot raise.
    powerPath = mergedPath + "/pow"
    figPaths = [powerPath]
    for pch in figPaths:
        if os.path.exists(pch):
            print("Removing ", pch)
            rmtree(pch)
187
188
if __name__ == '__main__':

    # Launch one schain process per channel.
    pool = []
    for ch in range(nChannels):
        p = Process(target=schainByChannels, args=(ch, outPaths[ch]))
        pool.append(p)
        p.start()

    # Block until every per-channel process has finished.
    wait([p.sentinel for p in pool])
    time.sleep(10)

    ############################################################################
    print("Starting merging proc...")
    # Ensure the merge output directory exists (parents included).
    os.makedirs(fpathOut, exist_ok=True)
    # Fixed: the original passed fpathOut as a second print argument,
    # leaving the '{}' placeholder unformatted in the output.
    print("final path Out: {}".format(fpathOut))

    merger = MergeH5(nChannels, fpathOut, dataList, *outPaths)
    merger.run()
    time.sleep(2)

    ############################################################################
    plotMerged(fpathOut)
    ############################################################################

    # The per-channel HDF5 files are no longer needed after the merge.
    print("Removing hdf5 files from channels...")
    for pch in outPaths:
        rmtree(pch)
    print("Proc finished ! :)")
@@ -1,669 +1,671
1 1 """
2 2 Utilities for IO modules
3 3 @modified: Joab Apaza
4 4 @email: roj-op01@igp.gob.pe, joab.apaza32@gmail.com
5 5
6 6 """
7 7 ################################################################################
8 8 ################################################################################
9 9 import os
10 10 from datetime import datetime
11 11 import numpy
12 12
13 13 from schainpy.model.data.jrodata import Parameters
14 14 import itertools
15 15 import numpy
16 16 import h5py
17 17 import re
18 18 import time
19 19 ################################################################################
20 20 ################################################################################
21 21 ################################################################################
def folder_in_range(folder, start_date, end_date, pattern):
    """
    Check whether a folder name's date lies between start_date and end_date.

    Args:
        folder (str): Folder name to check
        start_date (date): Initial date
        end_date (date): Final date
        pattern (str): strptime format the folder name must follow
    Returns:
        bool: True when start_date <= folder date <= end_date
    Raises:
        ValueError: if the folder name does not match the pattern
    """
    try:
        dt = datetime.strptime(folder, pattern)
    except ValueError as err:
        # Narrowed from a bare `except:`; re-raise with a clearer message
        # while preserving the original cause.
        raise ValueError('Folder {} does not match {} format'.format(folder, pattern)) from err
    return start_date <= dt.date() <= end_date
39 39
40 40 ################################################################################
41 41 ################################################################################
42 42 ################################################################################
def getHei_index(minHei, maxHei, heightList):
    """
    Return the (minIndex, maxIndex) positions in heightList bracketing
    the requested [minHei, maxHei] range.

    The requested limits are first clamped to the available heights.

    Args:
        minHei (float): lower height limit
        maxHei (float): upper height limit
        heightList (sequence of float): monotonically increasing heights
    Returns:
        tuple(int, int): index of the first height >= minHei and of the
        last height <= maxHei (original fallbacks kept when the search
        comes up empty).
    """
    if minHei < heightList[0]:
        minHei = heightList[0]

    if maxHei > heightList[-1]:
        maxHei = heightList[-1]

    heights = numpy.asarray(heightList)

    inda = numpy.where(heights >= minHei)
    indb = numpy.where(heights <= maxHei)

    try:
        minIndex = inda[0][0]
    except IndexError:  # narrowed from bare except: empty selection
        minIndex = 0

    try:
        maxIndex = indb[0][-1]
    except IndexError:  # narrowed from bare except; original fallback kept
        maxIndex = len(heightList)
    return minIndex, maxIndex
67 67
68 68 ################################################################################
69 69 ################################################################################
70 70 ################################################################################
class MergeH5(object):
    """Merge per-channel HDF5 files into single multi-channel files.

    Reads HDF5 files created with the `HDFWriter` operation when channels
    are processed separately, then merges all channels into one file per
    input file.

    Example:
        nChannels = 4
        pathOut = ".../byChannels/merged"
        p0 = ".../byChannels/d2022240_Ch0"
        p1 = ".../byChannels/d2022240_Ch1"
        p2 = ".../byChannels/d2022240_Ch2"
        p3 = ".../byChannels/d2022240_Ch3"
        dataList = ['data_spc', 'data_cspc', 'nIncohInt', 'utctime']
        merger = MergeH5(nChannels, pathOut, dataList, p0, p1, p2, p3)
        merger.run()

    The file example_FULLmultiprocessing_merge.txt shows an application
    for AMISR data.
    """

    isConfig = False
    inPaths = None
    nChannels = None
    ch_dataIn = []
    channelList = []

    def __init__(self, nChannels, pOut, dataList, *args):
        """
        Args:
            nChannels (int): number of channels to merge.
            pOut (str): output directory for the merged files.
            dataList (list of str): dataOut attribute names to write.
            *args (str): one input directory per channel.
        Raises:
            ValueError: when the number of input paths differs from
                nChannels (the original printed and returned, leaving a
                half-initialized object behind).
        """
        self.inPaths = [p for p in args]
        if len(self.inPaths) != nChannels:
            raise ValueError(
                "number of channels different from input paths {} != {}".format(
                    nChannels, len(args)))

        self.pathOut = pOut
        self.dataList = dataList
        self.nChannels = len(self.inPaths)
        # One Parameters holder per channel plus one merged output holder.
        self.ch_dataIn = [Parameters() for p in args]
        self.dataOut = Parameters()
        self.channelList = [n for n in range(nChannels)]
        self.blocksPerFile = None
        self.date = None
        self.ext = ".hdf5$"   # used as a regex tail by getH5files
        self.optchar = "D"
        self.meta = {}
        self.data = {}
        self.open_file = h5py.File
        self.open_mode = 'r'
        self.description = {}
        self.extras = {}
        self.filefmt = "*%Y%j***"
        self.folderfmt = "*%Y%j"

        # Content flags, set while reading the input files.
        self.flag_spc = False
        self.flag_pow = False
        self.flag_snr = False
        self.flag_nIcoh = False
        self.flagProcessingHeader = False
        self.flagControllerHeader = False
133 135 def setup(self):
134 136
135 137
136 138 # if not self.ext.startswith('.'):
137 139 # self.ext = '.{}'.format(self.ext)
138 140
139 141 self.filenameList = self.searchFiles(self.inPaths, None)
140 142 self.nfiles = len(self.filenameList[0])
141 143
142 144
143 145
144 146 def searchFiles(self, paths, date, walk=True):
145 147 # self.paths = path
146 148 #self.date = startDate
147 149 #self.walk = walk
148 150 filenameList = [[] for n in range(self.nChannels)]
149 151 ch = 0
150 152 for path in paths:
151 153 if os.path.exists(path):
152 154 print("Searching files in {}".format(path))
153 155 filenameList[ch] = self.getH5files(path, walk)
154 156 print("Found: ")
155 157 for f in filenameList[ch]:
156 158 print(f)
157 159 else:
158 160 self.status = 0
159 161 print('Path:%s does not exists'%path)
160 162 return 0
161 163 ch+=1
162 164 return filenameList
163 165
164 166 def getH5files(self, path, walk):
165 167
166 168 dirnameList = []
167 169 pat = '(\d)+.'+self.ext
168 170 if walk:
169 171 for root, dirs, files in os.walk(path):
170 172 for dir in dirs:
171 173 #print(os.path.join(root,dir))
172 174 files = [re.search(pat,x) for x in os.listdir(os.path.join(root,dir))]
173 175 #print(files)
174 176 files = [x for x in files if x!=None]
175 177 files = [x.string for x in files]
176 178 files = [os.path.join(root,dir,x) for x in files if x!=None]
177 179 files.sort()
178 180
179 181 dirnameList += files
180 182 return dirnameList
181 183 else:
182 184
183 185 dirnameList = [re.search(pat,x) for x in os.listdir(path)]
184 186 dirnameList = [x for x in dirnameList if x!=None]
185 187 dirnameList = [x.string for x in dirnameList]
186 188 dirnameList = [x for x in dirnameList if x!=None]
187 189 dirnameList.sort()
188 190
189 191 return dirnameList
190 192
191 193
192 194 def readFile(self,fp,ch):
193 195
194 196 '''Read metadata and data'''
195 197
196 198 self.readMetadata(fp,ch)
197 199 #print(self.metadataList)
198 200 data = self.readData(fp)
199 201
200 202
201 203 for attr in self.meta:
202 204
203 205 if "processingHeaderObj" in attr:
204 206 self.flagProcessingHeader=True
205 207
206 208 if "radarControllerHeaderObj" in attr:
207 209 self.flagControllerHeader=True
208 210
209 211 at = attr.split('.')
210 212 #print("AT ", at)
211 213 if len(at) > 1:
212 214 setattr(eval("self.ch_dataIn[ch]."+at[0]),at[1], self.meta[attr])
213 215 else:
214 216 setattr(self.ch_dataIn[ch], attr, self.meta[attr])
215 217
216 218 self.fill_dataIn(data, self.ch_dataIn[ch])
217 219
218 220
219 221 return
220 222
221 223
222 224 def readMetadata(self, fp, ch):
223 225 '''
224 226 Reads Metadata
225 227 '''
226 228 meta = {}
227 229 self.metadataList = []
228 230 grp = fp['Metadata']
229 231 for item in grp.values():
230 232 name = item.name
231 233
232 234 if isinstance(item, h5py.Dataset):
233 235 name = name.split("/")[-1]
234 236 if 'List' in name:
235 237 meta[name] = item[()].tolist()
236 238 else:
237 239 meta[name] = item[()]
238 240 self.metadataList.append(name)
239 241 else:
240 242 grp2 = fp[name]
241 243 Obj = name.split("/")[-1]
242 244 #print(Obj)
243 245 for item2 in grp2.values():
244 246 name2 = Obj+"."+item2.name.split("/")[-1]
245 247 if 'List' in name2:
246 248 meta[name2] = item2[()].tolist()
247 249 else:
248 250 meta[name2] = item2[()]
249 251 self.metadataList.append(name2)
250 252
251 253
252 254
253 255 if not self.meta:
254 256 self.meta = meta.copy()
255 257 for key in list(self.meta.keys()):
256 258 if "channelList" in key:
257 259 self.meta["channelList"] =[n for n in range(self.nChannels)]
258 260 if "processingHeaderObj" in key:
259 261 self.meta["processingHeaderObj.channelList"] =[n for n in range(self.nChannels)]
260 262 if "radarControllerHeaderObj" in key:
261 263 self.meta["radarControllerHeaderObj.channelList"] =[n for n in range(self.nChannels)]
262 264 return 1
263 265
264 266 else:
265 267
266 268 for k in list(self.meta.keys()):
267 269 if 'List' in k and 'channel' not in k and "height" not in k and "radarControllerHeaderObj" not in k:
268 270 self.meta[k] += meta[k]
269 271
270 272 #print("Metadata: ",self.meta)
271 273 return 1
272 274
273 275
274 276
275 277
276 278 def fill_dataIn(self,data, dataIn):
277 279
278 280 for attr in data:
279 281 if data[attr].ndim == 1:
280 282 setattr(dataIn, attr, data[attr][:])
281 283 else:
282 284 setattr(dataIn, attr, numpy.squeeze(data[attr][:,:]))
283 285 #print("shape in", dataIn.data_spc.shape, len(dataIn.data_spc))
284 286 if self.flag_spc:
285 287 if dataIn.data_spc.ndim > 3:
286 288 dataIn.data_spc = dataIn.data_spc[0]
287 289 #print("shape in", dataIn.data_spc.shape)
288 290
289 291
290 292
291 293 def getBlocksPerFile(self):
292 294 b = numpy.zeros(self.nChannels)
293 295 for i in range(self.nChannels):
294 296 if self.flag_spc:
295 297 b[i] = self.ch_dataIn[i].data_spc.shape[0] #number of blocks
296 298 elif self.flag_pow:
297 299 b[i] = self.ch_dataIn[i].data_pow.shape[0] #number of blocks
298 300 elif self.flag_snr:
299 301 b[i] = self.ch_dataIn[i].data_snr.shape[0] #number of blocks
300 302
301 303 self.blocksPerFile = int(b.min())
302 304 iresh_ch = numpy.where(b > self.blocksPerFile)[0]
303 305 if len(iresh_ch) > 0:
304 306 for ich in iresh_ch:
305 307 for i in range(len(self.dataList)):
306 308 if hasattr(self.ch_dataIn[ich], self.dataList[i]):
307 309 # print("reshaping ", self.dataList[i])
308 310 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
309 311 dataAux = getattr(self.ch_dataIn[ich], self.dataList[i])
310 312 setattr(self.ch_dataIn[ich], self.dataList[i], None)
311 313 setattr(self.ch_dataIn[ich], self.dataList[i], dataAux[0:self.blocksPerFile])
312 314 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
313 315 else:
314 316 return
315 317
316 318
317 319 def getLabel(self, name, x=None):
318 320
319 321 if x is None:
320 322 if 'Data' in self.description:
321 323 data = self.description['Data']
322 324 if 'Metadata' in self.description:
323 325 data.update(self.description['Metadata'])
324 326 else:
325 327 data = self.description
326 328 if name in data:
327 329 if isinstance(data[name], str):
328 330 return data[name]
329 331 elif isinstance(data[name], list):
330 332 return None
331 333 elif isinstance(data[name], dict):
332 334 for key, value in data[name].items():
333 335 return key
334 336 return name
335 337 else:
336 338 if 'Metadata' in self.description:
337 339 meta = self.description['Metadata']
338 340 else:
339 341 meta = self.description
340 342 if name in meta:
341 343 if isinstance(meta[name], list):
342 344 return meta[name][x]
343 345 elif isinstance(meta[name], dict):
344 346 for key, value in meta[name].items():
345 347 return value[x]
346 348
347 349 if 'cspc' in name:
348 350 return 'pair{:02d}'.format(x)
349 351 else:
350 352 return 'channel{:02d}'.format(x)
351 353
352 354 def readData(self, fp):
353 355 #print("read fp: ", fp)
354 356 data = {}
355 357
356 358 grp = fp['Data']
357 359
358 360 for name in grp:
359 361 if "spc" in name:
360 362 self.flag_spc = True
361 363 if "pow" in name:
362 364 self.flag_pow = True
363 365 if "snr" in name:
364 366 self.flag_snr = True
365 367 if "nIncohInt" in name:
366 368 self.flag_nIcoh = True
367 369
368 370 if isinstance(grp[name], h5py.Dataset):
369 371 array = grp[name][()]
370 372 elif isinstance(grp[name], h5py.Group):
371 373 array = []
372 374 for ch in grp[name]:
373 375 array.append(grp[name][ch][()])
374 376 array = numpy.array(array)
375 377 else:
376 378 print('Unknown type: {}'.format(name))
377 379 data[name] = array
378 380
379 381 return data
380 382
381 383 def getDataOut(self):
382 384
383 385 self.dataOut = self.ch_dataIn[0].copy() #dataIn #blocks, fft, hei for metadata
384 386 if self.flagProcessingHeader:
385 387 self.dataOut.processingHeaderObj = self.ch_dataIn[0].processingHeaderObj.copy()
386 388 self.dataOut.heightList = self.dataOut.processingHeaderObj.heightList
387 389 self.dataOut.ippSeconds = self.dataOut.processingHeaderObj.ipp
388 390 self.dataOut.channelList = self.dataOut.processingHeaderObj.channelList
389 391 self.dataOut.nCohInt = self.dataOut.processingHeaderObj.nCohInt
390 392 self.dataOut.nFFTPoints = self.dataOut.processingHeaderObj.nFFTPoints
391 393
392 394 if self.flagControllerHeader:
393 395 self.dataOut.radarControllerHeaderObj = self.ch_dataIn[0].radarControllerHeaderObj.copy()
394 396 self.dataOut.frequency = self.dataOut.radarControllerHeaderObj.frequency
395 397 #--------------------------------------------------------------------
396 398
397 399
398 400 #--------------------------------------------------------------------
399 401 if self.flag_spc:
400 402 if self.dataOut.data_spc.ndim < 3:
401 403 print("shape spc in: ",self.dataOut.data_spc.shape )
402 404 return 0
403 405 if self.flag_pow:
404 406 if self.dataOut.data_pow.ndim < 2:
405 407 print("shape pow in: ",self.dataOut.data_pow.shape )
406 408 return 0
407 409 if self.flag_snr:
408 410 if self.dataOut.data_snr.ndim < 2:
409 411 print("shape snr in: ",self.dataOut.data_snr.shape )
410 412 return 0
411 413
412 414 self.dataOut.data_spc = None
413 415 self.dataOut.data_cspc = None
414 416 self.dataOut.data_pow = None
415 417 self.dataOut.data_snr = None
416 418 self.dataOut.utctim = None
417 419 self.dataOut.nIncohInt = None
418 420 #--------------------------------------------------------------------
419 421 if self.flag_spc:
420 422 spc = [data.data_spc for data in self.ch_dataIn]
421 423 self.dataOut.data_spc = numpy.stack(spc, axis=1) #blocks, ch, fft, hei
422 424 #--------------------------------------------------------------------
423 425 if self.flag_pow:
424 426 pow = [data.data_pow for data in self.ch_dataIn]
425 427 self.dataOut.data_pow = numpy.stack(pow, axis=1) #blocks, ch, fft, hei
426 428 #--------------------------------------------------------------------
427 429 if self.flag_snr:
428 430 snr = [data.data_snr for data in self.ch_dataIn]
429 431 self.dataOut.data_snr = numpy.stack(snr, axis=1) #blocks, ch, fft, hei
430 432
431 433 #--------------------------------------------------------------------
432 434 time = [data.utctime for data in self.ch_dataIn]
433 435 time = numpy.asarray(time).mean(axis=0)
434 436 time = numpy.squeeze(time)
435 437 self.dataOut.utctime = time
436 438 #--------------------------------------------------------------------
437 439 if self.flag_nIcoh:
438 440 ints = [data.nIncohInt for data in self.ch_dataIn]
439 441 self.dataOut.nIncohInt = numpy.stack(ints, axis=1)
440 442
441 443 if self.dataOut.nIncohInt.ndim > 3:
442 444 aux = self.dataOut.nIncohInt
443 445 self.dataOut.nIncohInt = None
444 446 self.dataOut.nIncohInt = aux[0]
445 447
446 448 if self.dataOut.nIncohInt.ndim < 3:
447 449 nIncohInt = numpy.repeat(self.dataOut.nIncohInt, self.dataOut.nHeights).reshape(self.blocksPerFile,self.nChannels, self.dataOut.nHeights)
448 450 #nIncohInt = numpy.reshape(nIncohInt, (self.blocksPerFile,self.nChannels, self.dataOut.nHeights))
449 451 self.dataOut.nIncohInt = None
450 452 self.dataOut.nIncohInt = nIncohInt
451 453
452 454 if (self.dataOut.nIncohInt.shape)[0]==self.nChannels: ## ch,blocks, hei
453 455 self.dataOut.nIncohInt = numpy.swapaxes(self.dataOut.nIncohInt, 0, 1) ## blocks,ch, hei
454 456 else:
455 457 self.dataOut.nIncohInt = self.ch_dataIn[0].nIncohInt
456 458 #--------------------------------------------------------------------
457 459 #print("utcTime: ", time.shape)
458 460 #print("data_spc ",self.dataOut.data_spc.shape)
459 461 if "data_cspc" in self.dataList:
460 462 pairsList = [pair for pair in itertools.combinations(self.channelList, 2)]
461 463 #print("PairsList: ", pairsList)
462 464 self.dataOut.pairsList = pairsList
463 465 cspc = []
464 466
465 467 for i, j in pairsList:
466 468 cspc.append(self.ch_dataIn[i].data_spc*numpy.conjugate(self.ch_dataIn[j].data_spc)) #blocks, fft, hei
467 469
468 470 cspc = numpy.asarray(cspc) # # pairs, blocks, fft, hei
469 471 #print("cspc: ",cspc.shape)
470 472 self.dataOut.data_cspc = numpy.swapaxes(cspc, 0, 1) ## blocks, pairs, fft, hei
471 473 #print("dataOut.data_cspc: ",self.dataOut.data_cspc.shape)
472 474 #if "data_pow" in self.dataList:
473 475
474 476 return 1
475 477
476 478 # def writeMetadata(self, fp):
477 479 #
478 480 #
479 481 # grp = fp.create_group('Metadata')
480 482 #
481 483 # for i in range(len(self.metadataList)):
482 484 # if not hasattr(self.dataOut, self.metadataList[i]):
483 485 # print('Metadata: `{}` not found'.format(self.metadataList[i]))
484 486 # continue
485 487 # value = getattr(self.dataOut, self.metadataList[i])
486 488 # if isinstance(value, bool):
487 489 # if value is True:
488 490 # value = 1
489 491 # else:
490 492 # value = 0
491 493 # grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
492 494 # return
493 495 def writeMetadata(self, fp):
494 496
495 497
496 498 grp = fp.create_group('Metadata')
497 499
498 500 for i in range(len(self.metadataList)):
499 501 attribute = self.metadataList[i]
500 502 attr = attribute.split('.')
501 503 if '' in attr:
502 504 attr.remove('')
503 505 #print(attr)
504 506 if len(attr) > 1:
505 507 if not hasattr(eval("self.dataOut."+attr[0]),attr[1]):
506 508 print('Metadata: {}.{} not found'.format(attr[0],attr[1]))
507 509 continue
508 510 value = getattr(eval("self.dataOut."+attr[0]),attr[1])
509 511 if isinstance(value, bool):
510 512 if value is True:
511 513 value = 1
512 514 else:
513 515 value = 0
514 516 grp2 = None
515 517 if not 'Metadata/'+attr[0] in fp:
516 518 grp2 = fp.create_group('Metadata/'+attr[0])
517 519 else:
518 520 grp2 = fp['Metadata/'+attr[0]]
519 521
520 522 grp2.create_dataset(attr[1], data=value)
521 523
522 524 else:
523 525 if not hasattr(self.dataOut, attr[0] ):
524 526 print('Metadata: `{}` not found'.format(attribute))
525 527 continue
526 528 value = getattr(self.dataOut, attr[0])
527 529 if isinstance(value, bool):
528 530 if value is True:
529 531 value = 1
530 532 else:
531 533 value = 0
532 534 if isinstance(value, type(None)):
533 535 print("------ERROR, value {} is None".format(attribute))
534 536
535 537 grp.create_dataset(self.getLabel(attribute), data=value)
536 538 return
537 539
538 540 def getDsList(self):
539 541
540 542 dsList =[]
541 543 for i in range(len(self.dataList)):
542 544 dsDict = {}
543 545 if hasattr(self.dataOut, self.dataList[i]):
544 546 dataAux = getattr(self.dataOut, self.dataList[i])
545 547 dsDict['variable'] = self.dataList[i]
546 548 else:
547 549 print('Attribute {} not found in dataOut'.format(self.dataList[i]))
548 550 continue
549 551
550 552 if dataAux is None:
551 553 continue
552 554 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
553 555 dsDict['nDim'] = 0
554 556 else:
555 557
556 558 dsDict['nDim'] = len(dataAux.shape) -1
557 559 dsDict['shape'] = dataAux.shape
558 560
559 561 if len(dsDict['shape'])>=2:
560 562 dsDict['dsNumber'] = dataAux.shape[1]
561 563 else:
562 564 dsDict['dsNumber'] = 1
563 565 dsDict['dtype'] = dataAux.dtype
564 566 # if len(dataAux.shape) == 4:
565 567 # dsDict['nDim'] = len(dataAux.shape) -1
566 568 # dsDict['shape'] = dataAux.shape
567 569 # dsDict['dsNumber'] = dataAux.shape[1]
568 570 # dsDict['dtype'] = dataAux.dtype
569 571 # else:
570 572 # dsDict['nDim'] = len(dataAux.shape)
571 573 # dsDict['shape'] = dataAux.shape
572 574 # dsDict['dsNumber'] = dataAux.shape[0]
573 575 # dsDict['dtype'] = dataAux.dtype
574 576
575 577 dsList.append(dsDict)
576 578 #print(dsList)
577 579 self.dsList = dsList
578 580
579 581 def clean_dataIn(self):
580 582 for ch in range(self.nChannels):
581 583 self.ch_dataIn[ch].data_spc = None
582 584 self.ch_dataIn[ch].utctime = None
583 585 self.ch_dataIn[ch].nIncohInt = None
584 586 self.meta ={}
585 587 self.blocksPerFile = None
586 588
587 589 def writeData(self, outFilename):
588 590
589 591 self.getDsList()
590 592
591 593 fp = h5py.File(outFilename, 'w')
592 594 self.writeMetadata(fp)
593 595 grp = fp.create_group('Data')
594 596
595 597 dtsets = []
596 598 data = []
597 599 for dsInfo in self.dsList:
598 600 if dsInfo['nDim'] == 0:
599 601 ds = grp.create_dataset(
600 602 self.getLabel(dsInfo['variable']),(self.blocksPerFile, ),chunks=True,dtype=numpy.float64)
601 603 dtsets.append(ds)
602 604 data.append((dsInfo['variable'], -1))
603 605 else:
604 606 label = self.getLabel(dsInfo['variable'])
605 607 if label is not None:
606 608 sgrp = grp.create_group(label)
607 609 else:
608 610 sgrp = grp
609 611 k = -1*(dsInfo['nDim'] - 1)
610 612 #print(k, dsInfo['shape'], dsInfo['shape'][k:])
611 613 for i in range(dsInfo['dsNumber']):
612 614 ds = sgrp.create_dataset(
613 615 self.getLabel(dsInfo['variable'], i),(self.blocksPerFile, ) + dsInfo['shape'][k:],
614 616 chunks=True,
615 617 dtype=dsInfo['dtype'])
616 618 dtsets.append(ds)
617 619 data.append((dsInfo['variable'], i))
618 620
619 621 #print("\n",dtsets)
620 622
621 623 print('Creating merged file: {}'.format(fp.filename))
622 624
623 625 for i, ds in enumerate(dtsets):
624 626 attr, ch = data[i]
625 627 if ch == -1:
626 628 ds[:] = getattr(self.dataOut, attr)
627 629 else:
628 630 #print(ds, getattr(self.dataOut, attr)[ch].shape)
629 631 aux = getattr(self.dataOut, attr)# block, ch, ...
630 632 aux = numpy.swapaxes(aux,0,1) # ch, blocks, ...
631 633 #print(ds.shape, aux.shape)
632 634 #ds[:] = getattr(self.dataOut, attr)[ch]
633 635 ds[:] = aux[ch]
634 636
635 637 fp.flush()
636 638 fp.close()
637 639 self.clean_dataIn()
638 640 return
639 641
640 642
641 643
642 644 def run(self):
643 645
644 646 if not(self.isConfig):
645 647 self.setup()
646 648 self.isConfig = True
647 649
648 650 for nf in range(self.nfiles):
649 651 name = None
650 652 for ch in range(self.nChannels):
651 653 name = self.filenameList[ch][nf]
652 654 filename = os.path.join(self.inPaths[ch], name)
653 655 fp = h5py.File(filename, 'r')
654 656 #print("Opening file: ",filename)
655 657 self.readFile(fp,ch)
656 658 fp.close()
657 659
658 660 if self.blocksPerFile == None:
659 661 self.getBlocksPerFile()
660 662 print("blocks per file: ", self.blocksPerFile)
661 663
662 664 if not self.getDataOut():
663 665 print("Error getting DataOut invalid number of blocks")
664 666 return
665 667 name = name[-16:]
666 668 #print("Final name out: ", name)
667 669 outFile = os.path.join(self.pathOut, name)
668 670 #print("Outfile: ", outFile)
669 671 self.writeData(outFile)
General Comments 0
You need to be logged in to leave comments. Login now