example correction
joabAM - r1580:400c11b1ac3f
@@ -0,0 +1,222
1 import os, sys, json
2 import time
3 import datetime
4 from multiprocessing import Process
5 from shutil import rmtree
6 from schainpy.controller import Project
7 from multiprocessing.connection import wait
8 from schainpy.model.io.utilsIO import MergeH5
9 ###########################################################################
10 ###########################################################################
11 dty = datetime.date.today()
12 str1 = dty + datetime.timedelta(days=1)
13 str2 = dty - datetime.timedelta(days=1)
14 today = dty.strftime("%Y/%m/%d")
15 tomorrow = str1.strftime("%Y/%m/%d")
16 yesterday = str2.strftime("%Y/%m/%d")
17 ###########################################################################
18 ###########################################################################
19
20
21 path= '/media/soporte/Elements/DATA_AMISR/2022'
22 #path= '/media/soporte/Elements/DATA_AMISR/2022/'
23 outPath = '/home/soporte/Data/data2Fabiano/POWER_H5_AMISR/2022/'
24
25 ###########################################################################
26 ###########################################################################
27 xmin = 7
28 xmax = 18
29 localtime=1
30
31 startDate=today
32 endDate=tomorrow
33
34 startDate='2022/09/24' # perpendicular and oblique pointing
35 endDate='2022/09/25'
36
37 startTime = '18:00:00'
38 endTime = '07:00:00'
39 ###############################################################################
40 ###############################################################################
41 nChannels = 10
42 IPPms =10
43 code = [1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,1,-1,1,1,-1,1]
44 nCode = 1
45 nBaud = 28
46 nOsamp = 2 # oversample
47 dataList = ['data_pow','utctime']
48 ###############################################################################
49 ###############################################################################
50 nipp = (1000/IPPms)/nChannels
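# with IPPms = 10 and nChannels = 10: 1000/10 = 100 pulses per second split over 10 channels -> 10 profiles/s per channel, i.e. 100 profiles every 10 s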
51 ippP10sec = nipp*10
52 print("{} profiles in 10 seconds".format(ippP10sec))
53 ###############################################################################
54 ###############################################################################
55 l = startDate.split('/') #adding day of the year to outPath
56 datelist = datetime.date(int(l[0]),int(l[1]),int(l[2]))
57 DOY = datelist.timetuple().tm_yday
58 year = l[0]
59 month = l[1].zfill(2)
60 day = l[2].zfill(2)
61 doy = str(DOY).zfill(3)
62
63 ###########################################################################
64 ###########################################################################
65 fpathOut = outPath+ "ESF"+l[0]+doy
66
67 if os.path.exists(fpathOut):
68 print("outPath 1: ", fpathOut)
69 else :
70 os.mkdir(fpathOut)
71
72 outPaths = [fpathOut+"/CH{}".format(ch) for ch in range(nChannels)]
73 ###########################################################################
74 ###########################################################################
75
76
77 def schainByChannels(channel, Outdata):
78 '''
79 Process one AMISR channel with schainpy: select the channel, decode, compute spectra, apply incoherent integration and spectral moments, and write the result to Outdata with HDFWriter.
80 '''
81
82 if os.path.exists(Outdata):
83 print("Outdata {}: ".format(channel), Outdata)
84 else :
85 os.mkdir(Outdata)
86
87 controllerObj = Project()
88 controllerObj.setup(id = channel, name='amisr_test', description='desc')
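# read unit: AMISRReader loads data for the selected date/time window, using the code, oversampling and FFT settings defined above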
89 readUnitConfObj = controllerObj.addReadUnit(datatype='AMISRReader',
90 path=path,
91 startDate=startDate,#'2016/07/12',
92 endDate=endDate,#'2016/07/13',
93 startTime=startTime,#'18:00:00',
94 endTime=endTime,#'07:00:00',
95 walk='1',
96 code = code,
97 nCode = nCode,
98 nBaud = nBaud,
99 nOsamp = nOsamp,
100 nFFT = 10,
101 timezone='lt',
102 margin_days=5,
103 online=0)
104
105 proc_volts = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
106
107 opObj03 = proc_volts.addOperation(name='selectChannels', optype='other')
108 opObj03.addParameter(name='channelList', value=[channel], format='list')
109
110 opObj01 = proc_volts.addOperation(name='Decoder', optype='other')
111 opObj01.addParameter(name='code', value=code, format='floatlist')
112 opObj01.addParameter(name='nCode', value=nCode, format='int')
113 opObj01.addParameter(name='nBaud', value=nBaud, format='int')
114 opObj01.addParameter(name='osamp', value=nOsamp, format='int')
115
116
117 proc_spc = controllerObj.addProcUnit(datatype='SpectraProc', inputId=proc_volts.getId())
118 proc_spc.addParameter(name='nFFTPoints', value=10, format='int')
119
120
121 opObj13 = proc_spc.addOperation(name='IncohInt', optype='other')
122 opObj13.addParameter(name='timeInterval', value='20', format='int')
123
124
125 procParam= controllerObj.addProcUnit(datatype='ParametersProc',inputId=proc_spc.getId())
126 moments = procParam.addOperation(name='SpectralMoments',optype='other')
127 opObj12 = procParam.addOperation(name='setAttribute')
128 opObj12.addParameter(name='type', value='Spectra')
129
130 writer = procParam.addOperation(name='HDFWriter',optype='other')
131 writer.addParameter(name='path', value=Outdata)
132 writer.addParameter(name='timeZone', value="ut")
133 writer.addParameter(name='hourLimit', value=14)
134 writer.addParameter(name='breakDays', value=False)
135 writer.addParameter(name='blocksPerFile', value='2340',format='int') #2340
136 # writer1.addParameter(name='metadataList', value='type,timeZone,frequency,channelList,heightList,ippSeconds,\
137 # azimuthList,elevationList,nCohInt,nIncohInt,nFFTPoints',format='list')
138 writer.addParameter(name='metadataList', value='timeZone,type,unitsDescription,\
139 radarControllerHeaderObj.dtype,radarControllerHeaderObj.ipp,radarControllerHeaderObj.txA,radarControllerHeaderObj.frequency,\
140 radarControllerHeaderObj.sampleRate,radarControllerHeaderObj.heightList,radarControllerHeaderObj.elevationList,\
141 radarControllerHeaderObj.azimuthList,radarControllerHeaderObj.channelList,radarControllerHeaderObj.heightResolution,\
142 radarControllerHeaderObj.code,radarControllerHeaderObj.nCode,radarControllerHeaderObj.nBaud,radarControllerHeaderObj.nOsamp,\
143 processingHeaderObj.dtype,processingHeaderObj.ipp,processingHeaderObj.nCohInt,processingHeaderObj.nSamplesFFT,\
144 processingHeaderObj.nFFTPoints,processingHeaderObj.timeIncohInt,processingHeaderObj.heightList,processingHeaderObj.channelList,processingHeaderObj.elevationList,\
145 processingHeaderObj.azimuthList,processingHeaderObj.heightResolution',format='list')
146 writer.addParameter(name='dataList',value='data_pow,utctime',format='list')
147
148 controllerObj.start()
149
150
151 def plotMerged(mergedPath):
152 '''Plot the merged power data with PowerPlot, saving figures under mergedPath, then remove the temporary "pow" figure folder.'''
153 controllerObj = Project()
154 controllerObj.setup(id = '22164', name='esf_proc', description="plot merged power")
155 ##..........................................................................
156 ##..........................................................................
157
158 readerUnit = controllerObj.addReadUnit(datatype='HDFReader',
159 path=mergedPath,
160 startDate=startDate,#startDate,#'2016/07/12',
161 endDate=endDate,#endDate,#'2016/07/13',
162 startTime=startTime,#'07:00:00',
163 endTime=endTime,#'15:00:00',
164 walk=0,
165 timezone='lt',
166 online=0)
167 ##..........................................................................
168 opObj21 = readerUnit.addOperation(name='PowerPlot', optype='external')
169 opObj21.addParameter(name='showprofile', value='1', format='int')
170 opObj21.addParameter(name='xmin', value=xmin, format='int')
171 opObj21.addParameter(name='xmax', value=xmax, format='int')
172 opObj21.addParameter(name='zmin', value=100, format='int')
173 opObj21.addParameter(name='zmax', value=120, format='int')
174 opObj21.addParameter(name='show', value=0, format='int')
175 opObj21.addParameter(name='localtime', value=1,format='int')
176 opObj21.addParameter(name='save', value=mergedPath, format='str')
177 opObj21.addParameter(name='t_units', value='h', format='str')
178
179 controllerObj.start()
180 controllerObj.join()
181
182 powerPath = mergedPath +"/pow"
183 figPaths = [powerPath]
184 for pch in figPaths:
185 print("Removing ",pch)
186 rmtree(pch)
187
188
189 if __name__ == '__main__':
190
191
192
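# launch one schainpy processing chain per AMISR channel, each in its own process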
193 pool = []
194 for ch in range(nChannels):
195 p = Process(target=schainByChannels, args=(ch,outPaths[ch]))
196 pool.append(p)
197 p.start()
198
199 for p in pool: p.join()  # wait for every channel process to finish before merging
200 time.sleep(10)
201
202 ############################################################################
203 ############################################################################
204
205 print("Starting merging proc...")
206 if os.path.exists(fpathOut):
207 print("final path Out: {}: ", fpathOut)
208 else :
209 os.mkdir(fpathOut)
210
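# MergeH5 (schainpy.model.io.utilsIO) stitches the per-channel HDF5 outputs in outPaths into merged files under fpathOut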
211 merger = MergeH5(nChannels,fpathOut,dataList,*outPaths)
212 merger.run()
213 time.sleep(2)
214
215 ############################################################################
216 plotMerged(fpathOut)
217 ############################################################################
218
219 print("Removing hdf5 files from channels...")
220 for pch in outPaths:
221 rmtree(pch)
222 print("Proc finished ! :)")
@@ -1,669 +1,671
1 """
1 """
2 Utilities for IO modules
2 Utilities for IO modules
3 @modified: Joab Apaza
3 @modified: Joab Apaza
4 @email: roj-op01@igp.gob.pe, joab.apaza32@gmail.com
4 @email: roj-op01@igp.gob.pe, joab.apaza32@gmail.com
5
5
6 """
6 """
7 ################################################################################
7 ################################################################################
8 ################################################################################
8 ################################################################################
9 import os
9 import os
10 from datetime import datetime
10 from datetime import datetime
11 import numpy
11 import numpy
12
12
13 from schainpy.model.data.jrodata import Parameters
13 from schainpy.model.data.jrodata import Parameters
14 import itertools
14 import itertools
15 import numpy
15 import numpy
16 import h5py
16 import h5py
17 import re
17 import re
18 import time
18 import time
19 ################################################################################
19 ################################################################################
20 ################################################################################
20 ################################################################################
21 ################################################################################
21 ################################################################################
22 def folder_in_range(folder, start_date, end_date, pattern):
22 def folder_in_range(folder, start_date, end_date, pattern):
23 """
23 """
24 Check whether folder is between start_date and end_date
24 Check whether folder is between start_date and end_date
25
25
26 Args:
26 Args:
27 folder (str): Folder to check
27 folder (str): Folder to check
28 start_date (date): Initial date
28 start_date (date): Initial date
29 end_date (date): Final date
29 end_date (date): Final date
30 pattern (str): Datetime format of the folder
30 pattern (str): Datetime format of the folder
31 Returns:
31 Returns:
32 bool: True for success, False otherwise
32 bool: True for success, False otherwise
33 """
33 """
34 try:
34 try:
35 dt = datetime.strptime(folder, pattern)
35 dt = datetime.strptime(folder, pattern)
36 except:
36 except:
37 raise ValueError('Folder {} does not match {} format'.format(folder, pattern))
37 raise ValueError('Folder {} does not match {} format'.format(folder, pattern))
38 return start_date <= dt.date() <= end_date
38 return start_date <= dt.date() <= end_date
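# Usage sketch (hypothetical folder name and dates, not taken from this changeset):
#   from datetime import date
#   folder_in_range("d2022240", date(2022, 8, 1), date(2022, 9, 30), "d%Y%j")   # True: day 240 of 2022 is 2022-08-28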
39
39
40 ################################################################################
40 ################################################################################
41 ################################################################################
41 ################################################################################
42 ################################################################################
42 ################################################################################
43 def getHei_index( minHei, maxHei, heightList):
43 def getHei_index( minHei, maxHei, heightList):
44 if (minHei < heightList[0]):
44 if (minHei < heightList[0]):
45 minHei = heightList[0]
45 minHei = heightList[0]
46
46
47 if (maxHei > heightList[-1]):
47 if (maxHei > heightList[-1]):
48 maxHei = heightList[-1]
48 maxHei = heightList[-1]
49
49
50 minIndex = 0
50 minIndex = 0
51 maxIndex = 0
51 maxIndex = 0
52 heights = numpy.asarray(heightList)
52 heights = numpy.asarray(heightList)
53
53
54 inda = numpy.where(heights >= minHei)
54 inda = numpy.where(heights >= minHei)
55 indb = numpy.where(heights <= maxHei)
55 indb = numpy.where(heights <= maxHei)
56
56
57 try:
57 try:
58 minIndex = inda[0][0]
58 minIndex = inda[0][0]
59 except:
59 except:
60 minIndex = 0
60 minIndex = 0
61
61
62 try:
62 try:
63 maxIndex = indb[0][-1]
63 maxIndex = indb[0][-1]
64 except:
64 except:
65 maxIndex = len(heightList)
65 maxIndex = len(heightList)
66 return minIndex,maxIndex
66 return minIndex,maxIndex
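# Usage sketch (hypothetical height list in km):
#   getHei_index(100.0, 300.0, [80.0, 150.0, 220.0, 290.0, 360.0])   # -> (1, 3), the indices bounding 100-300 km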
67
67
68 ################################################################################
68 ################################################################################
69 ################################################################################
69 ################################################################################
70 ################################################################################
70 ################################################################################
71 class MergeH5(object):
71 class MergeH5(object):
72 """Processing unit to read HDF5 format files
72 """Processing unit to read HDF5 format files
73
73
74 This unit reads HDF5 files created with the `HDFWriter` operation when the channels are
74 This unit reads HDF5 files created with the `HDFWriter` operation when the channels are
75 processed separately, then merges all channels into single files.
75 processed separately, then merges all channels into single files.
76
76
77 "example"
77 "example"
78 nChannels = 4
78 nChannels = 4
79 pathOut = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/merged"
79 pathOut = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/merged"
80 p0 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch0"
80 p0 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch0"
81 p1 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch1"
81 p1 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch1"
82 p2 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch2"
82 p2 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch2"
83 p3 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch3"
83 p3 = "/home/soporte/Data/OutTest/clean2D/perpAndObliq/byChannels/d2022240_Ch3"
84 list = ['data_spc','data_cspc','nIncohInt','utctime']
84 list = ['data_spc','data_cspc','nIncohInt','utctime']
85 merger = MergeH5(nChannels,pathOut,list, p0, p1,p2,p3)
85 merger = MergeH5(nChannels,pathOut,list, p0, p1,p2,p3)
86 merger.run()
86 merger.run()
87
87
88 The file example_FULLmultiprocessing_merge.txt shows an application for AMISR data
89
88 """
90 """
89
91
90 # #__attrs__ = ['paths', 'nChannels']
92 # #__attrs__ = ['paths', 'nChannels']
91 isConfig = False
93 isConfig = False
92 inPaths = None
94 inPaths = None
93 nChannels = None
95 nChannels = None
94 ch_dataIn = []
96 ch_dataIn = []
95
97
96 channelList = []
98 channelList = []
97
99
98 def __init__(self,nChannels, pOut, dataList, *args):
100 def __init__(self,nChannels, pOut, dataList, *args):
99
101
100 self.inPaths = [p for p in args]
102 self.inPaths = [p for p in args]
101 #print(self.inPaths)
103 #print(self.inPaths)
102 if len(self.inPaths) != nChannels:
104 if len(self.inPaths) != nChannels:
103 print("ERROR, number of channels different from iput paths {} != {}".format(nChannels, len(args)))
105 print("ERROR, number of channels different from iput paths {} != {}".format(nChannels, len(args)))
104 return
106 return
105
107
106 self.pathOut = pOut
108 self.pathOut = pOut
107 self.dataList = dataList
109 self.dataList = dataList
108 self.nChannels = len(self.inPaths)
110 self.nChannels = len(self.inPaths)
109 self.ch_dataIn = [Parameters() for p in args]
111 self.ch_dataIn = [Parameters() for p in args]
110 self.dataOut = Parameters()
112 self.dataOut = Parameters()
111 self.channelList = [n for n in range(nChannels)]
113 self.channelList = [n for n in range(nChannels)]
112 self.blocksPerFile = None
114 self.blocksPerFile = None
113 self.date = None
115 self.date = None
114 self.ext = ".hdf5$"
116 self.ext = ".hdf5$"
115 self.dataList = dataList
117 self.dataList = dataList
116 self.optchar = "D"
118 self.optchar = "D"
117 self.meta = {}
119 self.meta = {}
118 self.data = {}
120 self.data = {}
119 self.open_file = h5py.File
121 self.open_file = h5py.File
120 self.open_mode = 'r'
122 self.open_mode = 'r'
121 self.description = {}
123 self.description = {}
122 self.extras = {}
124 self.extras = {}
123 self.filefmt = "*%Y%j***"
125 self.filefmt = "*%Y%j***"
124 self.folderfmt = "*%Y%j"
126 self.folderfmt = "*%Y%j"
125
127
126 self.flag_spc = False
128 self.flag_spc = False
127 self.flag_pow = False
129 self.flag_pow = False
128 self.flag_snr = False
130 self.flag_snr = False
129 self.flag_nIcoh = False
131 self.flag_nIcoh = False
130 self.flagProcessingHeader = False
132 self.flagProcessingHeader = False
131 self.flagControllerHeader = False
133 self.flagControllerHeader = False
132
134
133 def setup(self):
135 def setup(self):
134
136
135
137
136 # if not self.ext.startswith('.'):
138 # if not self.ext.startswith('.'):
137 # self.ext = '.{}'.format(self.ext)
139 # self.ext = '.{}'.format(self.ext)
138
140
139 self.filenameList = self.searchFiles(self.inPaths, None)
141 self.filenameList = self.searchFiles(self.inPaths, None)
140 self.nfiles = len(self.filenameList[0])
142 self.nfiles = len(self.filenameList[0])
141
143
142
144
143
145
144 def searchFiles(self, paths, date, walk=True):
146 def searchFiles(self, paths, date, walk=True):
145 # self.paths = path
147 # self.paths = path
146 #self.date = startDate
148 #self.date = startDate
147 #self.walk = walk
149 #self.walk = walk
148 filenameList = [[] for n in range(self.nChannels)]
150 filenameList = [[] for n in range(self.nChannels)]
149 ch = 0
151 ch = 0
150 for path in paths:
152 for path in paths:
151 if os.path.exists(path):
153 if os.path.exists(path):
152 print("Searching files in {}".format(path))
154 print("Searching files in {}".format(path))
153 filenameList[ch] = self.getH5files(path, walk)
155 filenameList[ch] = self.getH5files(path, walk)
154 print("Found: ")
156 print("Found: ")
155 for f in filenameList[ch]:
157 for f in filenameList[ch]:
156 print(f)
158 print(f)
157 else:
159 else:
158 self.status = 0
160 self.status = 0
159 print('Path:%s does not exist'%path)
161 print('Path:%s does not exist'%path)
160 return 0
162 return 0
161 ch+=1
163 ch+=1
162 return filenameList
164 return filenameList
163
165
164 def getH5files(self, path, walk):
166 def getH5files(self, path, walk):
165
167
166 dirnameList = []
168 dirnameList = []
167 pat = r'(\d)+.' + self.ext
169 pat = r'(\d)+.' + self.ext
168 if walk:
170 if walk:
169 for root, dirs, files in os.walk(path):
171 for root, dirs, files in os.walk(path):
170 for dir in dirs:
172 for dir in dirs:
171 #print(os.path.join(root,dir))
173 #print(os.path.join(root,dir))
172 files = [re.search(pat,x) for x in os.listdir(os.path.join(root,dir))]
174 files = [re.search(pat,x) for x in os.listdir(os.path.join(root,dir))]
173 #print(files)
175 #print(files)
174 files = [x for x in files if x!=None]
176 files = [x for x in files if x!=None]
175 files = [x.string for x in files]
177 files = [x.string for x in files]
176 files = [os.path.join(root,dir,x) for x in files if x!=None]
178 files = [os.path.join(root,dir,x) for x in files if x!=None]
177 files.sort()
179 files.sort()
178
180
179 dirnameList += files
181 dirnameList += files
180 return dirnameList
182 return dirnameList
181 else:
183 else:
182
184
183 dirnameList = [re.search(pat,x) for x in os.listdir(path)]
185 dirnameList = [re.search(pat,x) for x in os.listdir(path)]
184 dirnameList = [x for x in dirnameList if x!=None]
186 dirnameList = [x for x in dirnameList if x!=None]
185 dirnameList = [x.string for x in dirnameList]
187 dirnameList = [x.string for x in dirnameList]
186 dirnameList = [x for x in dirnameList if x!=None]
188 dirnameList = [x for x in dirnameList if x!=None]
187 dirnameList.sort()
189 dirnameList.sort()
188
190
189 return dirnameList
191 return dirnameList
190
192
191
193
192 def readFile(self,fp,ch):
194 def readFile(self,fp,ch):
193
195
194 '''Read metadata and data'''
196 '''Read metadata and data'''
195
197
196 self.readMetadata(fp,ch)
198 self.readMetadata(fp,ch)
197 #print(self.metadataList)
199 #print(self.metadataList)
198 data = self.readData(fp)
200 data = self.readData(fp)
199
201
200
202
201 for attr in self.meta:
203 for attr in self.meta:
202
204
203 if "processingHeaderObj" in attr:
205 if "processingHeaderObj" in attr:
204 self.flagProcessingHeader=True
206 self.flagProcessingHeader=True
205
207
206 if "radarControllerHeaderObj" in attr:
208 if "radarControllerHeaderObj" in attr:
207 self.flagControllerHeader=True
209 self.flagControllerHeader=True
208
210
209 at = attr.split('.')
211 at = attr.split('.')
210 #print("AT ", at)
212 #print("AT ", at)
211 if len(at) > 1:
213 if len(at) > 1:
212 setattr(eval("self.ch_dataIn[ch]."+at[0]),at[1], self.meta[attr])
214 setattr(eval("self.ch_dataIn[ch]."+at[0]),at[1], self.meta[attr])
213 else:
215 else:
214 setattr(self.ch_dataIn[ch], attr, self.meta[attr])
216 setattr(self.ch_dataIn[ch], attr, self.meta[attr])
215
217
216 self.fill_dataIn(data, self.ch_dataIn[ch])
218 self.fill_dataIn(data, self.ch_dataIn[ch])
217
219
218
220
219 return
221 return
220
222
221
223
222 def readMetadata(self, fp, ch):
224 def readMetadata(self, fp, ch):
223 '''
225 '''
224 Reads Metadata
226 Reads Metadata
225 '''
227 '''
226 meta = {}
228 meta = {}
227 self.metadataList = []
229 self.metadataList = []
228 grp = fp['Metadata']
230 grp = fp['Metadata']
229 for item in grp.values():
231 for item in grp.values():
230 name = item.name
232 name = item.name
231
233
232 if isinstance(item, h5py.Dataset):
234 if isinstance(item, h5py.Dataset):
233 name = name.split("/")[-1]
235 name = name.split("/")[-1]
234 if 'List' in name:
236 if 'List' in name:
235 meta[name] = item[()].tolist()
237 meta[name] = item[()].tolist()
236 else:
238 else:
237 meta[name] = item[()]
239 meta[name] = item[()]
238 self.metadataList.append(name)
240 self.metadataList.append(name)
239 else:
241 else:
240 grp2 = fp[name]
242 grp2 = fp[name]
241 Obj = name.split("/")[-1]
243 Obj = name.split("/")[-1]
242 #print(Obj)
244 #print(Obj)
243 for item2 in grp2.values():
245 for item2 in grp2.values():
244 name2 = Obj+"."+item2.name.split("/")[-1]
246 name2 = Obj+"."+item2.name.split("/")[-1]
245 if 'List' in name2:
247 if 'List' in name2:
246 meta[name2] = item2[()].tolist()
248 meta[name2] = item2[()].tolist()
247 else:
249 else:
248 meta[name2] = item2[()]
250 meta[name2] = item2[()]
249 self.metadataList.append(name2)
251 self.metadataList.append(name2)
250
252
251
253
252
254
253 if not self.meta:
255 if not self.meta:
254 self.meta = meta.copy()
256 self.meta = meta.copy()
255 for key in list(self.meta.keys()):
257 for key in list(self.meta.keys()):
256 if "channelList" in key:
258 if "channelList" in key:
257 self.meta["channelList"] =[n for n in range(self.nChannels)]
259 self.meta["channelList"] =[n for n in range(self.nChannels)]
258 if "processingHeaderObj" in key:
260 if "processingHeaderObj" in key:
259 self.meta["processingHeaderObj.channelList"] =[n for n in range(self.nChannels)]
261 self.meta["processingHeaderObj.channelList"] =[n for n in range(self.nChannels)]
260 if "radarControllerHeaderObj" in key:
262 if "radarControllerHeaderObj" in key:
261 self.meta["radarControllerHeaderObj.channelList"] =[n for n in range(self.nChannels)]
263 self.meta["radarControllerHeaderObj.channelList"] =[n for n in range(self.nChannels)]
262 return 1
264 return 1
263
265
264 else:
266 else:
265
267
266 for k in list(self.meta.keys()):
268 for k in list(self.meta.keys()):
267 if 'List' in k and 'channel' not in k and "height" not in k and "radarControllerHeaderObj" not in k:
269 if 'List' in k and 'channel' not in k and "height" not in k and "radarControllerHeaderObj" not in k:
268 self.meta[k] += meta[k]
270 self.meta[k] += meta[k]
269
271
270 #print("Metadata: ",self.meta)
272 #print("Metadata: ",self.meta)
271 return 1
273 return 1
272
274
273
275
274
276
275
277
276 def fill_dataIn(self,data, dataIn):
278 def fill_dataIn(self,data, dataIn):
277
279
278 for attr in data:
280 for attr in data:
279 if data[attr].ndim == 1:
281 if data[attr].ndim == 1:
280 setattr(dataIn, attr, data[attr][:])
282 setattr(dataIn, attr, data[attr][:])
281 else:
283 else:
282 setattr(dataIn, attr, numpy.squeeze(data[attr][:,:]))
284 setattr(dataIn, attr, numpy.squeeze(data[attr][:,:]))
283 #print("shape in", dataIn.data_spc.shape, len(dataIn.data_spc))
285 #print("shape in", dataIn.data_spc.shape, len(dataIn.data_spc))
284 if self.flag_spc:
286 if self.flag_spc:
285 if dataIn.data_spc.ndim > 3:
287 if dataIn.data_spc.ndim > 3:
286 dataIn.data_spc = dataIn.data_spc[0]
288 dataIn.data_spc = dataIn.data_spc[0]
287 #print("shape in", dataIn.data_spc.shape)
289 #print("shape in", dataIn.data_spc.shape)
288
290
289
291
290
292
291 def getBlocksPerFile(self):
293 def getBlocksPerFile(self):
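# Uses the smallest per-channel block count as blocksPerFile and trims longer channels so every dataList array has the same number of blocks before stacking.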
292 b = numpy.zeros(self.nChannels)
294 b = numpy.zeros(self.nChannels)
293 for i in range(self.nChannels):
295 for i in range(self.nChannels):
294 if self.flag_spc:
296 if self.flag_spc:
295 b[i] = self.ch_dataIn[i].data_spc.shape[0] #number of blocks
297 b[i] = self.ch_dataIn[i].data_spc.shape[0] #number of blocks
296 elif self.flag_pow:
298 elif self.flag_pow:
297 b[i] = self.ch_dataIn[i].data_pow.shape[0] #number of blocks
299 b[i] = self.ch_dataIn[i].data_pow.shape[0] #number of blocks
298 elif self.flag_snr:
300 elif self.flag_snr:
299 b[i] = self.ch_dataIn[i].data_snr.shape[0] #number of blocks
301 b[i] = self.ch_dataIn[i].data_snr.shape[0] #number of blocks
300
302
301 self.blocksPerFile = int(b.min())
303 self.blocksPerFile = int(b.min())
302 iresh_ch = numpy.where(b > self.blocksPerFile)[0]
304 iresh_ch = numpy.where(b > self.blocksPerFile)[0]
303 if len(iresh_ch) > 0:
305 if len(iresh_ch) > 0:
304 for ich in iresh_ch:
306 for ich in iresh_ch:
305 for i in range(len(self.dataList)):
307 for i in range(len(self.dataList)):
306 if hasattr(self.ch_dataIn[ich], self.dataList[i]):
308 if hasattr(self.ch_dataIn[ich], self.dataList[i]):
307 # print("reshaping ", self.dataList[i])
309 # print("reshaping ", self.dataList[i])
308 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
310 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
309 dataAux = getattr(self.ch_dataIn[ich], self.dataList[i])
311 dataAux = getattr(self.ch_dataIn[ich], self.dataList[i])
310 setattr(self.ch_dataIn[ich], self.dataList[i], None)
312 setattr(self.ch_dataIn[ich], self.dataList[i], None)
311 setattr(self.ch_dataIn[ich], self.dataList[i], dataAux[0:self.blocksPerFile])
313 setattr(self.ch_dataIn[ich], self.dataList[i], dataAux[0:self.blocksPerFile])
312 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
314 # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape)
313 else:
315 else:
314 return
316 return
315
317
316
318
317 def getLabel(self, name, x=None):
319 def getLabel(self, name, x=None):
318
320
319 if x is None:
321 if x is None:
320 if 'Data' in self.description:
322 if 'Data' in self.description:
321 data = self.description['Data']
323 data = self.description['Data']
322 if 'Metadata' in self.description:
324 if 'Metadata' in self.description:
323 data.update(self.description['Metadata'])
325 data.update(self.description['Metadata'])
324 else:
326 else:
325 data = self.description
327 data = self.description
326 if name in data:
328 if name in data:
327 if isinstance(data[name], str):
329 if isinstance(data[name], str):
328 return data[name]
330 return data[name]
329 elif isinstance(data[name], list):
331 elif isinstance(data[name], list):
330 return None
332 return None
331 elif isinstance(data[name], dict):
333 elif isinstance(data[name], dict):
332 for key, value in data[name].items():
334 for key, value in data[name].items():
333 return key
335 return key
334 return name
336 return name
335 else:
337 else:
336 if 'Metadata' in self.description:
338 if 'Metadata' in self.description:
337 meta = self.description['Metadata']
339 meta = self.description['Metadata']
338 else:
340 else:
339 meta = self.description
341 meta = self.description
340 if name in meta:
342 if name in meta:
341 if isinstance(meta[name], list):
343 if isinstance(meta[name], list):
342 return meta[name][x]
344 return meta[name][x]
343 elif isinstance(meta[name], dict):
345 elif isinstance(meta[name], dict):
344 for key, value in meta[name].items():
346 for key, value in meta[name].items():
345 return value[x]
347 return value[x]
346
348
347 if 'cspc' in name:
349 if 'cspc' in name:
348 return 'pair{:02d}'.format(x)
350 return 'pair{:02d}'.format(x)
349 else:
351 else:
350 return 'channel{:02d}'.format(x)
352 return 'channel{:02d}'.format(x)
351
353
352 def readData(self, fp):
354 def readData(self, fp):
353 #print("read fp: ", fp)
355 #print("read fp: ", fp)
354 data = {}
356 data = {}
355
357
356 grp = fp['Data']
358 grp = fp['Data']
357
359
358 for name in grp:
360 for name in grp:
359 if "spc" in name:
361 if "spc" in name:
360 self.flag_spc = True
362 self.flag_spc = True
361 if "pow" in name:
363 if "pow" in name:
362 self.flag_pow = True
364 self.flag_pow = True
363 if "snr" in name:
365 if "snr" in name:
364 self.flag_snr = True
366 self.flag_snr = True
365 if "nIncohInt" in name:
367 if "nIncohInt" in name:
366 self.flag_nIcoh = True
368 self.flag_nIcoh = True
367
369
368 if isinstance(grp[name], h5py.Dataset):
370 if isinstance(grp[name], h5py.Dataset):
369 array = grp[name][()]
371 array = grp[name][()]
370 elif isinstance(grp[name], h5py.Group):
372 elif isinstance(grp[name], h5py.Group):
371 array = []
373 array = []
372 for ch in grp[name]:
374 for ch in grp[name]:
373 array.append(grp[name][ch][()])
375 array.append(grp[name][ch][()])
374 array = numpy.array(array)
376 array = numpy.array(array)
375 else:
377 else:
376 print('Unknown type: {}'.format(name))
378 print('Unknown type: {}'.format(name))
377 data[name] = array
379 data[name] = array
378
380
379 return data
381 return data
380
382
381 def getDataOut(self):
383 def getDataOut(self):
382
384
383 self.dataOut = self.ch_dataIn[0].copy() #dataIn #blocks, fft, hei for metadata
385 self.dataOut = self.ch_dataIn[0].copy() #dataIn #blocks, fft, hei for metadata
384 if self.flagProcessingHeader:
386 if self.flagProcessingHeader:
385 self.dataOut.processingHeaderObj = self.ch_dataIn[0].processingHeaderObj.copy()
387 self.dataOut.processingHeaderObj = self.ch_dataIn[0].processingHeaderObj.copy()
386 self.dataOut.heightList = self.dataOut.processingHeaderObj.heightList
388 self.dataOut.heightList = self.dataOut.processingHeaderObj.heightList
387 self.dataOut.ippSeconds = self.dataOut.processingHeaderObj.ipp
389 self.dataOut.ippSeconds = self.dataOut.processingHeaderObj.ipp
388 self.dataOut.channelList = self.dataOut.processingHeaderObj.channelList
390 self.dataOut.channelList = self.dataOut.processingHeaderObj.channelList
389 self.dataOut.nCohInt = self.dataOut.processingHeaderObj.nCohInt
391 self.dataOut.nCohInt = self.dataOut.processingHeaderObj.nCohInt
390 self.dataOut.nFFTPoints = self.dataOut.processingHeaderObj.nFFTPoints
392 self.dataOut.nFFTPoints = self.dataOut.processingHeaderObj.nFFTPoints
391
393
392 if self.flagControllerHeader:
394 if self.flagControllerHeader:
393 self.dataOut.radarControllerHeaderObj = self.ch_dataIn[0].radarControllerHeaderObj.copy()
395 self.dataOut.radarControllerHeaderObj = self.ch_dataIn[0].radarControllerHeaderObj.copy()
394 self.dataOut.frequency = self.dataOut.radarControllerHeaderObj.frequency
396 self.dataOut.frequency = self.dataOut.radarControllerHeaderObj.frequency
395 #--------------------------------------------------------------------
397 #--------------------------------------------------------------------
396
398
397
399
398 #--------------------------------------------------------------------
400 #--------------------------------------------------------------------
399 if self.flag_spc:
401 if self.flag_spc:
400 if self.dataOut.data_spc.ndim < 3:
402 if self.dataOut.data_spc.ndim < 3:
401 print("shape spc in: ",self.dataOut.data_spc.shape )
403 print("shape spc in: ",self.dataOut.data_spc.shape )
402 return 0
404 return 0
403 if self.flag_pow:
405 if self.flag_pow:
404 if self.dataOut.data_pow.ndim < 2:
406 if self.dataOut.data_pow.ndim < 2:
405 print("shape pow in: ",self.dataOut.data_pow.shape )
407 print("shape pow in: ",self.dataOut.data_pow.shape )
406 return 0
408 return 0
407 if self.flag_snr:
409 if self.flag_snr:
408 if self.dataOut.data_snr.ndim < 2:
410 if self.dataOut.data_snr.ndim < 2:
409 print("shape snr in: ",self.dataOut.data_snr.shape )
411 print("shape snr in: ",self.dataOut.data_snr.shape )
410 return 0
412 return 0
411
413
412 self.dataOut.data_spc = None
414 self.dataOut.data_spc = None
413 self.dataOut.data_cspc = None
415 self.dataOut.data_cspc = None
414 self.dataOut.data_pow = None
416 self.dataOut.data_pow = None
415 self.dataOut.data_snr = None
417 self.dataOut.data_snr = None
416 self.dataOut.utctime = None
418 self.dataOut.utctime = None
417 self.dataOut.nIncohInt = None
419 self.dataOut.nIncohInt = None
418 #--------------------------------------------------------------------
420 #--------------------------------------------------------------------
419 if self.flag_spc:
421 if self.flag_spc:
420 spc = [data.data_spc for data in self.ch_dataIn]
422 spc = [data.data_spc for data in self.ch_dataIn]
421 self.dataOut.data_spc = numpy.stack(spc, axis=1) #blocks, ch, fft, hei
423 self.dataOut.data_spc = numpy.stack(spc, axis=1) #blocks, ch, fft, hei
422 #--------------------------------------------------------------------
424 #--------------------------------------------------------------------
423 if self.flag_pow:
425 if self.flag_pow:
424 pow = [data.data_pow for data in self.ch_dataIn]
426 pow = [data.data_pow for data in self.ch_dataIn]
425 self.dataOut.data_pow = numpy.stack(pow, axis=1) #blocks, ch, fft, hei
427 self.dataOut.data_pow = numpy.stack(pow, axis=1) #blocks, ch, fft, hei
426 #--------------------------------------------------------------------
428 #--------------------------------------------------------------------
427 if self.flag_snr:
429 if self.flag_snr:
428 snr = [data.data_snr for data in self.ch_dataIn]
430 snr = [data.data_snr for data in self.ch_dataIn]
429 self.dataOut.data_snr = numpy.stack(snr, axis=1) #blocks, ch, fft, hei
431 self.dataOut.data_snr = numpy.stack(snr, axis=1) #blocks, ch, fft, hei
430
432
431 #--------------------------------------------------------------------
433 #--------------------------------------------------------------------
432 time = [data.utctime for data in self.ch_dataIn]
434 time = [data.utctime for data in self.ch_dataIn]
433 time = numpy.asarray(time).mean(axis=0)
435 time = numpy.asarray(time).mean(axis=0)
434 time = numpy.squeeze(time)
436 time = numpy.squeeze(time)
435 self.dataOut.utctime = time
437 self.dataOut.utctime = time
436 #--------------------------------------------------------------------
438 #--------------------------------------------------------------------
437 if self.flag_nIcoh:
439 if self.flag_nIcoh:
438 ints = [data.nIncohInt for data in self.ch_dataIn]
440 ints = [data.nIncohInt for data in self.ch_dataIn]
439 self.dataOut.nIncohInt = numpy.stack(ints, axis=1)
441 self.dataOut.nIncohInt = numpy.stack(ints, axis=1)
440
442
441 if self.dataOut.nIncohInt.ndim > 3:
443 if self.dataOut.nIncohInt.ndim > 3:
442 aux = self.dataOut.nIncohInt
444 aux = self.dataOut.nIncohInt
443 self.dataOut.nIncohInt = None
445 self.dataOut.nIncohInt = None
444 self.dataOut.nIncohInt = aux[0]
446 self.dataOut.nIncohInt = aux[0]
445
447
446 if self.dataOut.nIncohInt.ndim < 3:
448 if self.dataOut.nIncohInt.ndim < 3:
447 nIncohInt = numpy.repeat(self.dataOut.nIncohInt, self.dataOut.nHeights).reshape(self.blocksPerFile,self.nChannels, self.dataOut.nHeights)
449 nIncohInt = numpy.repeat(self.dataOut.nIncohInt, self.dataOut.nHeights).reshape(self.blocksPerFile,self.nChannels, self.dataOut.nHeights)
448 #nIncohInt = numpy.reshape(nIncohInt, (self.blocksPerFile,self.nChannels, self.dataOut.nHeights))
450 #nIncohInt = numpy.reshape(nIncohInt, (self.blocksPerFile,self.nChannels, self.dataOut.nHeights))
449 self.dataOut.nIncohInt = None
451 self.dataOut.nIncohInt = None
450 self.dataOut.nIncohInt = nIncohInt
452 self.dataOut.nIncohInt = nIncohInt
451
453
452 if (self.dataOut.nIncohInt.shape)[0]==self.nChannels: ## ch,blocks, hei
454 if (self.dataOut.nIncohInt.shape)[0]==self.nChannels: ## ch,blocks, hei
453 self.dataOut.nIncohInt = numpy.swapaxes(self.dataOut.nIncohInt, 0, 1) ## blocks,ch, hei
455 self.dataOut.nIncohInt = numpy.swapaxes(self.dataOut.nIncohInt, 0, 1) ## blocks,ch, hei
454 else:
456 else:
455 self.dataOut.nIncohInt = self.ch_dataIn[0].nIncohInt
457 self.dataOut.nIncohInt = self.ch_dataIn[0].nIncohInt
456 #--------------------------------------------------------------------
458 #--------------------------------------------------------------------
457 #print("utcTime: ", time.shape)
459 #print("utcTime: ", time.shape)
458 #print("data_spc ",self.dataOut.data_spc.shape)
460 #print("data_spc ",self.dataOut.data_spc.shape)
459 if "data_cspc" in self.dataList:
461 if "data_cspc" in self.dataList:
460 pairsList = [pair for pair in itertools.combinations(self.channelList, 2)]
462 pairsList = [pair for pair in itertools.combinations(self.channelList, 2)]
461 #print("PairsList: ", pairsList)
463 #print("PairsList: ", pairsList)
462 self.dataOut.pairsList = pairsList
464 self.dataOut.pairsList = pairsList
463 cspc = []
465 cspc = []
464
466
465 for i, j in pairsList:
467 for i, j in pairsList:
466 cspc.append(self.ch_dataIn[i].data_spc*numpy.conjugate(self.ch_dataIn[j].data_spc)) #blocks, fft, hei
468 cspc.append(self.ch_dataIn[i].data_spc*numpy.conjugate(self.ch_dataIn[j].data_spc)) #blocks, fft, hei
467
469
468 cspc = numpy.asarray(cspc) # # pairs, blocks, fft, hei
470 cspc = numpy.asarray(cspc) # # pairs, blocks, fft, hei
469 #print("cspc: ",cspc.shape)
471 #print("cspc: ",cspc.shape)
470 self.dataOut.data_cspc = numpy.swapaxes(cspc, 0, 1) ## blocks, pairs, fft, hei
472 self.dataOut.data_cspc = numpy.swapaxes(cspc, 0, 1) ## blocks, pairs, fft, hei
471 #print("dataOut.data_cspc: ",self.dataOut.data_cspc.shape)
473 #print("dataOut.data_cspc: ",self.dataOut.data_cspc.shape)
472 #if "data_pow" in self.dataList:
474 #if "data_pow" in self.dataList:
473
475
474 return 1
476 return 1
475
477
476 # def writeMetadata(self, fp):
478 # def writeMetadata(self, fp):
477 #
479 #
478 #
480 #
479 # grp = fp.create_group('Metadata')
481 # grp = fp.create_group('Metadata')
480 #
482 #
481 # for i in range(len(self.metadataList)):
483 # for i in range(len(self.metadataList)):
482 # if not hasattr(self.dataOut, self.metadataList[i]):
484 # if not hasattr(self.dataOut, self.metadataList[i]):
483 # print('Metadata: `{}` not found'.format(self.metadataList[i]))
485 # print('Metadata: `{}` not found'.format(self.metadataList[i]))
484 # continue
486 # continue
485 # value = getattr(self.dataOut, self.metadataList[i])
487 # value = getattr(self.dataOut, self.metadataList[i])
486 # if isinstance(value, bool):
488 # if isinstance(value, bool):
487 # if value is True:
489 # if value is True:
488 # value = 1
490 # value = 1
489 # else:
491 # else:
490 # value = 0
492 # value = 0
491 # grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
493 # grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
492 # return
494 # return
493 def writeMetadata(self, fp):
495 def writeMetadata(self, fp):
494
496
495
497
496 grp = fp.create_group('Metadata')
498 grp = fp.create_group('Metadata')
497
499
498 for i in range(len(self.metadataList)):
500 for i in range(len(self.metadataList)):
499 attribute = self.metadataList[i]
501 attribute = self.metadataList[i]
500 attr = attribute.split('.')
502 attr = attribute.split('.')
501 if '' in attr:
503 if '' in attr:
502 attr.remove('')
504 attr.remove('')
503 #print(attr)
505 #print(attr)
504 if len(attr) > 1:
506 if len(attr) > 1:
505 if not hasattr(eval("self.dataOut."+attr[0]),attr[1]):
507 if not hasattr(eval("self.dataOut."+attr[0]),attr[1]):
506 print('Metadata: {}.{} not found'.format(attr[0],attr[1]))
508 print('Metadata: {}.{} not found'.format(attr[0],attr[1]))
507 continue
509 continue
508 value = getattr(eval("self.dataOut."+attr[0]),attr[1])
510 value = getattr(eval("self.dataOut."+attr[0]),attr[1])
509 if isinstance(value, bool):
511 if isinstance(value, bool):
510 if value is True:
512 if value is True:
511 value = 1
513 value = 1
512 else:
514 else:
513 value = 0
515 value = 0
514 grp2 = None
516 grp2 = None
515 if not 'Metadata/'+attr[0] in fp:
517 if not 'Metadata/'+attr[0] in fp:
516 grp2 = fp.create_group('Metadata/'+attr[0])
518 grp2 = fp.create_group('Metadata/'+attr[0])
517 else:
519 else:
518 grp2 = fp['Metadata/'+attr[0]]
520 grp2 = fp['Metadata/'+attr[0]]
519
521
520 grp2.create_dataset(attr[1], data=value)
522 grp2.create_dataset(attr[1], data=value)
521
523
522 else:
524 else:
523 if not hasattr(self.dataOut, attr[0] ):
525 if not hasattr(self.dataOut, attr[0] ):
524 print('Metadata: `{}` not found'.format(attribute))
526 print('Metadata: `{}` not found'.format(attribute))
525 continue
527 continue
526 value = getattr(self.dataOut, attr[0])
528 value = getattr(self.dataOut, attr[0])
527 if isinstance(value, bool):
529 if isinstance(value, bool):
528 if value is True:
530 if value is True:
529 value = 1
531 value = 1
530 else:
532 else:
531 value = 0
533 value = 0
532 if isinstance(value, type(None)):
534 if isinstance(value, type(None)):
533 print("------ERROR, value {} is None".format(attribute))
535 print("------ERROR, value {} is None".format(attribute))
534
536
535 grp.create_dataset(self.getLabel(attribute), data=value)
537 grp.create_dataset(self.getLabel(attribute), data=value)
536 return
538 return
537
539
538 def getDsList(self):
540 def getDsList(self):
539
541
540 dsList =[]
542 dsList =[]
541 for i in range(len(self.dataList)):
543 for i in range(len(self.dataList)):
542 dsDict = {}
544 dsDict = {}
543 if hasattr(self.dataOut, self.dataList[i]):
545 if hasattr(self.dataOut, self.dataList[i]):
544 dataAux = getattr(self.dataOut, self.dataList[i])
546 dataAux = getattr(self.dataOut, self.dataList[i])
545 dsDict['variable'] = self.dataList[i]
547 dsDict['variable'] = self.dataList[i]
546 else:
548 else:
547 print('Attribute {} not found in dataOut'.format(self.dataList[i]))
549 print('Attribute {} not found in dataOut'.format(self.dataList[i]))
548 continue
550 continue
549
551
550 if dataAux is None:
552 if dataAux is None:
551 continue
553 continue
552 elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
554 elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
553 dsDict['nDim'] = 0
555 dsDict['nDim'] = 0
554 else:
556 else:
555
557
556 dsDict['nDim'] = len(dataAux.shape) -1
558 dsDict['nDim'] = len(dataAux.shape) -1
557 dsDict['shape'] = dataAux.shape
559 dsDict['shape'] = dataAux.shape
558
560
559 if len(dsDict['shape'])>=2:
561 if len(dsDict['shape'])>=2:
560 dsDict['dsNumber'] = dataAux.shape[1]
562 dsDict['dsNumber'] = dataAux.shape[1]
561 else:
563 else:
562 dsDict['dsNumber'] = 1
564 dsDict['dsNumber'] = 1
563 dsDict['dtype'] = dataAux.dtype
565 dsDict['dtype'] = dataAux.dtype
564 # if len(dataAux.shape) == 4:
566 # if len(dataAux.shape) == 4:
565 # dsDict['nDim'] = len(dataAux.shape) -1
567 # dsDict['nDim'] = len(dataAux.shape) -1
566 # dsDict['shape'] = dataAux.shape
568 # dsDict['shape'] = dataAux.shape
567 # dsDict['dsNumber'] = dataAux.shape[1]
569 # dsDict['dsNumber'] = dataAux.shape[1]
568 # dsDict['dtype'] = dataAux.dtype
570 # dsDict['dtype'] = dataAux.dtype
569 # else:
571 # else:
570 # dsDict['nDim'] = len(dataAux.shape)
572 # dsDict['nDim'] = len(dataAux.shape)
571 # dsDict['shape'] = dataAux.shape
573 # dsDict['shape'] = dataAux.shape
572 # dsDict['dsNumber'] = dataAux.shape[0]
574 # dsDict['dsNumber'] = dataAux.shape[0]
573 # dsDict['dtype'] = dataAux.dtype
575 # dsDict['dtype'] = dataAux.dtype
574
576
575 dsList.append(dsDict)
577 dsList.append(dsDict)
576 #print(dsList)
578 #print(dsList)
577 self.dsList = dsList
579 self.dsList = dsList
578
580
579 def clean_dataIn(self):
581 def clean_dataIn(self):
580 for ch in range(self.nChannels):
582 for ch in range(self.nChannels):
581 self.ch_dataIn[ch].data_spc = None
583 self.ch_dataIn[ch].data_spc = None
582 self.ch_dataIn[ch].utctime = None
584 self.ch_dataIn[ch].utctime = None
583 self.ch_dataIn[ch].nIncohInt = None
585 self.ch_dataIn[ch].nIncohInt = None
584 self.meta ={}
586 self.meta ={}
585 self.blocksPerFile = None
587 self.blocksPerFile = None
586
588
587 def writeData(self, outFilename):
589 def writeData(self, outFilename):
588
590
589 self.getDsList()
591 self.getDsList()
590
592
591 fp = h5py.File(outFilename, 'w')
593 fp = h5py.File(outFilename, 'w')
592 self.writeMetadata(fp)
594 self.writeMetadata(fp)
593 grp = fp.create_group('Data')
595 grp = fp.create_group('Data')
594
596
595 dtsets = []
597 dtsets = []
596 data = []
598 data = []
597 for dsInfo in self.dsList:
599 for dsInfo in self.dsList:
598 if dsInfo['nDim'] == 0:
600 if dsInfo['nDim'] == 0:
599 ds = grp.create_dataset(
601 ds = grp.create_dataset(
600 self.getLabel(dsInfo['variable']),(self.blocksPerFile, ),chunks=True,dtype=numpy.float64)
602 self.getLabel(dsInfo['variable']),(self.blocksPerFile, ),chunks=True,dtype=numpy.float64)
601 dtsets.append(ds)
603 dtsets.append(ds)
602 data.append((dsInfo['variable'], -1))
604 data.append((dsInfo['variable'], -1))
603 else:
605 else:
604 label = self.getLabel(dsInfo['variable'])
606 label = self.getLabel(dsInfo['variable'])
605 if label is not None:
607 if label is not None:
606 sgrp = grp.create_group(label)
608 sgrp = grp.create_group(label)
607 else:
609 else:
608 sgrp = grp
610 sgrp = grp
609 k = -1*(dsInfo['nDim'] - 1)
611 k = -1*(dsInfo['nDim'] - 1)
610 #print(k, dsInfo['shape'], dsInfo['shape'][k:])
612 #print(k, dsInfo['shape'], dsInfo['shape'][k:])
611 for i in range(dsInfo['dsNumber']):
613 for i in range(dsInfo['dsNumber']):
612 ds = sgrp.create_dataset(
614 ds = sgrp.create_dataset(
613 self.getLabel(dsInfo['variable'], i),(self.blocksPerFile, ) + dsInfo['shape'][k:],
615 self.getLabel(dsInfo['variable'], i),(self.blocksPerFile, ) + dsInfo['shape'][k:],
614 chunks=True,
616 chunks=True,
615 dtype=dsInfo['dtype'])
617 dtype=dsInfo['dtype'])
616 dtsets.append(ds)
618 dtsets.append(ds)
617 data.append((dsInfo['variable'], i))
619 data.append((dsInfo['variable'], i))
618
620
619 #print("\n",dtsets)
621 #print("\n",dtsets)
620
622
621 print('Creating merged file: {}'.format(fp.filename))
623 print('Creating merged file: {}'.format(fp.filename))
622
624
623 for i, ds in enumerate(dtsets):
625 for i, ds in enumerate(dtsets):
624 attr, ch = data[i]
626 attr, ch = data[i]
625 if ch == -1:
627 if ch == -1:
626 ds[:] = getattr(self.dataOut, attr)
628 ds[:] = getattr(self.dataOut, attr)
627 else:
629 else:
628 #print(ds, getattr(self.dataOut, attr)[ch].shape)
630 #print(ds, getattr(self.dataOut, attr)[ch].shape)
629 aux = getattr(self.dataOut, attr)# block, ch, ...
631 aux = getattr(self.dataOut, attr)# block, ch, ...
630 aux = numpy.swapaxes(aux,0,1) # ch, blocks, ...
632 aux = numpy.swapaxes(aux,0,1) # ch, blocks, ...
631 #print(ds.shape, aux.shape)
633 #print(ds.shape, aux.shape)
632 #ds[:] = getattr(self.dataOut, attr)[ch]
634 #ds[:] = getattr(self.dataOut, attr)[ch]
633 ds[:] = aux[ch]
635 ds[:] = aux[ch]
634
636
635 fp.flush()
637 fp.flush()
636 fp.close()
638 fp.close()
637 self.clean_dataIn()
639 self.clean_dataIn()
638 return
640 return
639
641
640
642
641
643
642 def run(self):
644 def run(self):
643
645
644 if not(self.isConfig):
646 if not(self.isConfig):
645 self.setup()
647 self.setup()
646 self.isConfig = True
648 self.isConfig = True
647
649
648 for nf in range(self.nfiles):
650 for nf in range(self.nfiles):
649 name = None
651 name = None
650 for ch in range(self.nChannels):
652 for ch in range(self.nChannels):
651 name = self.filenameList[ch][nf]
653 name = self.filenameList[ch][nf]
652 filename = os.path.join(self.inPaths[ch], name)
654 filename = os.path.join(self.inPaths[ch], name)
653 fp = h5py.File(filename, 'r')
655 fp = h5py.File(filename, 'r')
654 #print("Opening file: ",filename)
656 #print("Opening file: ",filename)
655 self.readFile(fp,ch)
657 self.readFile(fp,ch)
656 fp.close()
658 fp.close()
657
659
658 if self.blocksPerFile == None:
660 if self.blocksPerFile == None:
659 self.getBlocksPerFile()
661 self.getBlocksPerFile()
660 print("blocks per file: ", self.blocksPerFile)
662 print("blocks per file: ", self.blocksPerFile)
661
663
662 if not self.getDataOut():
664 if not self.getDataOut():
663 print("Error getting DataOut invalid number of blocks")
665 print("Error getting DataOut invalid number of blocks")
664 return
666 return
665 name = name[-16:]
667 name = name[-16:]
666 #print("Final name out: ", name)
668 #print("Final name out: ", name)
667 outFile = os.path.join(self.pathOut, name)
669 outFile = os.path.join(self.pathOut, name)
668 #print("Outfile: ", outFile)
670 #print("Outfile: ", outFile)
669 self.writeData(outFile)
671 self.writeData(outFile)