@@ -242,14 +242,18 the parameters related to distances such as heightList, or heightResolution which | |||
|
242 | 242 | |
|
243 | 243 | @property |
|
244 | 244 | def ltctime(self): |
|
245 | ||
|
245 | try: | |
|
246 | self.timeZone = self.timeZone.decode("utf-8") | |
|
247 | except Exception as e: | |
|
248 | pass | |
|
249 | ||
|
246 | 250 | if self.useLocalTime: |
|
247 | 251 | if self.timeZone =='lt': |
|
248 | 252 | return self.utctime - 300 * 60 |
|
249 | 253 | elif self.timeZone =='ut': |
|
250 | 254 | return self.utctime |
|
251 | 255 | else: |
|
252 | log.error("No valid timeZone detected") | |
|
256 | log.error("No valid timeZone detected:{}".format(self.timeZone)) | |
|
253 | 257 | return self.utctime |
|
254 | 258 | |
|
255 | 259 | @property |
@@ -107,7 +107,7 class PowerPlot(RTIPlot): | |||
|
107 | 107 | def update(self, dataOut): |
|
108 | 108 | self.update_list(dataOut) |
|
109 | 109 | data = { |
|
110 | 'pow': 10*numpy.log10(dataOut.data_pow | 
110 | 'pow': 10*numpy.log10(dataOut.data_pow) | |
|
111 | 111 | } |
|
112 | 112 | try: |
|
113 | 113 | data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor) |
@@ -728,13 +728,13 class RTIPlot(Plot): | |||
|
728 | 728 | cmap=plt.get_cmap(self.colormap) |
|
729 | 729 | ) |
|
730 | 730 | if self.showprofile: |
|
731 | ax.plot_profile = self.pf_axes[n].plot( | |
|
732 | data[self.CODE][n], self.y)[0] | |
|
731 | ax.plot_profile = self.pf_axes[n].plot(data[self.CODE][n], self.y)[0] | |
|
733 | 732 | if "noise" in self.data: |
|
733 | ||
|
734 | 734 | ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y, |
|
735 | 735 | color="k", linestyle="dashed", lw=1)[0] |
|
736 | 736 | else: |
|
737 |
ax.collections.remove(ax.collections[0]) |
|
|
737 | ax.collections.remove(ax.collections[0]) | |
|
738 | 738 | ax.plt = ax.pcolormesh(x, y, z[n].T, |
|
739 | 739 | vmin=self.zmin, |
|
740 | 740 | vmax=self.zmax, |
@@ -743,8 +743,7 class RTIPlot(Plot): | |||
|
743 | 743 | if self.showprofile: |
|
744 | 744 | ax.plot_profile.set_data(data[self.CODE][n], self.y) |
|
745 | 745 | if "noise" in self.data: |
|
746 | ax.plot_noise | 
|
|
747 | color="k", linestyle="dashed", lw=1)[0] | |
|
746 | ax.plot_noise.set_data(numpy.repeat(data['noise'][n], len(self.y)), self.y) | |
|
748 | 747 | |
|
749 | 748 | class SpectrogramPlot(Plot): |
|
750 | 749 | ''' |
@@ -819,7 +818,7 class SpectrogramPlot(Plot): | |||
|
819 | 818 | cmap=plt.get_cmap(self.colormap) |
|
820 | 819 | ) |
|
821 | 820 | else: |
|
822 |
|
|
|
821 | ax.collections.remove(ax.collections[0]) # error while running | |
|
823 | 822 | ax.plt = ax.pcolormesh(x, y, z[n].T, |
|
824 | 823 | vmin=self.zmin, |
|
825 | 824 | vmax=self.zmax, |
@@ -1541,7 +1540,7 class NoiselessRTIPlot(RTIPlot): | |||
|
1541 | 1540 | ax.plot_profile = self.pf_axes[n].plot(data['noiseless_rti'][n], self.y)[0] |
|
1542 | 1541 | |
|
1543 | 1542 | else: |
|
1544 |
|
|
|
1543 | ax.collections.remove(ax.collections[0]) # error while running | |
|
1545 | 1544 | ax.plt = ax.pcolormesh(x, y, z[n].T, |
|
1546 | 1545 | vmin=self.zmin, |
|
1547 | 1546 | vmax=self.zmax, |
@@ -1622,7 +1621,7 class OutliersRTIPlot(Plot): | |||
|
1622 | 1621 | else: |
|
1623 | 1622 | if self.zlimits is not None: |
|
1624 | 1623 | self.zmin, self.zmax = self.zlimits[n] |
|
1625 |
|
|
|
1624 | ax.collections.remove(ax.collections[0]) # error while running | |
|
1626 | 1625 | ax.plt = ax.pcolormesh(x, y, z[n].T , |
|
1627 | 1626 | vmin=self.zmin, |
|
1628 | 1627 | vmax=self.zmax, |
@@ -1700,7 +1699,7 class NIncohIntRTIPlot(Plot): | |||
|
1700 | 1699 | else: |
|
1701 | 1700 | if self.zlimits is not None: |
|
1702 | 1701 | self.zmin, self.zmax = self.zlimits[n] |
|
1703 |
|
|
|
1702 | ax.collections.remove(ax.collections[0]) # error while running | |
|
1704 | 1703 | ax.plt = ax.pcolormesh(x, y, z[n].T , |
|
1705 | 1704 | vmin=self.zmin, |
|
1706 | 1705 | vmax=self.zmax, |
@@ -174,8 +174,8 class HDFReader(Reader, ProcessingUnit): | |||
|
174 | 174 | self.__readMetadata2() |
|
175 | 175 | self.__readData() |
|
176 | 176 | self.__setBlockList() |
|
177 | if 'type' in self.meta: | |
|
178 | self.dataOut = eval(self.meta['type'])() | |
|
177 | # if 'type' in self.meta: | |
|
178 | # self.dataOut = eval(self.meta['type'])() | |
|
179 | 179 | |
|
180 | 180 | for attr in self.meta: |
|
181 | 181 | if "processingHeaderObj" in attr: |
@@ -268,6 +268,7 class HDFReader(Reader, ProcessingUnit): | |||
|
268 | 268 | Reads Metadata |
|
269 | 269 | ''' |
|
270 | 270 | meta = {} |
|
271 | ||
|
271 | 272 | if self.description: |
|
272 | 273 | for key, value in self.description['Metadata'].items(): |
|
273 | 274 | meta[key] = self.fp[value][()] |
@@ -165,7 +165,7 class MergeH5(object): | |||
|
165 | 165 | def readFile(self,fp,ch): |
|
166 | 166 | '''Read metadata and data''' |
|
167 | 167 | self.readMetadata(fp,ch) |
|
168 | #print(self.metadataList) | |
|
168 | # print(self.metadataList) | |
|
169 | 169 | data = self.readData(fp) |
|
170 | 170 | for attr in self.meta: |
|
171 | 171 | if "processingHeaderObj" in attr: |
@@ -173,7 +173,7 class MergeH5(object): | |||
|
173 | 173 | if "radarControllerHeaderObj" in attr: |
|
174 | 174 | self.flagControllerHeader=True |
|
175 | 175 | at = attr.split('.') |
|
176 | #print("AT ", at) | |
|
176 | # print("AT ", at) | |
|
177 | 177 | if len(at) > 1: |
|
178 | 178 | setattr(eval("self.ch_dataIn[ch]."+at[0]),at[1], self.meta[attr]) |
|
179 | 179 | else: |
@@ -229,7 +229,7 class MergeH5(object): | |||
|
229 | 229 | setattr(dataIn, attr, data[attr][:]) |
|
230 | 230 | else: |
|
231 | 231 | setattr(dataIn, attr, numpy.squeeze(data[attr][:,:])) |
|
232 | #print("shape in", dataIn.data_spc.shape, len(dataIn.data_spc)) | |
|
232 | # print("shape in", dataIn.data_spc.shape, len(dataIn.data_spc)) | |
|
233 | 233 | if self.flag_spc: |
|
234 | 234 | if dataIn.data_spc.ndim > 3: |
|
235 | 235 | dataIn.data_spc = dataIn.data_spc[0] |
@@ -256,6 +256,7 class MergeH5(object): | |||
|
256 | 256 | setattr(self.ch_dataIn[ich], self.dataList[i], dataAux[0:self.blocksPerFile]) |
|
257 | 257 | # print(getattr(self.ch_dataIn[ich], self.dataList[i]).shape) |
|
258 | 258 | else: |
|
259 | # log.error("Channels number error,iresh_ch=", iresh_ch) | |
|
259 | 260 | return |
|
260 | 261 | def getLabel(self, name, x=None): |
|
261 | 262 | if x is None: |
@@ -289,8 +290,9 class MergeH5(object): | |||
|
289 | 290 | return 'pair{:02d}'.format(x) |
|
290 | 291 | else: |
|
291 | 292 | return 'channel{:02d}'.format(x) |
|
293 | ||
|
292 | 294 | def readData(self, fp): |
|
293 | #print("read fp: ", fp) | |
|
295 | # print("read fp: ", fp) | |
|
294 | 296 | data = {} |
|
295 | 297 | grp = fp['Data'] |
|
296 | 298 | for name in grp: |
@@ -302,7 +304,7 class MergeH5(object): | |||
|
302 | 304 | self.flag_snr = True |
|
303 | 305 | if "nIncohInt" in name: |
|
304 | 306 | self.flag_nIcoh = True |
|
305 | ||
|
307 | # print("spc:",self.flag_spc," pow:",self.flag_pow," snr:", self.flag_snr) | |
|
306 | 308 | if isinstance(grp[name], h5py.Dataset): |
|
307 | 309 | array = grp[name][()] |
|
308 | 310 | elif isinstance(grp[name], h5py.Group): |
@@ -314,7 +316,9 class MergeH5(object): | |||
|
314 | 316 | print('Unknown type: {}'.format(name)) |
|
315 | 317 | data[name] = array |
|
316 | 318 | return data |
|
319 | ||
|
317 | 320 | def getDataOut(self): |
|
321 | # print("Getting DataOut") | |
|
318 | 322 | self.dataOut = self.ch_dataIn[0].copy() #dataIn #blocks, fft, hei for metadata |
|
319 | 323 | if self.flagProcessingHeader: |
|
320 | 324 | self.dataOut.processingHeaderObj = self.ch_dataIn[0].processingHeaderObj.copy() |
@@ -381,8 +385,8 class MergeH5(object): | |||
|
381 | 385 | else: |
|
382 | 386 | self.dataOut.nIncohInt = self.ch_dataIn[0].nIncohInt |
|
383 | 387 | #-------------------------------------------------------------------- |
|
384 | #print("utcTime: ", time.shape) | |
|
385 | #print("data_spc ",self.dataOut.data_spc.shape) | |
|
388 | # print("utcTime: ", time.shape) | |
|
389 | # print("data_spc ",self.dataOut.data_spc.shape) | |
|
386 | 390 | if "data_cspc" in self.dataList: |
|
387 | 391 | pairsList = [pair for pair in itertools.combinations(self.channelList, 2)] |
|
388 | 392 | #print("PairsList: ", pairsList) |
@@ -452,8 +456,11 class MergeH5(object): | |||
|
452 | 456 | |
|
453 | 457 | grp.create_dataset(self.getLabel(attribute), data=value) |
|
454 | 458 | return |
|
459 | ||
|
455 | 460 | def getDsList(self): |
|
461 | # print("Getting DS List", self.dataList) | |
|
456 | 462 | dsList =[] |
|
463 | dataAux = None | |
|
457 | 464 | for i in range(len(self.dataList)): |
|
458 | 465 | dsDict = {} |
|
459 | 466 | if hasattr(self.dataOut, self.dataList[i]): |
@@ -464,7 +471,7 class MergeH5(object): | |||
|
464 | 471 | continue |
|
465 | 472 | if dataAux is None: |
|
466 | 473 | continue |
|
467 |
elif isinstance(dataAux, (int, float, numpy.int |
|
|
474 | elif isinstance(dataAux, (int, float, numpy.int_, numpy.float_)): | |
|
468 | 475 | dsDict['nDim'] = 0 |
|
469 | 476 | else: |
|
470 | 477 | dsDict['nDim'] = len(dataAux.shape) -1 |
@@ -485,8 +492,9 class MergeH5(object): | |||
|
485 | 492 | # dsDict['dsNumber'] = dataAux.shape[0] |
|
486 | 493 | # dsDict['dtype'] = dataAux.dtype |
|
487 | 494 | dsList.append(dsDict) |
|
488 | #print(dsList) | |
|
495 | # print("dsList: ", dsList) | |
|
489 | 496 | self.dsList = dsList |
|
497 | ||
|
490 | 498 | def clean_dataIn(self): |
|
491 | 499 | for ch in range(self.nChannels): |
|
492 | 500 | self.ch_dataIn[ch].data_spc = None |
@@ -494,9 +502,11 class MergeH5(object): | |||
|
494 | 502 | self.ch_dataIn[ch].nIncohInt = None |
|
495 | 503 | self.meta ={} |
|
496 | 504 | self.blocksPerFile = None |
|
505 | ||
|
497 | 506 | def writeData(self, outFilename): |
|
498 | 507 | self.getDsList() |
|
499 | 508 | fp = h5py.File(outFilename, 'w') |
|
509 | # print("--> Merged file: ",fp) | |
|
500 | 510 | self.writeMetadata(fp) |
|
501 | 511 | grp = fp.create_group('Data') |
|
502 | 512 | dtsets = [] |
@@ -514,7 +524,7 class MergeH5(object): | |||
|
514 | 524 | else: |
|
515 | 525 | sgrp = grp |
|
516 | 526 | k = -1*(dsInfo['nDim'] - 1) |
|
517 | #print(k, dsInfo['shape'], dsInfo['shape'][k:]) | |
|
527 | # print(k, dsInfo['shape'], dsInfo['shape'][k:]) | |
|
518 | 528 | for i in range(dsInfo['dsNumber']): |
|
519 | 529 | ds = sgrp.create_dataset( |
|
520 | 530 | self.getLabel(dsInfo['variable'], i),(self.blocksPerFile, ) + dsInfo['shape'][k:], |
@@ -539,6 +549,7 class MergeH5(object): | |||
|
539 | 549 | fp.close() |
|
540 | 550 | self.clean_dataIn() |
|
541 | 551 | return |
|
552 | ||
|
542 | 553 | def run(self): |
|
543 | 554 | if not(self.isConfig): |
|
544 | 555 | self.setup() |
@@ -549,7 +560,7 class MergeH5(object): | |||
|
549 | 560 | name = self.filenameList[ch][nf] |
|
550 | 561 | filename = os.path.join(self.inPaths[ch], name) |
|
551 | 562 | fp = h5py.File(filename, 'r') |
|
552 |
|
|
|
563 | print("Opening file: ",filename) | |
|
553 | 564 | self.readFile(fp,ch) |
|
554 | 565 | fp.close() |
|
555 | 566 | if self.blocksPerFile == None: |
@@ -559,7 +570,7 class MergeH5(object): | |||
|
559 | 570 | print("Error getting DataOut invalid number of blocks") |
|
560 | 571 | return |
|
561 | 572 | name = name[-16:] |
|
562 | #print("Final name out: ", name) | |
|
573 | # print("Final name out: ", name) | |
|
563 | 574 | outFile = os.path.join(self.pathOut, name) |
|
564 | #print("Outfile: ", outFile) | |
|
575 | # print("Outfile: ", outFile) | |
|
565 | 576 | self.writeData(outFile) No newline at end of file |
@@ -137,12 +137,13 class SpectraProc(ProcessingUnit): | |||
|
137 | 137 | |
|
138 | 138 | def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False, |
|
139 | 139 | zeroPad=False, zeroPoints=0, runNextUnit=0): |
|
140 | ||
|
140 | 141 | self.dataIn.runNextUnit = runNextUnit |
|
141 | 142 | try: |
|
142 | type = self.dataIn.type.decode("utf-8") | |
|
143 | self.dataIn.type = type | |
|
143 | _type = self.dataIn.type.decode("utf-8") | |
|
144 | self.dataIn.type = _type | |
|
144 | 145 | except Exception as e: |
|
145 |
# |
|
|
146 | #print("spc -> ",self.dataIn.type, e) | |
|
146 | 147 | pass |
|
147 | 148 | |
|
148 | 149 | if self.dataIn.type == "Spectra": |
General Comments 0
You need to be logged in to leave comments.
Login now