class IntegrationFaradaySpectra(Operation):
    """Incoherent integration of spectra with per-height outlier rejection.

    Buffers successive spectra (self-spectra, cross-spectra and DC) until
    either ``n`` profiles have been collected or ``timeInterval`` seconds
    have elapsed.  Before summing, a Hildebrand-Sekhon style threshold is
    applied independently per channel/frequency/height to detect outlier
    realizations (e.g. Faraday-rotation contaminated dumps); outliers are
    replaced by the mean of the accepted realizations, in both the
    self-spectra and the cross-spectra, and the cleaned buffer is summed
    onto ``dataOut``.
    """

    # Integration state (name-mangled; private to this class).
    __profIndex = 0
    __withOverapping = False    # (sic) kept as-is for backward compatibility; unused

    __byTime = False
    __initime = None            # utctime of the first profile in the current buffer
    __lastdatatime = None
    __integrationtime = None    # seconds, only used when integrating by time

    __buffer_spc = None
    __buffer_cspc = None
    __buffer_dc = None

    __dataReady = False

    __timeInterval = None

    n = None                    # number of profiles per integration

    def __init__(self):

        Operation.__init__(self)

    def setup(self, dataOut, n=None, timeInterval=None, overlapping=False, DPL=None):
        """
        Set the parameters of the integration class.

        Inputs:

            n             : Number of coherent integrations
            timeInterval  : Time of integration. If the parameter "n" is selected this one does not work
            overlapping   : accepted for interface compatibility; not used here
            DPL           : number of lag profiles; None/0 disables lag processing

        Raises:

            ValueError : if neither ``n`` nor ``timeInterval`` is given.
        """
        self.__initime = None
        self.__lastdatatime = 0

        self.__buffer_spc = []
        self.__buffer_cspc = []
        self.__buffer_dc = 0

        self.__profIndex = 0
        self.__dataReady = False
        self.__byTime = False

        # NOTE(review): lag-based integration is force-disabled in this
        # version; dataOut.ByLags would be the real source once defined.
        self.ByLags = False

        if DPL is not None:
            self.DPL = DPL
        else:
            # NOTE(review): dataOut.DPL would be the real source once defined.
            self.DPL = 0

        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = int(n)
        else:
            self.__integrationtime = int(timeInterval)
            self.n = None
            self.__byTime = True

    def putData(self, data_spc, data_cspc, data_dc):
        """
        Add a profile to the buffers and increase __profIndex by one.

        A ``None`` cross-spectrum / DC disables the corresponding buffer
        for the whole integration period.
        """
        self.__buffer_spc.append(data_spc)

        if data_cspc is None:
            self.__buffer_cspc = None
        else:
            self.__buffer_cspc.append(data_cspc)

        if data_dc is None:
            self.__buffer_dc = None
        else:
            # assumes data_dc is consistently None or not across the whole
            # integration period -- TODO confirm with callers
            self.__buffer_dc += data_dc

        self.__profIndex += 1

        return

    def hildebrand_sekhon_Integration(self, data, navg):
        """Hildebrand-Sekhon threshold over a set of realizations.

        Sorts ``data`` (flattened) in ascending order and finds the first
        position at which the running variance exceeds what ``navg``
        independent noise samples would allow; sorted positions past that
        index are outliers.

        Returns:
            (j, sortID) : number of accepted samples, and the argsort
            indices mapping sorted positions back to the original
            realization indices (so ``sortID[j:]`` are the outliers).
        """
        sortdata = numpy.sort(data, axis=None)
        sortID = data.argsort()
        lenOfData = len(sortdata)
        # Always accept at least 75 % of the realizations (minimum 5)
        # before the statistical test may reject anything.
        nums_min = lenOfData * 0.75
        if nums_min <= 5:
            nums_min = 5
        sump = 0.
        sumq = 0.
        j = 0
        cont = 1
        while (cont == 1) and (j < lenOfData):
            sump += sortdata[j]
            sumq += sortdata[j] ** 2
            if j > nums_min:
                rtest = float(j) / (j - 1) + 1.0 / navg
                if (sumq * j) > (rtest * sump ** 2):
                    # Threshold exceeded: back out the offending sample
                    # and stop scanning.
                    j = j - 1
                    sump = sump - sortdata[j]
                    sumq = sumq - sortdata[j] ** 2
                    cont = 0
            j += 1

        return j, sortID

    def pushData(self):
        """
        Return the sum of the buffered profiles and the number of profiles used.

        For every height (above a hard-coded cutoff) and channel, outlier
        realizations are detected with hildebrand_sekhon_Integration and
        replaced by the mean of the accepted realizations, in both the
        self-spectra buffer and the cross-spectra buffer, before summing.

        Affected:

            self.__buffer_spc, self.__buffer_cspc, self.__buffer_dc,
            self.__profIndex
        """
        self.__buffer_spc = numpy.array(self.__buffer_spc)
        self.__buffer_cspc = numpy.array(self.__buffer_cspc)

        # Heights below index 7 are left untouched (hard-coded cutoff;
        # presumably contaminated low ranges) -- TODO confirm.
        for k in range(7, self.nHeights):
            buffer_cspc = numpy.copy(self.__buffer_cspc[:, :, :, k])
            outliers_IDs_cspc = []
            cspc_outliers_exist = False

            for i in range(self.nChannels):
                buffer1 = numpy.copy(self.__buffer_spc[:, i, :, k])
                indexes = []
                outliers_IDs = []

                for j in range(self.nProfiles):
                    # NOTE(review): the DC profile (channel 0 at the center
                    # frequency, channel 1 at 0) used to be skipped here;
                    # it is currently included in the test.
                    realizations = buffer1[:, j]
                    index, sortID = self.hildebrand_sekhon_Integration(realizations, 1)

                    indexes.append(index)
                    outliers_IDs = numpy.append(outliers_IDs, sortID[index:])

                outliers_IDs = numpy.unique(numpy.array(outliers_IDs).ravel())
                outliers_IDs = outliers_IDs.astype(numpy.dtype('int64'))
                indexes = numpy.array(indexes)
                indexmin = numpy.min(indexes)

                if indexmin != buffer1.shape[0]:
                    cspc_outliers_exist = True
                    lt = outliers_IDs
                    # Mean over the accepted realizations only.
                    avg = numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt], :], axis=0)

                    for p in list(outliers_IDs):
                        buffer1[p, :] = avg

                    self.__buffer_spc[:, i, :, k] = numpy.copy(buffer1)
                    # Collect the rejected realization IDs so the same
                    # realizations are replaced in the cross-spectra.
                    outliers_IDs_cspc = numpy.append(outliers_IDs_cspc, outliers_IDs)

            # BUGFIX: outliers_IDs_cspc stays a plain list when no channel
            # produced outliers, and list has no .astype(); asarray handles
            # both the empty-list and ndarray cases.
            outliers_IDs_cspc = numpy.asarray(outliers_IDs_cspc, dtype=numpy.int64)
            if cspc_outliers_exist:
                lt = outliers_IDs_cspc
                avg = numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt], :], axis=0)
                for p in list(outliers_IDs_cspc):
                    buffer_cspc[p, :] = avg

                self.__buffer_cspc[:, :, :, k] = numpy.copy(buffer_cspc)

        # Sum the cleaned realizations and reset the buffers.
        data_spc = numpy.sum(self.__buffer_spc, axis=0)
        data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
        data_dc = self.__buffer_dc
        n = self.__profIndex

        self.__buffer_spc = []
        self.__buffer_cspc = []
        self.__buffer_dc = 0
        self.__profIndex = 0

        return data_spc, data_cspc, data_dc, n

    def byProfiles(self, *args):
        """Buffer one spectrum; integrate once ``n`` profiles are collected."""
        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None

        self.putData(*args)

        if self.__profIndex == self.n:
            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def byTime(self, datatime, *args):
        """Buffer one spectrum; integrate once the time interval has elapsed."""
        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None

        self.putData(*args)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            # Record how many profiles actually went into this integration.
            self.n = n
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def integrate(self, datatime, *args):
        """Dispatch to byTime/byProfiles.

        Returns ``(initime, spc, cspc, dc)`` when an integration period is
        complete, else ``(None, None, None, None)``.
        """
        if self.__profIndex == 0:
            self.__initime = datatime

        if self.__byTime:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
        else:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)

        if not self.__dataReady:
            return None, None, None, None

        return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc

    def run(self, dataOut, n=None, DPL=None, timeInterval=None, overlapping=False):
        """Operation entry point.

        Keeps ``dataOut.flagNoData`` True while accumulating; once an
        integration period completes, writes the cleaned, summed spectra
        back onto ``dataOut``, scales ``nIncohInt`` and stamps the start
        time of the period.
        """
        if n == 1:
            return dataOut      # nothing to integrate

        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut, n, timeInterval, overlapping, DPL)
            self.isConfig = True

        # Dimensions are refreshed every call (identical in both branches).
        self.nProfiles = dataOut.nProfiles
        self.nChannels = dataOut.nChannels
        self.nHeights = dataOut.nHeights

        if not self.ByLags:
            avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
                                                                                dataOut.data_spc,
                                                                                dataOut.data_cspc,
                                                                                dataOut.data_dc)
        else:
            avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
                                                                                dataOut.dataLag_spc,
                                                                                dataOut.dataLag_cspc,
                                                                                dataOut.dataLag_dc)

        if self.__dataReady:

            if not self.ByLags:
                dataOut.data_spc = numpy.squeeze(avgdata_spc)
                dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
                dataOut.data_dc = avgdata_dc
            else:
                dataOut.dataLag_spc = avgdata_spc
                dataOut.dataLag_cspc = avgdata_cspc
                dataOut.dataLag_dc = avgdata_dc

                # Expose the lag selected for plotting through the regular
                # (non-lag) attributes.
                dataOut.data_spc = dataOut.dataLag_spc[:, :, :, dataOut.LagPlot]
                dataOut.data_cspc = dataOut.dataLag_cspc[:, :, :, dataOut.LagPlot]
                dataOut.data_dc = dataOut.dataLag_dc[:, :, dataOut.LagPlot]

            dataOut.nIncohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False

        return dataOut