jroproc_voltage.py
3089 lines
| 105.1 KiB
| text/x-python
|
PythonLexer
|
r723 | import sys | |
r1296 | import numpy,math | ||
|
r837 | from scipy import interpolate | |
|
r1178 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator | |
r1314 | from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon | ||
|
r1168 | from schainpy.utils import log | |
r1553 | from schainpy.model.io.utilsIO import getHei_index | ||
|
r1173 | from time import time | |
r1547 | import datetime | ||
r1417 | import numpy | ||
r1528 | #import copy | ||
from schainpy.model.data import _noise | |||
|
r1287 | ||
r1590 | from matplotlib import pyplot as plt | ||
class VoltageProc(ProcessingUnit):
    """
    Processing unit that exposes incoming voltage data as ``self.dataOut``.

    Copies the input ``Voltage`` object (or adapts an AMISR input) into
    ``self.dataOut`` so that downstream Operations can work on it.
    """

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = Voltage()
        self.flip = 1  # sign state used by flip-related operations
        self.setupReq = False

    def run(self):
        """Copy dataIn into dataOut and refresh the processing header."""
        #print("running volt proc")

        if self.dataIn.type == 'AMISR':
            self.__updateObjFromAmisrInput()

        # only refresh dataOut while it is not holding buffered data
        if self.dataOut.buffer_empty:
            if self.dataIn.type == 'Voltage':
                self.dataOut.copy(self.dataIn)
                self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
                self.dataOut.ippSeconds = self.dataIn.ippSeconds
                self.dataOut.ipp = self.dataIn.ipp
                #update Processing Header:
                self.dataOut.processingHeaderObj.heightList = self.dataOut.heightList
                self.dataOut.processingHeaderObj.ipp = self.dataOut.ipp
                self.dataOut.processingHeaderObj.nCohInt = self.dataOut.nCohInt
                self.dataOut.processingHeaderObj.dtype = self.dataOut.type
                self.dataOut.processingHeaderObj.channelList = self.dataOut.channelList
                self.dataOut.processingHeaderObj.azimuthList = self.dataOut.azimuthList
                self.dataOut.processingHeaderObj.elevationList = self.dataOut.elevationList
                # NOTE(review): codeList is assigned the *channel count*, not a
                # list of codes — looks suspicious, confirm against the header class
                self.dataOut.processingHeaderObj.codeList = self.dataOut.nChannels
                self.dataOut.processingHeaderObj.heightList = self.dataOut.heightList
                self.dataOut.processingHeaderObj.heightResolution = self.dataOut.heightList[1] - self.dataOut.heightList[0]

    def __updateObjFromAmisrInput(self):
        """Copy the AMISR-specific attributes from dataIn into dataOut."""

        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.dstFlag = self.dataIn.dstFlag
        self.dataOut.errorCount = self.dataIn.errorCount
        self.dataOut.useLocalTime = self.dataIn.useLocalTime

        self.dataOut.flagNoData = self.dataIn.flagNoData
        self.dataOut.data = self.dataIn.data
        self.dataOut.utctime = self.dataIn.utctime
        self.dataOut.channelList = self.dataIn.channelList

        #self.dataOut.timeInterval = self.dataIn.timeInterval

        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.nProfiles = self.dataIn.nProfiles

        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.frequency = self.dataIn.frequency

        self.dataOut.azimuth = self.dataIn.azimuth
        self.dataOut.zenith = self.dataIn.zenith

        self.dataOut.beam.codeList = self.dataIn.beam.codeList
        self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
        self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
|
class selectChannels(Operation):
    """
    Keep only the channels listed in ``channelList``, dropping the rest from
    dataOut (data, channelList and the per-channel metadata).
    """

    def run(self, dataOut, channelList=None):
        """
        Parameters
        ----------
        dataOut : Voltage | Spectra
            Data object to filter in place.
        channelList : list of int
            Channel *numbers* (values found in dataOut.channelList) to keep.

        Returns
        -------
        dataOut with only the selected channels.
        """
        self.channelList = channelList
        if self.channelList is None:
            print("Missing channelList")
            return dataOut

        channelIndexList = []
        if not dataOut.buffer_empty:  # when volt proc is used as a data buffer
            return dataOut
        #print("channel List: ", dataOut.channelList)
        if type(dataOut.channelList) is not list:  # channelList read from HDF5 comes as an array
            try:
                dataOut.channelList = dataOut.channelList.tolist()
            except Exception as e:
                print("Select Channels: ", e)

        for channel in self.channelList:
            if channel not in dataOut.channelList:
                raise ValueError("Channel %d is not in %s" % (channel, str(dataOut.channelList)))
            channelIndexList.append(dataOut.channelList.index(channel))

        dataOut = self.selectChannelsByIndex(dataOut, channelIndexList)

        #update Processing Header:
        dataOut.processingHeaderObj.channelList = dataOut.channelList
        dataOut.processingHeaderObj.elevationList = dataOut.elevationList
        dataOut.processingHeaderObj.azimuthList = dataOut.azimuthList
        dataOut.processingHeaderObj.codeList = dataOut.codeList
        dataOut.processingHeaderObj.nChannels = len(dataOut.channelList)
        return dataOut

    def selectChannelsByIndex(self, dataOut, channelIndexList):
        """
        Select a block of data by channel *indexes* (e.g. [2, 3, 7]).

        Affected:
            dataOut.data, dataOut.channelList and the per-channel metadata
            (elevationList, azimuthList, codeList); cross-spectra pairs for
            Spectra objects.
        """
        if dataOut.type == 'Voltage':
            if dataOut.flagDataAsBlock:
                # block data: [nChannels, nProfiles, nHeis]
                data = dataOut.data[channelIndexList, :, :]
            else:
                data = dataOut.data[channelIndexList, :]
            dataOut.data = data
            # channels are renumbered 0..N-1 after the selection
            dataOut.channelList = [n for n in range(len(channelIndexList))]
        elif dataOut.type == 'Spectra':
            if hasattr(dataOut, 'data_spc'):
                if dataOut.data_spc is None:
                    raise ValueError("data_spc is None")
                dataOut.data_spc = dataOut.data_spc[channelIndexList, :]
            dataOut.channelList = channelIndexList
            dataOut = self.__selectPairsByChannel(dataOut, channelIndexList)

        # fixed: original read len(dataOut.elevationList > 0), which only
        # worked by accident on numpy arrays (and crashed on plain lists)
        if len(dataOut.elevationList) > 0:
            # assumes elevation/azimuth/code lists are numpy arrays that accept
            # fancy indexing — TODO confirm against the data class
            dataOut.elevationList = dataOut.elevationList[channelIndexList]
            dataOut.azimuthList = dataOut.azimuthList[channelIndexList]
            dataOut.codeList = dataOut.codeList[channelIndexList]
        return dataOut

    def __selectPairsByChannel(self, dataOut, channelList=None):
        """Keep only cross-spectra pairs whose two channels were both selected."""
        if channelList is None:
            # fixed: always return dataOut — the caller reassigns its dataOut
            # from this method, so returning None corrupted the pipeline
            return dataOut

        pairsIndexListSelected = []
        for pairIndex in dataOut.pairsIndexList:
            # both members of the pair must survive the channel selection
            if dataOut.pairsList[pairIndex][0] not in channelList:
                continue
            if dataOut.pairsList[pairIndex][1] not in channelList:
                continue
            pairsIndexListSelected.append(pairIndex)

        if not pairsIndexListSelected:
            dataOut.data_cspc = None
            dataOut.pairsList = []
            return dataOut

        dataOut.data_cspc = dataOut.data_cspc[pairsIndexListSelected]
        dataOut.pairsList = [dataOut.pairsList[i]
                             for i in pairsIndexListSelected]
        return dataOut
|
r1287 | ||
class selectHeights(Operation):
    """
    Keep only the heights between minHei and maxHei (or between the given
    height indexes) in the data object.
    """

    def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
        """
        Select a block of data by height range: minHei <= height <= maxHei.

        Parameters
        ----------
        minHei, maxHei : float, optional
            Height range in the units of dataOut.heightList. When both are
            given they take precedence and are converted to indexes.
        minIndex, maxIndex : int, optional
            Height indexes, used directly when minHei/maxHei are not given.

        Returns
        -------
        dataOut trimmed in place.
        """
        self.dataOut = dataOut

        # fixed: original tested `if minHei and maxHei`, which wrongly fell
        # through to the (possibly None) index arguments when minHei == 0.0
        if minHei is not None and maxHei is not None:

            # clamp the requested range to the available heights
            if minHei < dataOut.heightList[0]:
                minHei = dataOut.heightList[0]

            if maxHei > dataOut.heightList[-1]:
                maxHei = dataOut.heightList[-1]

            minIndex = 0
            maxIndex = 0
            heights = dataOut.heightList

            inda = numpy.where(heights >= minHei)
            indb = numpy.where(heights <= maxHei)

            try:
                minIndex = inda[0][0]
            except IndexError:  # no height >= minHei
                minIndex = 0

            try:
                maxIndex = indb[0][-1]
            except IndexError:  # no height <= maxHei
                maxIndex = len(heights)

        self.selectHeightsByIndex(minIndex, maxIndex)

        #update Processing Header:
        dataOut.processingHeaderObj.heightList = dataOut.heightList
        return dataOut

    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Select a block of data by height indexes: minIndex <= index <= maxIndex.

        Affected:
            self.dataOut.data and self.dataOut.heightList (plus the spectra,
            cross-spectra and DC buffers for Spectra objects).

        Raises
        ------
        ValueError
            On an invalid index range, or when too few heights would remain.
        """
        if self.dataOut.type == 'Voltage':
            if (minIndex < 0) or (minIndex > maxIndex):
                raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))

            if maxIndex >= self.dataOut.nHeights:
                maxIndex = self.dataOut.nHeights

            # voltage — NOTE: the slice end is exclusive here (maxIndex),
            # while the Spectra branch below *includes* maxIndex (+ 1)
            if self.dataOut.flagDataAsBlock:
                # block data: [nChannels, nProfiles, nHeis]
                data = self.dataOut.data[:, :, minIndex:maxIndex]
            else:
                data = self.dataOut.data[:, minIndex:maxIndex]
            self.dataOut.data = data
            self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
            if self.dataOut.nHeights <= 1:
                raise ValueError("selectHeights: Too few heights. Current number of heights is %d" % (self.dataOut.nHeights))

        elif self.dataOut.type == 'Spectra':
            if (minIndex < 0) or (minIndex > maxIndex):
                raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
                    minIndex, maxIndex))

            if maxIndex >= self.dataOut.nHeights:
                maxIndex = self.dataOut.nHeights - 1

            # Spectra
            data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]

            data_cspc = None
            if self.dataOut.data_cspc is not None:
                data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]

            data_dc = None
            if self.dataOut.data_dc is not None:
                data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]

            self.dataOut.data_spc = data_spc
            self.dataOut.data_cspc = data_cspc
            self.dataOut.data_dc = data_dc
            self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]

        return 1
|
r897 | ||
|
class filterByHeights(Operation):
    """
    Decimate the height axis by summing groups of ``window`` consecutive
    height samples (boxcar filter + decimation).
    """
    # setup state, cached on the first run() call (ifConfig latch)
    ifConfig = False
    deltaHeight = None
    newdelta = None
    newheights = None
    r = None
    h0 = None
    nHeights = None

    def run(self, dataOut, window):
        """
        Parameters
        ----------
        dataOut : Voltage
            Data object whose height axis is decimated in place.
        window : int or None
            Number of heights summed per output height. When None it is
            derived from the pulse width: (txA / nBaud) / deltaHeight.
        """
        # fixed: the height resolution must be known *before* a default
        # window can be derived from it — the original divided by
        # self.deltaHeight while it was still None on the first call
        if self.deltaHeight is None:
            self.deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        if window is None:
            window = (dataOut.radarControllerHeaderObj.txA / dataOut.radarControllerHeaderObj.nBaud) / self.deltaHeight

        if not self.ifConfig:  # and dataOut.useInputBuffer:
            self.ifConfig = True
            self.newdelta = self.deltaHeight * window
            self.r = dataOut.nHeights % window
            self.newheights = (dataOut.nHeights - self.r) / window
            self.h0 = dataOut.heightList[0]
            self.nHeights = dataOut.nHeights
            if self.newheights <= 1:
                raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" % (dataOut.nHeights, window))

        if dataOut.flagDataAsBlock:
            # block data: [nChannels, nProfiles, nHeis]
            buffer = dataOut.data[:, :, 0:int(self.nHeights - self.r)]
            # fixed: window is cast to int in both branches (reshape rejects
            # a float axis length; the original only cast it in the
            # profile-by-profile branch)
            buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(self.nHeights / window), int(window))
            buffer = numpy.sum(buffer, 3)
        else:
            buffer = dataOut.data[:, 0:int(self.nHeights - self.r)]
            buffer = buffer.reshape(dataOut.nChannels, int(self.nHeights / window), int(window))
            buffer = numpy.sum(buffer, 2)

        dataOut.data = buffer
        dataOut.heightList = self.h0 + numpy.arange(self.newheights) * self.newdelta
        dataOut.windowOfFilter = window

        #update Processing Header:
        dataOut.processingHeaderObj.heightList = dataOut.heightList
        dataOut.processingHeaderObj.nWindows = window

        return dataOut
|
r568 | ||
r1655 | |||
|
class setH0(Operation):
    """Shift the height axis so that it starts at a given first height h0."""

    def run(self, dataOut, h0, deltaHeight = None):
        """
        Rebuild dataOut.heightList starting at h0, keeping the number of
        heights; deltaHeight defaults to the spacing of the current axis.
        """
        if not deltaHeight:
            # fall back to the resolution of the existing height axis
            deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        heights = numpy.arange(dataOut.nHeights) * deltaHeight + h0
        dataOut.heightList = heights

        # keep the processing header in sync with the new axis
        dataOut.processingHeaderObj.heightList = dataOut.heightList

        return dataOut
|
r897 | ||
|
class deFlip(Operation):
    """
    Undo the alternating +/- sign (flip) applied to consecutive profiles,
    optionally restricted to a subset of channels.
    """
    # fixed: sign state for the next profile. Defined at class level because
    # run() reads self.flip and no __init__ ever set it — the first call used
    # to raise AttributeError on a fresh instance.
    flip = 1

    def run(self, dataOut, channelList = []):
        """
        Parameters
        ----------
        dataOut : Voltage
            Data whose profiles are sign-corrected in place (on a copy).
        channelList : list of int
            Channels to de-flip; an empty list means all channels.
            (The default list is never mutated.)
        """
        data = dataOut.data.copy()

        if dataOut.flagDataAsBlock:
            flip = self.flip
            profileList = list(range(dataOut.nProfiles))

            if not channelList:
                # alternate the sign profile by profile on every channel
                for thisProfile in profileList:
                    data[:, thisProfile, :] = data[:, thisProfile, :] * flip
                    flip *= -1.0
            else:
                for thisChannel in channelList:
                    if thisChannel not in dataOut.channelList:
                        continue
                    # NOTE(review): flip is NOT reset between channels, so the
                    # starting sign of each channel depends on nProfiles parity
                    # — kept as-is to preserve behavior
                    for thisProfile in profileList:
                        data[thisChannel, thisProfile, :] = data[thisChannel, thisProfile, :] * flip
                        flip *= -1.0

            # remember the sign for the next block
            self.flip = flip

        else:
            if not channelList:
                data[:, :] = data[:, :] * self.flip
            else:
                for thisChannel in channelList:
                    if thisChannel not in dataOut.channelList:
                        continue
                    data[thisChannel, :] = data[thisChannel, :] * self.flip

            # profile-by-profile mode: toggle once per call
            self.flip *= -1.

        dataOut.data = data

        return dataOut
|
r897 | ||
|
class setAttribute(Operation):
    '''
    Assign one or more arbitrary attributes to dataOut.
    '''

    def __init__(self):
        Operation.__init__(self)
        self._ready = False

    def run(self, dataOut, **kwargs):
        '''Set every keyword argument as an attribute on dataOut and return it.'''
        for name in kwargs:
            setattr(dataOut, name, kwargs[name])
        return dataOut
|
@MPDecorator
class printAttribute(Operation):
    '''
    Print an arbitrary attribute of dataOut
    '''

    def __init__(self):
        Operation.__init__(self)

    def run(self, dataOut, attributes):
        '''Log each named attribute that exists on dataOut (returns None).'''
        names = [attributes] if isinstance(attributes, str) else attributes
        for name in names:
            if hasattr(dataOut, name):
                log.log(getattr(dataOut, name), name)
class cleanHeightsInterf(Operation):
    # Replaces interference-contaminated height samples: each listed height is
    # rescaled to the magnitude of the height just below it (phase preserved).
    __slots__ =('heights_indx', 'repeats', 'step', 'factor', 'idate', 'idxs','config','wMask')
    def __init__(self):
        # lazy configuration: height indexes and mask are resolved on the
        # first run() call whose timestamp falls inside the time window
        self.repeats = 0
        self.factor=1
        self.wMask = None
        self.config = False
        self.idxs = None
        self.heights_indx = None

    def run(self, dataOut, heightsList, repeats=0, step=0, factor=1, idate=None, startH=None, endH=None):
        """
        Clean the listed heights, only for profiles between startH and endH
        on date idate.

        heightsList : base heights to clean (units of dataOut.heightList)
        repeats/step : replicate the height list, shifting by `step` each pass
            (NOTE(review): the comprehension below reads the *growing*
            `heights` list, so the list doubles on every pass — confirm this
            exponential growth is intended)
        factor : kept in wMask but currently unused — the masked-multiply
            lines below are commented out
        """
        #print(dataOut.data.shape)
        startTime = datetime.datetime.combine(idate,startH)
        endTime = datetime.datetime.combine(idate,endH)
        currentTime = datetime.datetime.fromtimestamp(dataOut.utctime)
        # outside the configured time window: pass the data through untouched
        if currentTime < startTime or currentTime > endTime:
            return dataOut
        if not self.config:
            #print(wMask)
            heights = [float(hei) for hei in heightsList]
            for r in range(repeats):
                heights += [ (h+(step*(r+1))) for h in heights]
            #print(heights)
            heiList = dataOut.heightList
            # map each height value to its index on the current height axis
            self.heights_indx = [getHei_index(h,h,heiList)[0] for h in heights]
            self.wMask = numpy.asarray(factor)
            self.wMask = numpy.tile(self.wMask,(repeats+2))
            self.config = True
        """
        getNoisebyHildebrand(self, channel=None, ymin_index=None, ymax_index=None)
        """
        #print(self.noise =10*numpy.log10(dataOut.getNoisebyHildebrand(ymin_index=self.min_ref, ymax_index=self.max_ref)))

        for ch in range(dataOut.data.shape[0]):
            i = 0

            for hei in self.heights_indx:
                h = hei - 1
                if dataOut.data.ndim < 3:
                    # profile data [ch, hei]: keep the phase at h, replace its
                    # magnitude with the one of the height below (h-1)
                    module = numpy.absolute(dataOut.data[ch,h])
                    prev_h1 = numpy.absolute(dataOut.data[ch,h-1])
                    dataOut.data[ch,h] = (dataOut.data[ch,h])/module * prev_h1
                    #dataOut.data[ch,hei-1] = (dataOut.data[ch,hei-1])*self.wMask[i]
                else:
                    # block data [ch, profile, hei]: same operation per profile
                    module = numpy.absolute(dataOut.data[ch,:,h])
                    prev_h1 = numpy.absolute(dataOut.data[ch,:,h-1])
                    dataOut.data[ch,:,h] = (dataOut.data[ch,:,h])/module * prev_h1
                    #dataOut.data[ch,:,hei-1] = (dataOut.data[ch,:,hei-1])*self.wMask[i]
                #print("done")
                i += 1

        return dataOut
|
r1308 | ||
r1548 | |||
|
class interpolateHeights(Operation):
    """Fill the height gap [botLim, topLim] with interpolated samples."""

    def run(self, dataOut, topLim, botLim):
        '''
        Replace heights botLim..topLim (inclusive) using the surrounding
        heights: mean of the two bracketing heights for 2-D (profile) data,
        linear interpolation along the height axis for 3-D (block) data.
        '''
        #69 al 72 para julia
        #82-84 para meteoros
        gap = topLim - botLim + 1

        if len(numpy.shape(dataOut.data)) == 2:
            # profile data: fill the gap with the average of its two edges
            edgeMean = (dataOut.data[:, botLim - 1] + dataOut.data[:, topLim + 1]) / 2
            dataOut.data[:, botLim:topLim + 1] = numpy.transpose(numpy.tile(edgeMean, (gap, 1)))
        else:
            nHeights = dataOut.data.shape[2]
            # indexes of the heights that are kept (everything outside the gap)
            keep = list(range(botLim)) + list(range(topLim + 1, nHeights))
            x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
            y = dataOut.data[:, :, keep]

            interpFn = interpolate.interp1d(x, y, axis=2)
            dataOut.data[:, :, botLim:topLim + 1] = interpFn(numpy.arange(botLim, topLim + 1))

        return dataOut
|
r837 | ||
|
r1177 | ||
|
class CohInt(Operation):
    """
    Coherent integration: sum n consecutive profiles (or every profile inside
    a time interval) to improve SNR, with optional overlapping or striding.
    """

    isConfig = False
    __profIndex = 0
    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None
    __buffer = None

    # NOTE(review): mutable class-level default; integrateByStride rebuilds it
    # per instance before use, so it is not actually shared in practice
    __bufferStride = []

    __dataReady = False

    __profIndexStride = 0
    __dataToPutStride = False

    n = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration. If the parameter "n" is selected this one does not work
            overlapping :
        """
        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        self.byblock = byblock
        self.stride = stride

        if n == None and timeInterval == None:
            raise ValueError("n or timeInterval should be specified ...")

        if n != None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverlapping = True
            self.__buffer = None
        else:
            self.__withOverlapping = False
            self.__buffer = 0  # accumulated sum (not a stack) when not overlapping

        self.__profIndex = 0

    def putData(self, data):
        """
        Add a profile to the __buffer and increase in one the __profileIndex
        """
        if not self.__withOverlapping:
            # running sum: no per-profile history is kept
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        #Overlapping data
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        #If the buffer is empty then it takes the data value
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        #If the buffer length is lower than n then stacking the data value
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        #If the buffer length is equal to n then replacing the last buffer value with the data value
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n - 1] = data
        self.__profIndex = self.n
        return

    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

        self.__profileIndex
        """
        if not self.__withOverlapping:
            data = self.__buffer
            n = self.__profIndex

            # reset the running sum for the next integration period
            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        #Integration with Overlapping: the stack is summed but kept
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        """Accumulate one profile; return the integrated data every n profiles."""
        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:
            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """Accumulate one profile; return the integrated data when the time interval elapses."""
        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n  # remember how many profiles the interval held
            self.__dataReady = True

        return avgdata

    def integrateByStride(self, data, datatime):
        """
        Buffer n*stride profiles, then emit `stride` integrated outputs, one
        per call, each summing every stride-th profile. Returns (data, time)
        or (None, None) while filling.
        """
        if self.__profIndex == 0:
            self.__buffer = [[data.copy(), datatime]]
        else:
            self.__buffer.append([data.copy(), datatime])
        self.__profIndex += 1
        self.__dataReady = False

        if self.__profIndex == self.n * self.stride:
            self.__dataToPutStride = True
            self.__profIndexStride = 0
            self.__profIndex = 0
            self.__bufferStride = []
            for i in range(self.stride):
                # every stride-th profile belongs to the same output
                current = self.__buffer[i::self.stride]
                data = numpy.sum([t[0] for t in current], axis=0)
                avgdatatime = numpy.average([t[1] for t in current])
                self.__bufferStride.append((data, avgdatatime))

        if self.__dataToPutStride:
            self.__dataReady = True
            self.__profIndexStride += 1
            if self.__profIndexStride == self.stride:
                self.__dataToPutStride = False
            return self.__bufferStride[self.__profIndexStride - 1]

        return None, None

    def integrate(self, data, datatime=None):
        """Dispatch to byTime/byProfiles; returns (avgdata, avgdatatime) or (None, None)."""
        if self.__initime == None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): deltatime is always 0 because __lastdatatime was just
        # set to datatime above — kept as-is to preserve behavior
        deltatime = datatime - self.__lastdatatime

        if not self.__withOverlapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def integrateByBlock(self, dataOut):
        """Integrate block data [nChannels, nProfiles, nHeis] in groups of n profiles."""
        times = int(dataOut.data.shape[1] / self.n)
        # fixed: numpy.complex (an alias of the builtin complex) was removed
        # in NumPy 1.24 and raised AttributeError
        avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=complex)

        id_min = 0
        id_max = self.n

        for i in range(times):
            junk = dataOut.data[:, id_min:id_max, :]
            avgdata[:, i, :] = junk.sum(axis=1)
            id_min += self.n
            id_max += self.n

        timeInterval = dataOut.ippSeconds * self.n
        avgdatatime = (times - 1) * timeInterval + dataOut.utctime
        self.__dataReady = True
        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
        """
        Coherently integrate dataOut.data. Sets dataOut.flagNoData until an
        integration period completes, then publishes the summed profiles and
        updates nCohInt (once, guarded by flagCohInt) and the header.
        """
        if not self.isConfig:
            self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            # data read as blocks: dimension = [nChannels, nProfiles, nHeis]
            avgdata, avgdatatime = self.integrateByBlock(dataOut)
            # NOTE(review): this produces a float nProfiles — confirm callers tolerate it
            dataOut.nProfiles /= self.n
        else:
            if stride is None:
                avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
            else:
                avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)

        # dataOut.timeInterval *= n

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata

            if not dataOut.flagCohInt:
                dataOut.nCohInt *= self.n
                dataOut.flagCohInt = True

            dataOut.utctime = avgdatatime

            # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt

            dataOut.flagNoData = False

            #update Processing Header:
            dataOut.processingHeaderObj.nCohInt = dataOut.nCohInt

        return dataOut
|
r1177 | ||
|
class Decoder(Operation):
    """
    Decode phase-coded voltage profiles by correlating each profile with its
    transmit code, either in the time domain or in the frequency domain.
    """

    isConfig = False
    __profIndex = 0

    code = None
    nCode = None
    nBaud = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.times = None
        self.osamp = None
        # self.__setValues = False
        self.isConfig = False
        self.setupReq = False

    def setup(self, code, osamp, dataOut):
        """
        Prepare the (optionally oversampled) code, its conjugate FFT, and the
        output buffers sized for dataOut.

        Raises ValueError when there are fewer heights than bauds.
        """
        self.__profIndex = 0

        self.code = code

        self.nCode = len(code)
        self.nBaud = len(code[0])

        if (osamp != None) and (osamp > 1):
            self.osamp = osamp
            # each baud is repeated osamp times along the height axis
            self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
            self.nBaud = self.nBaud * self.osamp

        self.__nChannels = dataOut.nChannels
        self.__nProfiles = dataOut.nProfiles
        self.__nHeis = dataOut.nHeights

        if self.__nHeis < self.nBaud:
            raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' % (self.__nHeis, self.nBaud))

        #Frequency
        # fixed: numpy.complex (alias of the builtin complex) was removed in
        # NumPy 1.24 and raised AttributeError; same fix applied below
        __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)

        __codeBuffer[:, 0:self.nBaud] = self.code

        # conjugate FFT of the zero-padded code, used by the frequency-domain path
        self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))

        if dataOut.flagDataAsBlock:
            self.ndatadec = self.__nHeis #- self.nBaud + 1
            self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=complex)
        else:
            #Time
            self.ndatadec = self.__nHeis #- self.nBaud + 1
            self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)

    def __convolutionInFreq(self, data):
        """Decode one profile in the frequency domain (multiply by conj(FFT(code)))."""
        fft_code = self.fft_code[self.__profIndex].reshape(1, -1)

        fft_data = numpy.fft.fft(data, axis=1)

        conv = fft_data * fft_code

        data = numpy.fft.ifft(conv, axis=1)

        return data

    def __convolutionInFreqOpt(self, data):

        raise NotImplementedError

    def __convolutionInTime(self, data):
        """Decode one profile in the time domain via numpy.correlate per channel."""
        code = self.code[self.__profIndex]
        for i in range(self.__nChannels):
            self.datadecTime[i, :] = numpy.correlate(data[i, :], code, mode='full')[self.nBaud - 1:]

        return self.datadecTime

    def __convolutionByBlockInTime(self, data):
        """Decode a whole block [nChannels, nProfiles, nHeis] in the time domain."""
        repetitions = int(self.__nProfiles / self.nCode)
        # tile the code sequence over all profiles without copying
        # (stride 0 on the first axis replays the same rows)
        junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
        junk = junk.flatten()
        code_block = numpy.reshape(junk, (self.nCode * repetitions, self.nBaud))
        profilesList = range(self.__nProfiles)

        for i in range(self.__nChannels):
            for j in profilesList:
                self.datadecTime[i, j, :] = numpy.correlate(data[i, j, :], code_block[j, :], mode='full')[self.nBaud - 1:]
        return self.datadecTime

    def __convolutionByBlockInFreq(self, data):

        raise NotImplementedError("Decoder by frequency fro Blocks not implemented")
        # (unreachable legacy frequency-domain block code removed)

    def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
        """
        Decode dataOut.data with the given (or dataOut-provided) code.

        mode: 0 = time domain, 1 = frequency domain, 2 = optimized frequency
        (not implemented); mode 3 is mapped to 0 with a warning.
        Raises ValueError for any other mode.
        """
        if dataOut.flagDecodeData:
            print("This data is already decoded, recoding again ...")

        if not self.isConfig:

            if code is None:
                if dataOut.code is None:
                    raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" % dataOut.type)

                code = dataOut.code
            else:
                code = numpy.array(code).reshape(nCode, nBaud)
            self.setup(code, osamp, dataOut)

            self.isConfig = True

        if mode == 3:
            sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" % mode)

        if times != None:
            sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")

        if self.code is None:
            # NOTE(review): returns None (not dataOut) on this path — kept as-is
            print("Fail decoding: Code is not defined.")
            return

        self.__nProfiles = dataOut.nProfiles
        datadec = None

        if mode == 3:
            mode = 0

        if dataOut.flagDataAsBlock:
            # decoding when data have been read as a block
            if mode == 0:
                datadec = self.__convolutionByBlockInTime(dataOut.data)
            if mode == 1:
                datadec = self.__convolutionByBlockInFreq(dataOut.data)
        else:
            # decoding when data have been read profile by profile
            if mode == 0:
                datadec = self.__convolutionInTime(dataOut.data)

            if mode == 1:
                datadec = self.__convolutionInFreq(dataOut.data)

            if mode == 2:
                datadec = self.__convolutionInFreqOpt(dataOut.data)

        if datadec is None:
            raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" % mode)

        dataOut.code = self.code
        dataOut.nCode = self.nCode
        dataOut.nBaud = self.nBaud

        dataOut.data = datadec

        dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]

        dataOut.flagDecodeData = True  # assume the data is now decoded

        #update Radar Controller Header:
        dataOut.radarControllerHeaderObj.code = self.code
        dataOut.radarControllerHeaderObj.nCode = self.nCode
        dataOut.radarControllerHeaderObj.nBaud = self.nBaud
        dataOut.radarControllerHeaderObj.nOsamp = osamp
        #update Processing Header:
        dataOut.processingHeaderObj.heightList = dataOut.heightList
        dataOut.processingHeaderObj.heightResolution = dataOut.heightList[1] - dataOut.heightList[0]

        # cycle the profile index over the code sequence
        if self.__profIndex == self.nCode - 1:
            self.__profIndex = 0
            return dataOut
        self.__profIndex += 1
        return dataOut
|
r1177 | ||
|
class ProfileConcat(Operation):
    """Concatenate ``m`` consecutive profiles along the height axis.

    Incoming profiles are buffered side by side; once ``m`` of them have been
    collected the operation emits a single profile whose height coverage is
    ``m`` times the original and whose IPP is scaled by ``m``.
    Only valid for profile-by-profile reading (``flagDataAsBlock == False``).
    """

    isConfig = False
    buffer = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.profileIndex = 0

    def reset(self):
        # Zero the accumulation buffer and restart the fill position.
        self.buffer = numpy.zeros_like(self.buffer)
        self.start_index = 0
        self.times = 1

    def setup(self, data, m, n=1):
        # Buffer holds m profiles laid end to end: (nChannels, nHeights * m).
        nChannels = data.shape[0]
        nHeights = data.shape[1]
        self.buffer = numpy.zeros((nChannels, nHeights * m), dtype=type(data[0, 0]))
        self.nHeights = nHeights
        self.start_index = 0
        self.times = 1

    def concat(self, data):
        # Copy the incoming profile into the next free slice of the buffer.
        end = self.nHeights * self.times
        self.buffer[:, self.start_index:end] = data.copy()
        self.start_index = self.start_index + self.nHeights

    def run(self, dataOut, m):
        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut.data, m, 1)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")

        self.concat(dataOut.data)
        self.times += 1

        if self.times > m:
            # m profiles collected: emit the concatenated profile and update
            # the height list / IPP to reflect the m-times-longer range.
            dataOut.data = self.buffer
            self.reset()
            dataOut.flagNoData = False

            deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
            xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
            dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
            dataOut.ippSeconds *= m

            # update Processing Header
            dataOut.processingHeaderObj.heightList = dataOut.heightList
            dataOut.processingHeaderObj.ipp = dataOut.ippSeconds

        return dataOut
|
r1177 | ||
|
class ProfileSelector(Operation):
    """Select a subset of profiles by explicit list, index range, list of
    ranges, or (AMISR only) beam.

    Works both on block data (``flagDataAsBlock == True``, dimension
    [nChannels, nProfiles, nHeis]) and profile by profile
    ([nChannels, nHeis]).
    """

    profileIndex = None
    # Total number of profiles of the selection (set on first match)
    nProfiles = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.profileIndex = 0

    def incProfileIndex(self):
        # Advance the output profile counter, wrapping at nProfiles.
        self.profileIndex += 1

        if self.profileIndex >= self.nProfiles:
            self.profileIndex = 0

    def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
        # True when minIndex <= profileIndex <= maxIndex (inclusive bounds).
        return minIndex <= profileIndex <= maxIndex

    def isThisProfileInList(self, profileIndex, profileList):
        # True when the profile index belongs to the given list.
        return profileIndex in profileList

    def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList=None, nProfiles=None):
        """
        ProfileSelector:

        Inputs:
            profileList      : Index of profiles selected. Example: profileList = (0,1,2,7,8)

            profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)

            rangeList        : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))

        Raises:
            ValueError if none of profileList/profileRangeList/rangeList/beam is given.
        """

        if rangeList is not None:
            # Accept a single (min, max) pair as shorthand for a one-item list.
            if type(rangeList[0]) not in (tuple, list):
                rangeList = [rangeList]

        dataOut.flagNoData = True

        if dataOut.flagDataAsBlock:
            """
            data dimension = [nChannels, nProfiles, nHeis]
            """
            if profileList is not None:
                dataOut.data = dataOut.data[:, profileList, :]

            if profileRangeList is not None:
                minIndex = profileRangeList[0]
                maxIndex = profileRangeList[1]
                profileList = list(range(minIndex, maxIndex + 1))

                dataOut.data = dataOut.data[:, minIndex:maxIndex + 1, :]

            if rangeList is not None:
                # Build one flat index list from all the (min, max) ranges.
                profileList = []

                for thisRange in rangeList:
                    minIndex = thisRange[0]
                    maxIndex = thisRange[1]

                    profileList.extend(list(range(minIndex, maxIndex + 1)))

                dataOut.data = dataOut.data[:, profileList, :]

            dataOut.nProfiles = len(profileList)
            dataOut.profileIndex = dataOut.nProfiles - 1
            dataOut.flagNoData = False

            return dataOut

        """
        data dimension = [nChannels, nHeis]
        """

        if profileList is not None:

            if self.isThisProfileInList(dataOut.profileIndex, profileList):

                self.nProfiles = len(profileList)
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()

            return dataOut

        if profileRangeList is not None:

            minIndex = profileRangeList[0]
            maxIndex = profileRangeList[1]

            if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                self.nProfiles = maxIndex - minIndex + 1
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()

            return dataOut

        if rangeList is not None:

            # Total profiles across all ranges, needed for index wrapping.
            nProfiles = 0

            for thisRange in rangeList:
                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                nProfiles += maxIndex - minIndex + 1

            for thisRange in rangeList:

                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                    self.nProfiles = nProfiles
                    dataOut.nProfiles = self.nProfiles
                    dataOut.profileIndex = self.profileIndex
                    dataOut.flagNoData = False

                    self.incProfileIndex()

                    break

            return dataOut

        if beam is not None:  # beam is only for AMISR data
            if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
                dataOut.flagNoData = False
                dataOut.profileIndex = self.profileIndex

                self.incProfileIndex()

            return dataOut

        raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
|
r897 | ||
|
r1177 | ||
|
class Reshaper(Operation):
    """Reshape voltage data to a new (nChannels, nProfiles, nHeis) layout.

    Configured either with an explicit ``shape`` or with ``nTxs`` (profiles
    per transmission). For block data the reshape is immediate; for
    profile-by-profile data with ``nTxs < 1``, 1/nTxs profiles are buffered
    and merged into one longer profile.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__buffer = None
        self.__nitems = 0

    def __appendProfile(self, dataOut, nTxs):
        # Append one profile into the merge buffer; returns the fractional
        # fill level scaled by nTxs.
        if self.__buffer is None:
            shape = (dataOut.nChannels, int(dataOut.nHeights / nTxs))
            self.__buffer = numpy.empty(shape, dtype=dataOut.data.dtype)

        ini = dataOut.nHeights * self.__nitems
        end = ini + dataOut.nHeights

        self.__buffer[:, ini:end] = dataOut.data

        self.__nitems += 1

        return int(self.__nitems * nTxs)

    def __getBuffer(self):
        # Return a copy of the merged profile once 1/nTxs profiles have been
        # accumulated, else None.
        if self.__nitems == int(1. / self.__nTxs):

            self.__nitems = 0

            return self.__buffer.copy()

        return None

    def __checkInputs(self, dataOut, shape, nTxs):
        # Validate shape/nTxs and derive the missing one.
        # Returns (shape_tuple, nTxs).
        if shape is None and nTxs is None:
            raise ValueError("Reshaper: shape of factor should be defined")

        if nTxs:
            if nTxs < 0:
                raise ValueError("nTxs should be greater than 0")

            if nTxs < 1 and dataOut.nProfiles % (1. / nTxs) != 0:
                raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" % (dataOut.nProfiles, (1. / nTxs)))

            shape = [dataOut.nChannels, dataOut.nProfiles * nTxs, dataOut.nHeights / nTxs]

            return shape, nTxs

        if len(shape) != 2 and len(shape) != 3:
            raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" % (dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))

        if len(shape) == 2:
            # Channel count is implicit when only (nProfiles, nHeis) is given.
            shape_tuple = [dataOut.nChannels]
            shape_tuple.extend(shape)
        else:
            shape_tuple = list(shape)

        nTxs = 1.0 * shape_tuple[1] / dataOut.nProfiles

        return shape_tuple, nTxs

    def run(self, dataOut, shape=None, nTxs=None):

        shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * self.__nTxs) - 1

        else:

            if self.__nTxs < 1:

                self.__appendProfile(dataOut, self.__nTxs)
                new_data = self.__getBuffer()

                if new_data is not None:
                    dataOut.data = new_data
                    dataOut.flagNoData = False

                # FIX: previously `dataOut.profileIndex * nTxs`, which raised
                # TypeError when the operation was configured through `shape`
                # (nTxs parameter stays None); use the derived value instead.
                profileIndex = dataOut.profileIndex * self.__nTxs

            else:

                raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights / self.__nTxs) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * self.__nTxs)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= self.__nTxs

        return dataOut
|
r1177 | ||
|
class SplitProfiles(Operation):
    """Split each profile of block data into ``n`` shorter profiles.

    Reshapes [nChannels, nProfiles, nSamples] into
    [nChannels, nProfiles * n, nSamples / n] and rescales heightList,
    nProfiles and IPP accordingly. Only valid for block reading.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def run(self, dataOut, n):

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            # nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            if shape[2] % n != 0:
                # FIX: error message had the operands reversed — the sample
                # count must be a multiple of n, not the other way around.
                raise ValueError("Could not split the data, nSamples=%d has to be multiple of n=%d" % (shape[2], n))

            new_shape = shape[0], shape[1] * n, int(shape[2] / n)

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            # NOTE(review): derived from the pre-split profile count; looks
            # like it should be nProfiles*n - 1 — confirm against consumers.
            profileIndex = int(dataOut.nProfiles / n) - 1

        else:

            raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights / n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= n

        return dataOut
|
r1177 | ||
|
class CombineProfiles(Operation):
    """Combine ``n`` consecutive profiles into one longer profile
    (inverse of SplitProfiles).

    Block data is reshaped in place; profile-by-profile data is accumulated
    until ``n`` profiles have arrived (intermediate calls return None).
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__remData = None
        self.__profileIndex = 0

    def run(self, dataOut, n):

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            # nchannels, nprofiles, nsamples
            shape = dataOut.data.shape
            # FIX: use integer division — a float dimension in the shape
            # tuple makes numpy.reshape raise on Python 3.
            new_shape = shape[0], shape[1] // n, shape[2] * n

            if shape[1] % n != 0:
                raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[1]))

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * n) - 1

        else:

            # nchannels, nsamples
            if self.__remData is None:
                newData = dataOut.data
            else:
                newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)

            self.__profileIndex += 1

            if self.__profileIndex < n:
                # Keep buffering until n profiles have been concatenated.
                self.__remData = newData
                return

            self.__profileIndex = 0
            self.__remData = None

            dataOut.data = newData
            dataOut.flagNoData = False

            # FIX: integer division — profile index must be an int, not float.
            profileIndex = dataOut.profileIndex // n

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights * n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles / n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds *= n

        return dataOut
r1296 | |||
class PulsePairVoltage(Operation):
    '''
    Function PulsePair(Signal Power, Velocity)
    The real component of Lag[0] provides Intensity Information
    The imag component of Lag[1] Phase provides Velocity Information

    Accumulates n voltage profiles, then estimates per-channel/per-height
    power, signal intensity, Doppler velocity, SNR and spectral width from
    the lag-0 and lag-1 products of the buffered profiles.

    Configuration Parameters:
    nPRF = Number of Several PRF
    theta = Degree Azimuth angel Boundaries
    Input:
    self.dataOut
    lag[N]
    Affected:
    self.dataOut.spc
    '''
    isConfig = False
    __profIndex = 0          # number of profiles currently buffered
    __initime = None         # timestamp of the first profile of the batch
    __lastdatatime = None
    __buffer = None          # (nch, n, nHeis) complex accumulation buffer
    noise = None             # per-channel noise estimate (Hildebrand-Sekhon)
    __dataReady = False      # True once a full batch of n profiles was processed
    n = None                 # batch size (number of PRFs)
    __nch = 0
    __nHeis = 0
    removeDC = False
    ipp = None
    lambda_ = 0              # radar wavelength [m]

    def __init__(self,**kwargs):
        Operation.__init__(self,**kwargs)

    def setup(self, dataOut, n = None, removeDC=False):
        '''
        n = number of input PRF's (batch size); must be >= 2.
        Caches channel/height counts, IPP and nCohInt from dataOut and
        allocates the (nch, n, nHeis) complex buffer.
        '''
        self.__initime = None
        self.__lastdatatime = 0
        self.__dataReady = False
        self.__buffer = 0
        self.__profIndex = 0
        self.noise = None
        self.__nch = dataOut.nChannels
        self.__nHeis = dataOut.nHeights
        self.removeDC = removeDC
        # wavelength for a 9345 MHz carrier (c / f)
        self.lambda_ = 3.0e8/(9345.0e6)
        self.ippSec = dataOut.ippSeconds
        self.nCohInt = dataOut.nCohInt

        if n == None:
            raise ValueError("n should be specified.")
        if n != None:
            if n<2:
                raise ValueError("n should be greater than 2")
        self.n = n
        self.__nProf = n
        self.__buffer = numpy.zeros((dataOut.nChannels,
                                     n,
                                     dataOut.nHeights),
                                    dtype='complex')

    def putData(self,data):
        '''
        Add a profile to the __buffer and increase the __profIndex by one.
        '''
        self.__buffer[:,self.__profIndex,:]= data
        self.__profIndex += 1
        return

    def pushData(self,dataOut):
        '''
        Return the PULSEPAIR estimates and the number of profiles used.
        Affected : self.__profIndex (reset to 0), self.__buffer (cleared),
                   self.noise.
        '''
        #----------------- Remove DC-----------------------------------
        if self.removeDC==True:
            mean = numpy.mean(self.__buffer,1)
            tmp = mean.reshape(self.__nch,1,self.__nHeis)
            dc= numpy.tile(tmp,[1,self.__nProf,1])
            self.__buffer = self.__buffer - dc
        #------------------ Power computation (lag-0) ------------------------
        pair0 = self.__buffer*numpy.conj(self.__buffer)
        pair0 = pair0.real
        lag_0 = numpy.sum(pair0,1)
        #------------------ Noise estimate per channel --------------------
        # Hildebrand-Sekhon threshold over the sorted power samples.
        self.noise = numpy.zeros(self.__nch)
        for i in range(self.__nch):
            daux = numpy.sort(pair0[i,:,:],axis= None)
            self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
        self.noise = self.noise.reshape(self.__nch,1)
        self.noise = numpy.tile(self.noise,[1,self.__nHeis])
        noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
        noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
        #------------------ Received power = P, signal power = S, noise = N --
        #------------------ P = S + N, P = lag_0/N ---------------------------
        #-------------------- Power ------------------------------------------
        data_power = lag_0/(self.n*self.nCohInt)
        #------------------ Signal -------------------------------------------
        data_intensity = pair0 - noise_buffer
        data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
        #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
        # Clamp negative intensities to their absolute value (keeps S >= 0).
        for i in range(self.__nch):
            for j in range(self.__nHeis):
                if data_intensity[i][j] < 0:
                    data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))

        #----------------- Doppler frequency and velocity (lag-1) --------
        pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
        lag_1 = numpy.sum(pair1,1)
        data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
        data_velocity = (self.lambda_/2.0)*data_freq

        #---------------- Estimated mean signal power -----------
        lag_0 = lag_0/self.n
        S = lag_0-self.noise

        #---------------- Mean Doppler frequency ---------------------
        lag_1 = lag_1/(self.n-1)
        R1 = numpy.abs(lag_1)

        #---------------- SNR computation ----------------------------------
        data_snrPP = S/self.noise
        # Floor the SNR to avoid zeros/negatives downstream (e.g. in dB).
        for i in range(self.__nch):
            for j in range(self.__nHeis):
                if data_snrPP[i][j] < 1.e-20:
                    data_snrPP[i][j] = 1.e-20

        #----------------- Spectral width computation ----------------------
        L = S/R1
        L = numpy.where(L<0,1,L)
        L = numpy.log(L)
        tmp = numpy.sqrt(numpy.absolute(L))
        data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
        n = self.__profIndex
        # Reset the buffer for the next batch of n profiles.
        self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
        self.__profIndex = 0
        return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n

    def pulsePairbyProfiles(self,dataOut):
        # Buffer one profile; when n have accumulated, compute the estimates.
        # Returns 5 Nones until a batch completes.
        self.__dataReady = False
        data_power = None
        data_intensity = None
        data_velocity = None
        data_specwidth = None
        data_snrPP = None
        self.putData(data=dataOut.data)
        if self.__profIndex == self.n:
            data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
            self.__dataReady = True
        return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth

    def pulsePairOp(self, dataOut, datatime= None):
        # Wrap pulsePairbyProfiles with batch timestamping.
        # NOTE(review): `deltatime` is computed but unused, and avgdatatime is
        # the batch start time, not an average — confirm intended semantics.
        if self.__initime == None:
            self.__initime = datatime
        data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
        self.__lastdatatime = datatime
        if data_power is None:
            return None, None, None,None,None,None

        avgdatatime = self.__initime
        deltatime = datatime - self.__lastdatatime
        self.__initime = datatime
        return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime

    def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
        # Entry point: accumulate profiles and publish the pulse-pair
        # products on dataOut once every n profiles (__dataReady).
        if not self.isConfig:
            self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
            self.isConfig = True
        data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
        dataOut.flagNoData = True
        if self.__dataReady:
            dataOut.nCohInt *= self.n
            dataOut.dataPP_POW = data_intensity # S
            dataOut.dataPP_POWER = data_power # P
            dataOut.dataPP_DOP = data_velocity
            dataOut.dataPP_SNR = data_snrPP
            dataOut.dataPP_WIDTH = data_specwidth
            dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time step
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
        return dataOut
r1314 | |||
|
r624 | # import collections | |
# from scipy.stats import mode | |||
|
r897 | # | |
|
r624 | # class Synchronize(Operation): | |
|
r897 | # | |
|
r624 | # isConfig = False | |
# __profIndex = 0 | |||
|
r897 | # | |
# def __init__(self, **kwargs): | |||
# | |||
# Operation.__init__(self, **kwargs) | |||
|
r624 | # # self.isConfig = False | |
# self.__powBuffer = None | |||
# self.__startIndex = 0 | |||
# self.__pulseFound = False | |||
|
r897 | # | |
|
r624 | # def __findTxPulse(self, dataOut, channel=0, pulse_with = None): | |
|
r897 | # | |
|
r624 | # #Read data | |
|
r897 | # | |
|
r624 | # powerdB = dataOut.getPower(channel = channel) | |
# noisedB = dataOut.getNoise(channel = channel)[0] | |||
|
r897 | # | |
|
r624 | # self.__powBuffer.extend(powerdB.flatten()) | |
|
r897 | # | |
|
r624 | # dataArray = numpy.array(self.__powBuffer) | |
|
r897 | # | |
|
r624 | # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same") | |
|
r897 | # | |
|
r624 | # maxValue = numpy.nanmax(filteredPower) | |
|
r897 | # | |
|
r624 | # if maxValue < noisedB + 10: | |
# #No se encuentra ningun pulso de transmision | |||
# return None | |||
|
r897 | # | |
|
r624 | # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0] | |
|
r897 | # | |
|
r624 | # if len(maxValuesIndex) < 2: | |
# #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX | |||
# return None | |||
|
r897 | # | |
|
r624 | # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples | |
|
r897 | # | |
|
r624 | # #Seleccionar solo valores con un espaciamiento de nSamples | |
# pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex) | |||
|
r897 | # | |
|
r624 | # if len(pulseIndex) < 2: | |
# #Solo se encontro un pulso de transmision con ancho mayor a 1 | |||
# return None | |||
|
r897 | # | |
|
r624 | # spacing = pulseIndex[1:] - pulseIndex[:-1] | |
|
r897 | # | |
|
r624 | # #remover senales que se distancien menos de 10 unidades o muestras | |
# #(No deberian existir IPP menor a 10 unidades) | |||
|
r897 | # | |
|
r624 | # realIndex = numpy.where(spacing > 10 )[0] | |
|
r897 | # | |
|
r624 | # if len(realIndex) < 2: | |
# #Solo se encontro un pulso de transmision con ancho mayor a 1 | |||
# return None | |||
|
r897 | # | |
|
r624 | # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs) | |
# realPulseIndex = pulseIndex[realIndex] | |||
|
r897 | # | |
|
r624 | # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0] | |
|
r897 | # | |
|
r624 | # print "IPP = %d samples" %period | |
|
r897 | # | |
|
r624 | # self.__newNSamples = dataOut.nHeights #int(period) | |
# self.__startIndex = int(realPulseIndex[0]) | |||
|
r897 | # | |
|
r624 | # return 1 | |
|
r897 | # | |
# | |||
|
r624 | # def setup(self, nSamples, nChannels, buffer_size = 4): | |
|
r897 | # | |
|
r624 | # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float), | |
# maxlen = buffer_size*nSamples) | |||
|
r897 | # | |
|
r624 | # bufferList = [] | |
|
r897 | # | |
|
r624 | # for i in range(nChannels): | |
# bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN, | |||
# maxlen = buffer_size*nSamples) | |||
|
r897 | # | |
|
r624 | # bufferList.append(bufferByChannel) | |
|
r897 | # | |
|
r624 | # self.__nSamples = nSamples | |
# self.__nChannels = nChannels | |||
# self.__bufferList = bufferList | |||
|
r897 | # | |
|
r624 | # def run(self, dataOut, channel = 0): | |
|
r897 | # | |
|
r624 | # if not self.isConfig: | |
# nSamples = dataOut.nHeights | |||
# nChannels = dataOut.nChannels | |||
# self.setup(nSamples, nChannels) | |||
# self.isConfig = True | |||
|
r897 | # | |
|
r624 | # #Append new data to internal buffer | |
# for thisChannel in range(self.__nChannels): | |||
# bufferByChannel = self.__bufferList[thisChannel] | |||
# bufferByChannel.extend(dataOut.data[thisChannel]) | |||
|
r897 | # | |
|
r624 | # if self.__pulseFound: | |
# self.__startIndex -= self.__nSamples | |||
|
r897 | # | |
|
r624 | # #Finding Tx Pulse | |
# if not self.__pulseFound: | |||
# indexFound = self.__findTxPulse(dataOut, channel) | |||
|
r897 | # | |
|
r624 | # if indexFound == None: | |
# dataOut.flagNoData = True | |||
# return | |||
|
r897 | # | |
|
r624 | # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex) | |
# self.__pulseFound = True | |||
# self.__startIndex = indexFound | |||
|
r897 | # | |
|
r624 | # #If pulse was found ... | |
# for thisChannel in range(self.__nChannels): | |||
# bufferByChannel = self.__bufferList[thisChannel] | |||
|
r897 | # #print self.__startIndex | |
|
r624 | # x = numpy.array(bufferByChannel) | |
# self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples] | |||
|
r897 | # | |
|
r624 | # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0] | |
# dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight | |||
# # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6 | |||
|
r897 | # | |
|
r624 | # dataOut.data = self.__arrayBuffer | |
|
r897 | # | |
|
r624 | # self.__startIndex += self.__newNSamples | |
|
r897 | # | |
|
r1173 | # return | |
class SSheightProfiles(Operation):
    """Slide a window of ``nsamples`` heights with stride ``step`` across each
    profile, re-arranging the voltage data into one pseudo-profile per height
    segment (spaced-samples height profiles).
    """

    step = None
    nsamples = None
    bufferShape = None
    profileShape = None
    sshProfiles = None
    profileIndex = None

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.isConfig = False

    def setup(self, dataOut, step=None, nsamples=None):
        # Pre-compute the window/output shapes from the first data object.
        if step == None and nsamples == None:
            raise ValueError("step or nheights should be specified ...")

        self.step = step
        self.nsamples = nsamples
        self.__nChannels = dataOut.nChannels
        self.__nProfiles = dataOut.nProfiles
        self.__nHeis = dataOut.nHeights
        shape = dataOut.data.shape  # nchannels, nprofiles, nsamples

        residue = (shape[1] - self.nsamples) % self.step
        if residue != 0:
            print("The residue is %d, step=%d should be multiple of %d to avoid loss of %d samples"%(residue,step,shape[1] - self.nsamples,residue))

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
        numberProfile = self.nsamples
        numberSamples = (shape[1] - self.nsamples) / self.step

        self.bufferShape = int(shape[0]), int(numberSamples), int(numberProfile)   # nchannels, nsamples, nprofiles
        self.profileShape = int(shape[0]), int(numberProfile), int(numberSamples)  # nchannels, nprofiles, nsamples

        # FIX: numpy.complex alias was removed in NumPy 1.24; the builtin
        # complex maps to the same complex128 dtype.
        self.buffer = numpy.zeros(self.bufferShape, dtype=complex)
        self.sshProfiles = numpy.zeros(self.profileShape, dtype=complex)

    def run(self, dataOut, step, nsamples, code=None, repeat=None):
        """Re-slice dataOut.data into windowed pseudo-profiles.

        code/repeat: optional decoding sequence multiplied into each window.
        """
        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:
            # Collapse the profile axis; windows are taken over heights only.
            dataOut.data = numpy.average(dataOut.data, axis=1)
            dataOut.flagDataAsBlock = False

        if not self.isConfig:
            self.setup(dataOut, step=step, nsamples=nsamples)
            self.isConfig = True

        if code is not None:
            code = numpy.array(code)
            code_block = code

            if repeat is not None:
                code_block = numpy.repeat(code_block, repeats=repeat, axis=1)

        for i in range(self.buffer.shape[1]):
            if code is not None:
                self.buffer[:, i] = dataOut.data[:, i * self.step:i * self.step + self.nsamples] * code_block
            else:
                self.buffer[:, i] = dataOut.data[:, i * self.step:i * self.step + self.nsamples]

        # Transpose each channel so windows become profiles.
        for j in range(self.buffer.shape[0]):
            self.sshProfiles[j] = numpy.transpose(self.buffer[j])

        profileIndex = self.nsamples
        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
        # IPP from the height resolution (0.15 km per microsecond round trip).
        ippSeconds = (deltaHeight * 1.0e-6) / (0.15)

        try:
            # dataOut.concat_m only exists after a ProfileConcat upstream.
            if dataOut.concat_m is not None:
                ippSeconds = ippSeconds / float(dataOut.concat_m)
        except Exception:
            pass

        dataOut.data = self.sshProfiles
        dataOut.flagNoData = False
        dataOut.heightList = numpy.arange(self.buffer.shape[1]) * self.step * deltaHeight + dataOut.heightList[0]
        dataOut.nProfiles = int(dataOut.nProfiles * self.nsamples)
        dataOut.profileIndex = profileIndex
        dataOut.flagDataAsBlock = True
        dataOut.ippSeconds = ippSeconds
        dataOut.step = self.step

        return dataOut
################################################################################3############################3 | |||
################################################################################3############################3 | |||
################################################################################3############################3 | |||
################################################################################3############################3 | |||
class SSheightProfiles2(Operation):
    '''
    Reorganize the height axis into pseudo-profiles, working per profile and
    per block.

    Each input profile of nHeights samples is sliced into windows of
    ``nsamples`` heights taken every ``step`` heights; the windows become the
    new (decimated) height axis and the samples inside each window become
    pseudo-profiles.

    Corrected and updated version intended to work with RemoveProfileSats2;
    prefer this class over SSheightProfiles.
    '''

    bufferShape = None     # (nChannels, new_nHeights, nsamples)
    profileShape = None    # (nChannels, nsamples, new_nHeights)
    sshProfiles = None     # per-channel transpose of buffer, ready for output
    profileIndex = None
    __slots__ = ('step', 'nsamples', 'deltaHeight', 'init_range', 'isConfig', '__nChannels',
                 '__nProfiles', '__nHeis', 'new_nHeights')

    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)
        self.isConfig = False

    def setup(self, dataOut, step=None, nsamples=None):
        '''
        Cache the geometry taken from *dataOut* and pre-allocate work buffers.

        Parameters
        ----------
        dataOut : Voltage
            Source of nChannels, nProfiles, nHeights and heightList.
        step : int
            Height decimation step between consecutive windows.
        nsamples : int
            Height samples per window (number of pseudo-profiles produced).
        '''
        if step is None and nsamples is None:
            raise ValueError("step or nheights should be specified ...")

        self.step = step
        self.nsamples = nsamples
        self.__nChannels = int(dataOut.nChannels)
        self.__nProfiles = int(dataOut.nProfiles)
        self.__nHeis = int(dataOut.nHeights)

        residue = (self.__nHeis - self.nsamples) % self.step
        if residue != 0:
            # step must divide (nHeights - nsamples); otherwise the last
            # `residue` samples of every profile are dropped
            print("The residue is %d, step=%d should be multiple of %d to avoid loss of %d samples"%(residue, step, self.__nHeis - self.nsamples, residue))

        self.deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
        self.init_range = dataOut.heightList[0]
        # number of complete windows along the original height axis
        numberSamples = (self.__nHeis - self.nsamples) / self.step

        self.new_nHeights = numberSamples

        self.bufferShape = int(self.__nChannels), int(numberSamples), int(self.nsamples)    # nchannels, new heights, nsamples
        self.profileShape = int(self.__nChannels), int(self.nsamples), int(numberSamples)   # nchannels, nsamples, new heights

        # numpy.complex was deprecated in NumPy 1.20 and removed in 1.24;
        # use the explicit complex128 dtype instead
        self.buffer = numpy.zeros(self.bufferShape, dtype=numpy.complex128)
        self.sshProfiles = numpy.zeros(self.profileShape, dtype=numpy.complex128)

    def getNewProfiles(self, data, code=None, repeat=None):
        '''
        Slice one input profile into height windows, optionally multiplying by
        *code* (repeated *repeat* times along axis 1), and leave the transposed
        result in self.sshProfiles.
        '''
        if code is not None:
            code = numpy.array(code)
            code_block = code

            if repeat is not None:
                code_block = numpy.repeat(code_block, repeats=repeat, axis=1)

        if data.ndim < 3:
            # single profile: promote to (nChannels, 1, nHeights)
            data = data.reshape(self.__nChannels, 1, self.__nHeis)

        for ch in range(self.__nChannels):
            for i in range(int(self.new_nHeights)):   # new (decimated) heights
                window = data[ch, :, i*self.step:i*self.step + self.nsamples]
                if code is not None:
                    self.buffer[ch, i, :] = window*code_block
                else:
                    self.buffer[ch, i, :] = window

        for j in range(self.__nChannels):
            # swap to (pseudo-profiles, new heights)
            self.sshProfiles[j, :, :] = numpy.transpose(self.buffer[j, :, :])

    def run(self, dataOut, step, nsamples, code=None, repeat=None):
        '''
        Reshape dataOut.data into pseudo-profiles and update heightList,
        nProfiles, ippSeconds and the processing header accordingly.
        '''
        if dataOut.flagNoData == True:
            return dataOut
        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut, step=step, nsamples=nsamples)
            self.isConfig = True

        dataBlock = None
        nprof = 1
        if dataOut.flagDataAsBlock:
            nprof = int(dataOut.nProfiles)

        for profile in range(nprof):
            if dataOut.flagDataAsBlock:
                self.getNewProfiles(dataOut.data[:, profile, :], code=code, repeat=repeat)
            else:
                self.getNewProfiles(dataOut.data, code=code, repeat=repeat)   # only one profile
            if profile == 0:
                dataBlock = self.sshProfiles.copy()
            else:   # accumulate along the profile axis
                dataBlock = numpy.concatenate((dataBlock, self.sshProfiles), axis=1)

        # new inter-pulse period derived from the height spacing
        # (0.15 km per microsecond of two-way travel)
        ippSeconds = (self.deltaHeight*1.0e-6)/(0.15)

        dataOut.data = dataBlock
        dataOut.heightList = numpy.arange(int(self.new_nHeights)) * self.step*self.deltaHeight + self.init_range
        dataOut.sampled_heightsFFT = self.nsamples
        dataOut.ippSeconds = ippSeconds
        dataOut.step = self.step
        dataOut.deltaHeight = self.step*self.deltaHeight
        dataOut.flagNoData = False
        if dataOut.flagDataAsBlock:
            dataOut.nProfiles = int(dataOut.nProfiles*self.nsamples)
        else:
            dataOut.nProfiles = int(self.nsamples)
        dataOut.profileIndex = dataOut.nProfiles
        dataOut.flagDataAsBlock = True
        dataBlock = None

        # update Processing Header:
        dataOut.processingHeaderObj.heightList = dataOut.heightList
        dataOut.processingHeaderObj.ipp = ippSeconds
        dataOut.processingHeaderObj.heightResolution = dataOut.deltaHeight

        # dataOut.data is (channels, profiles, heights)
        if dataOut.flagProfilesByRange:
            # assuming the same profile removal for all channels: count the
            # non-zero profiles remaining at each height
            aux = [self.nsamples - numpy.count_nonzero(dataOut.data[0, :, h] == 0) for h in range(len(dataOut.heightList))]
            dataOut.nProfilesByRange = (numpy.asarray(aux)).reshape((1, len(dataOut.heightList)))
        else:
            dataOut.nProfilesByRange = numpy.ones((1, len(dataOut.heightList)))*dataOut.nProfiles

        return dataOut
r1528 | class RemoveProfileSats(Operation): | ||
r1506 | ''' | ||
r1553 | Escrito: Joab Apaza | ||
r1548 | Omite los perfiles contaminados con señal de satélites, usando una altura de referencia | ||
r1506 | In: minHei = min_sat_range | ||
max_sat_range | |||
min_hei_ref | |||
max_hei_ref | |||
th = diference between profiles mean, ref and sats | |||
Out: | |||
profile clean | |||
''' | |||
r1548 | __buffer_data = [] | ||
__buffer_times = [] | |||
buffer = None | |||
outliers_IDs_list = [] | |||
__slots__ = ('n','navg','profileMargin','thHistOutlier','minHei_idx','maxHei_idx','nHeights', | |||
'first_utcBlock','__profIndex','init_prof','end_prof','lenProfileOut','nChannels', | |||
'__count_exec','__initime','__dataReady','__ipp', 'minRef', 'maxRef', 'thdB') | |||
r1506 | def __init__(self, **kwargs): | ||
Operation.__init__(self, **kwargs) | |||
self.isConfig = False | |||
r1553 | def setup(self,dataOut, n=None , navg=0.8, profileMargin=50,thHistOutlier=15, | ||
r1548 | minHei=None, maxHei=None, minRef=None, maxRef=None, thdB=10): | ||
if n == None and timeInterval == None: | |||
raise ValueError("nprofiles or timeInterval should be specified ...") | |||
r1506 | |||
r1548 | if n != None: | ||
self.n = n | |||
r1506 | |||
r1548 | self.navg = navg | ||
self.profileMargin = profileMargin | |||
self.thHistOutlier = thHistOutlier | |||
self.__profIndex = 0 | |||
self.buffer = None | |||
self._ipp = dataOut.ippSeconds | |||
self.n_prof_released = 0 | |||
self.heightList = dataOut.heightList | |||
self.init_prof = 0 | |||
self.end_prof = 0 | |||
self.__count_exec = 0 | |||
self.__profIndex = 0 | |||
self.first_utcBlock = None | |||
#self.__dh = dataOut.heightList[1] - dataOut.heightList[0] | |||
minHei = minHei | |||
maxHei = maxHei | |||
if minHei==None : | |||
minHei = dataOut.heightList[0] | |||
if maxHei==None : | |||
maxHei = dataOut.heightList[-1] | |||
self.minHei_idx,self.maxHei_idx = getHei_index(minHei, maxHei, dataOut.heightList) | |||
r1506 | self.min_ref, self.max_ref = getHei_index(minRef, maxRef, dataOut.heightList) | ||
r1548 | self.nChannels = dataOut.nChannels | ||
self.nHeights = dataOut.nHeights | |||
self.test_counter = 0 | |||
r1506 | self.thdB = thdB | ||
r1548 | def filterSatsProfiles(self): | ||
data = self.__buffer_data | |||
#print(data.shape) | |||
nChannels, profiles, heights = data.shape | |||
indexes=numpy.zeros([], dtype=int) | |||
outliers_IDs=[] | |||
for c in range(nChannels): | |||
#print(self.min_ref,self.max_ref) | |||
noise_ref = 10* numpy.log10((data[c,:,self.min_ref:self.max_ref] * numpy.conjugate(data[c,:,self.min_ref:self.max_ref])).real) | |||
#print("Noise ",numpy.percentile(noise_ref,95)) | |||
p95 = numpy.percentile(noise_ref,95) | |||
noise_ref = noise_ref.mean() | |||
#print("Noise ",noise_ref | |||
r1506 | |||
r1528 | |||
r1548 | for h in range(self.minHei_idx, self.maxHei_idx): | ||
power = 10* numpy.log10((data[c,:,h] * numpy.conjugate(data[c,:,h])).real) | |||
#th = noise_ref + self.thdB | |||
th = noise_ref + 1.5*(p95-noise_ref) | |||
index = numpy.where(power > th ) | |||
if index[0].size > 10 and index[0].size < int(self.navg*profiles): | |||
indexes = numpy.append(indexes, index[0]) | |||
#print(index[0]) | |||
#print(index[0]) | |||
# fig,ax = plt.subplots() | |||
# #ax.set_title(str(k)+" "+str(j)) | |||
# x=range(len(power)) | |||
# ax.scatter(x,power) | |||
# #ax.axvline(index) | |||
# plt.grid() | |||
# plt.show() | |||
#print(indexes) | |||
r1528 | |||
r1548 | #outliers_IDs = outliers_IDs.astype(numpy.dtype('int64')) | ||
#outliers_IDs = numpy.unique(outliers_IDs) | |||
r1506 | |||
r1548 | outs_lines = numpy.unique(indexes) | ||
r1506 | |||
r1548 | #Agrupando el histograma de outliers, | ||
my_bins = numpy.linspace(0,int(profiles), int(profiles/100), endpoint=True) | |||
r1506 | |||
r1548 | hist, bins = numpy.histogram(outs_lines,bins=my_bins) | ||
hist_outliers_indexes = numpy.where(hist > self.thHistOutlier) #es outlier | |||
hist_outliers_indexes = hist_outliers_indexes[0] | |||
r1553 | # if len(hist_outliers_indexes>0): | ||
# hist_outliers_indexes = numpy.append(hist_outliers_indexes,hist_outliers_indexes[-1]+1) | |||
r1548 | #print(hist_outliers_indexes) | ||
#print(bins, hist_outliers_indexes) | |||
bins_outliers_indexes = [int(i) for i in (bins[hist_outliers_indexes])] # | |||
r1553 | outlier_loc_index = [] | ||
# for n in range(len(bins_outliers_indexes)): | |||
# for e in range(bins_outliers_indexes[n]-self.profileMargin,bins_outliers_indexes[n]+ self.profileMargin): | |||
# outlier_loc_index.append(e) | |||
r1548 | outlier_loc_index = [e for n in range(len(bins_outliers_indexes)) for e in range(bins_outliers_indexes[n]-self.profileMargin,bins_outliers_indexes[n]+ profiles//100 + self.profileMargin) ] | ||
outlier_loc_index = numpy.asarray(outlier_loc_index) | |||
r1553 | |||
#print("outliers Ids: ", outlier_loc_index, outlier_loc_index.shape) | |||
outlier_loc_index = outlier_loc_index[ (outlier_loc_index >= 0) & (outlier_loc_index<profiles)] | |||
#print("outliers final: ", outlier_loc_index) | |||
from matplotlib import pyplot as plt | |||
x, y = numpy.meshgrid(numpy.arange(profiles), self.heightList) | |||
fig, ax = plt.subplots(1,2,figsize=(8, 6)) | |||
dat = data[0,:,:].real | |||
dat = 10* numpy.log10((data[0,:,:] * numpy.conjugate(data[0,:,:])).real) | |||
m = numpy.nanmean(dat) | |||
o = numpy.nanstd(dat) | |||
#print(m, o, x.shape, y.shape) | |||
#c = ax[0].pcolormesh(x, y, dat.T, cmap ='YlGnBu', vmin = (m-2*o), vmax = (m+2*o)) | |||
c = ax[0].pcolormesh(x, y, dat.T, cmap ='YlGnBu', vmin = 50, vmax = 75) | |||
ax[0].vlines(outs_lines,200,600, linestyles='dashed', label = 'outs', color='w') | |||
fig.colorbar(c) | |||
ax[0].vlines(outlier_loc_index,650,750, linestyles='dashed', label = 'outs', color='r') | |||
ax[1].hist(outs_lines,bins=my_bins) | |||
plt.show() | |||
self.outliers_IDs_list = outlier_loc_index | |||
#print("outs list: ", self.outliers_IDs_list) | |||
return data | |||
def fillBuffer(self, data, datatime): | |||
if self.__profIndex == 0: | |||
self.__buffer_data = data.copy() | |||
else: | |||
self.__buffer_data = numpy.concatenate((self.__buffer_data,data), axis=1)#en perfiles | |||
self.__profIndex += 1 | |||
self.__buffer_times.append(datatime) | |||
def getData(self, data, datatime=None): | |||
if self.__profIndex == 0: | |||
self.__initime = datatime | |||
self.__dataReady = False | |||
self.fillBuffer(data, datatime) | |||
dataBlock = None | |||
if self.__profIndex == self.n: | |||
#print("apnd : ",data) | |||
dataBlock = self.filterSatsProfiles() | |||
self.__dataReady = True | |||
return dataBlock | |||
if dataBlock is None: | |||
return None, None | |||
return dataBlock | |||
def releaseBlock(self): | |||
if self.n % self.lenProfileOut != 0: | |||
raise ValueError("lenProfileOut %d must be submultiple of nProfiles %d" %(self.lenProfileOut, self.n)) | |||
return None | |||
data = self.buffer[:,self.init_prof:self.end_prof:,:] #ch, prof, alt | |||
self.init_prof = self.end_prof | |||
self.end_prof += self.lenProfileOut | |||
#print("data release shape: ",dataOut.data.shape, self.end_prof) | |||
self.n_prof_released += 1 | |||
return data | |||
def run(self, dataOut, n=None, navg=0.8, nProfilesOut=1, profile_margin=50, | |||
th_hist_outlier=15,minHei=None, maxHei=None, minRef=None, maxRef=None, thdB=10): | |||
if not self.isConfig: | |||
#print("init p idx: ", dataOut.profileIndex ) | |||
self.setup(dataOut,n=n, navg=navg,profileMargin=profile_margin,thHistOutlier=th_hist_outlier, | |||
minHei=minHei, maxHei=maxHei, minRef=minRef, maxRef=maxRef, thdB=thdB) | |||
self.isConfig = True | |||
dataBlock = None | |||
if not dataOut.buffer_empty: #hay datos acumulados | |||
if self.init_prof == 0: | |||
self.n_prof_released = 0 | |||
self.lenProfileOut = nProfilesOut | |||
dataOut.flagNoData = False | |||
#print("tp 2 ",dataOut.data.shape) | |||
self.init_prof = 0 | |||
self.end_prof = self.lenProfileOut | |||
dataOut.nProfiles = self.lenProfileOut | |||
if nProfilesOut == 1: | |||
dataOut.flagDataAsBlock = False | |||
else: | |||
dataOut.flagDataAsBlock = True | |||
#print("prof: ",self.init_prof) | |||
dataOut.flagNoData = False | |||
if numpy.isin(self.n_prof_released, self.outliers_IDs_list): | |||
#print("omitting: ", self.n_prof_released) | |||
dataOut.flagNoData = True | |||
dataOut.ippSeconds = self._ipp | |||
dataOut.utctime = self.first_utcBlock + self.init_prof*self._ipp | |||
# print("time: ", dataOut.utctime, self.first_utcBlock, self.init_prof,self._ipp,dataOut.ippSeconds) | |||
#dataOut.data = self.releaseBlock() | |||
#########################################################3 | |||
if self.n % self.lenProfileOut != 0: | |||
raise ValueError("lenProfileOut %d must be submultiple of nProfiles %d" %(self.lenProfileOut, self.n)) | |||
return None | |||
dataOut.data = None | |||
if nProfilesOut == 1: | |||
dataOut.data = self.buffer[:,self.end_prof-1,:] #ch, prof, alt | |||
else: | |||
dataOut.data = self.buffer[:,self.init_prof:self.end_prof,:] #ch, prof, alt | |||
self.init_prof = self.end_prof | |||
self.end_prof += self.lenProfileOut | |||
#print("data release shape: ",dataOut.data.shape, self.end_prof, dataOut.flagNoData) | |||
self.n_prof_released += 1 | |||
if self.end_prof >= (self.n +self.lenProfileOut): | |||
self.init_prof = 0 | |||
self.__profIndex = 0 | |||
self.buffer = None | |||
dataOut.buffer_empty = True | |||
self.outliers_IDs_list = [] | |||
self.n_prof_released = 0 | |||
dataOut.flagNoData = False #enviar ultimo aunque sea outlier :( | |||
#print("cleaning...", dataOut.buffer_empty) | |||
dataOut.profileIndex = 0 #self.lenProfileOut | |||
#################################################################### | |||
return dataOut | |||
#print("tp 223 ",dataOut.data.shape) | |||
dataOut.flagNoData = True | |||
try: | |||
#dataBlock = self.getData(dataOut.data.reshape(self.nChannels,1,self.nHeights), dataOut.utctime) | |||
dataBlock = self.getData(numpy.reshape(dataOut.data,(self.nChannels,1,self.nHeights)), dataOut.utctime) | |||
self.__count_exec +=1 | |||
except Exception as e: | |||
print("Error getting profiles data",self.__count_exec ) | |||
print(e) | |||
sys.exit() | |||
if self.__dataReady: | |||
#print("omitting: ", len(self.outliers_IDs_list)) | |||
self.__count_exec = 0 | |||
#dataOut.data = | |||
#self.buffer = numpy.flip(dataBlock, axis=1) | |||
self.buffer = dataBlock | |||
self.first_utcBlock = self.__initime | |||
dataOut.utctime = self.__initime | |||
dataOut.nProfiles = self.__profIndex | |||
#dataOut.flagNoData = False | |||
self.init_prof = 0 | |||
self.__profIndex = 0 | |||
self.__initime = None | |||
dataBlock = None | |||
self.__buffer_times = [] | |||
dataOut.error = False | |||
dataOut.useInputBuffer = True | |||
dataOut.buffer_empty = False | |||
#print("1 ch: {} prof: {} hs: {}".format(int(dataOut.nChannels),int(dataOut.nProfiles),int(dataOut.nHeights))) | |||
#print(self.__count_exec) | |||
return dataOut | |||
class RemoveProfileSats2(Operation): | |||
''' | |||
Escrito: Joab Apaza | |||
Omite los perfiles contaminados con señal de satélites, usando una altura de referencia | |||
r1559 | promedia todas las alturas para los cálculos | ||
r1590 | In: | ||
n = Cantidad de perfiles que se acumularan, usualmente 10 segundos | |||
navg = Porcentaje de perfiles que puede considerarse como satélite, máximo 90% | |||
minHei = | |||
minRef = | |||
maxRef = | |||
nBins = | |||
profile_margin = | |||
th_hist_outlier = | |||
nProfilesOut = | |||
Pensado para remover interferencias de las YAGI, se puede adaptar a otras interferencias | |||
remYagi = Activa la funcion de remoción de interferencias de la YAGI | |||
nProfYagi = Cantidad de perfiles que son afectados, acorde NTX de la YAGI | |||
offYagi = | |||
        minHJULIA = Altura mínima donde aparece la señal referencia de JULIA (-50)
maxHJULIA = Altura máxima donde aparece la señal referencia de JULIA (-15) | |||
debug = Activa los gráficos, recomendable ejecutar para ajustar los parámetros | |||
        para un experimento en específico.
** se modifica para remover interferencias puntuales, es decir, desde otros radares. | |||
Inicialmente se ha configurado para omitir también los perfiles de la YAGI en los datos | |||
de AMISR-ISR. | |||
r1553 | Out: | ||
profile clean | |||
''' | |||
__buffer_data = [] | |||
__buffer_times = [] | |||
buffer = None | |||
outliers_IDs_list = [] | |||
__slots__ = ('n','navg','profileMargin','thHistOutlier','minHei_idx','maxHei_idx','nHeights', | |||
r1655 | 'first_utcBlock','__profIndex','init_prof','end_prof','lenProfileOut','nChannels','cohFactor', | ||
r1657 | '__count_exec','__initime','__dataReady','__ipp', 'minRef', 'maxRef', 'debug','prev_pnoise','thfactor') | ||
r1553 | def __init__(self, **kwargs): | ||
Operation.__init__(self, **kwargs) | |||
self.isConfig = False | |||
r1590 | self.currentTime = None | ||
r1553 | |||
r1655 | def setup(self,dataOut, n=None , navg=0.9, profileMargin=50,thHistOutlier=15,minHei=None, maxHei=None, nBins=10, | ||
r1590 | minRef=None, maxRef=None, debug=False, remYagi=False, nProfYagi = 0, offYagi=0, minHJULIA=None, maxHJULIA=None, | ||
r1657 | idate=None,startH=None,endH=None, thfactor=1 ): | ||
r1553 | |||
if n == None and timeInterval == None: | |||
raise ValueError("nprofiles or timeInterval should be specified ...") | |||
if n != None: | |||
self.n = n | |||
self.navg = navg | |||
self.profileMargin = profileMargin | |||
self.thHistOutlier = thHistOutlier | |||
self.__profIndex = 0 | |||
self.buffer = None | |||
self._ipp = dataOut.ippSeconds | |||
self.n_prof_released = 0 | |||
self.heightList = dataOut.heightList | |||
self.init_prof = 0 | |||
self.end_prof = 0 | |||
self.__count_exec = 0 | |||
self.__profIndex = 0 | |||
self.first_utcBlock = None | |||
self.prev_pnoise = None | |||
r1590 | self.nBins = nBins | ||
r1657 | self.thfactor = thfactor | ||
r1553 | #self.__dh = dataOut.heightList[1] - dataOut.heightList[0] | ||
minHei = minHei | |||
maxHei = maxHei | |||
if minHei==None : | |||
minHei = dataOut.heightList[0] | |||
if maxHei==None : | |||
maxHei = dataOut.heightList[-1] | |||
self.minHei_idx,self.maxHei_idx = getHei_index(minHei, maxHei, dataOut.heightList) | |||
self.min_ref, self.max_ref = getHei_index(minRef, maxRef, dataOut.heightList) | |||
self.nChannels = dataOut.nChannels | |||
self.nHeights = dataOut.nHeights | |||
self.test_counter = 0 | |||
r1590 | self.debug = debug | ||
self.remYagi = remYagi | |||
r1655 | self.cohFactor = dataOut.nCohInt | ||
r1590 | if self.remYagi : | ||
if minHJULIA==None or maxHJULIA==None: | |||
raise ValueError("Parameters minHYagi and minHYagi are necessary!") | |||
return | |||
if idate==None or startH==None or endH==None: | |||
raise ValueError("Date and hour parameters are necessary!") | |||
return | |||
self.minHJULIA_idx,self.maxHJULIA_idx = getHei_index(minHJULIA, maxHJULIA, dataOut.heightList) | |||
self.offYagi = offYagi | |||
self.nTxYagi = nProfYagi | |||
self.startTime = datetime.datetime.combine(idate,startH) | |||
self.endTime = datetime.datetime.combine(idate,endH) | |||
r1655 | log.warning("Be careful with the selection of parameters for sats removal! It is avisable to \ | ||
r1590 | activate the debug parameter in this operation for calibration", self.name) | ||
r1553 | |||
    def filterSatsProfiles(self):
        '''
        Flag profile indexes contaminated by satellites (and optionally by the
        YAGI/JULIA interference) inside the accumulated block.

        Operates on self.__buffer_data of shape (nChannels, profiles, heights).
        The flagged profile indexes are stored in self.outliers_IDs_list; the
        accumulated buffer itself is returned unmodified.
        '''
        data = self.__buffer_data.copy()
        #print(data.shape)
        nChannels, profiles, heights = data.shape
        # start from empty integer index arrays (0-d zeros + delete of the
        # spurious first element)
        indexes=numpy.zeros([], dtype=int)
        indexes = numpy.delete(indexes,0)

        indexesYagi=numpy.zeros([], dtype=int)
        indexesYagi = numpy.delete(indexesYagi,0)

        indexesYagi_up=numpy.zeros([], dtype=int)
        indexesYagi_up = numpy.delete(indexesYagi_up,0)
        indexesYagi_down=numpy.zeros([], dtype=int)
        indexesYagi_down = numpy.delete(indexesYagi_down,0)


        indexesJULIA=numpy.zeros([], dtype=int)
        indexesJULIA = numpy.delete(indexesJULIA,0)

        outliers_IDs=[]

        # histogram bin width, in profiles
        div = profiles//self.nBins

        for c in range(nChannels):
            #print(self.min_ref,self.max_ref)
            import scipy.signal
            # 3rd-order Butterworth low-pass used to smooth the power series
            b, a = scipy.signal.butter(3, 0.5)
            #noise_ref = (data[c,:,self.min_ref:self.max_ref] * numpy.conjugate(data[c,:,self.min_ref:self.max_ref]))
            noise_ref = numpy.abs(data[c,:,self.min_ref:self.max_ref])
            lnoise = len(noise_ref[0,:])
            #print(noise_ref.shape)
            # mean amplitude over the reference heights, per profile
            noise_ref = noise_ref.mean(axis=1)
            #fnoise = noise_ref
            fnoise = scipy.signal.filtfilt(b, a, noise_ref)
            #noise_refdB = 10* numpy.log10(noise_ref)
            #print("Noise ",numpy.percentile(noise_ref,95))
            p95 = numpy.percentile(fnoise,95)
            mean_noise = fnoise.mean()

            # smooth the noise estimate across consecutive blocks: a new value
            # within +/-10% of the previous one is blended in, otherwise the
            # previous estimate is kept
            if self.prev_pnoise != None:
                if mean_noise < (1.1 * self.prev_pnoise) and mean_noise > (0.9 * self.prev_pnoise):
                    mean_noise = 0.9*mean_noise + 0.1*self.prev_pnoise
                    self.prev_pnoise = mean_noise
                else:
                    mean_noise = self.prev_pnoise
            else:
                self.prev_pnoise = mean_noise
            std = fnoise.std()+ fnoise.mean()
            #power = (data[c,:,self.minHei_idx:self.maxHei_idx] * numpy.conjugate(data[c,:,self.minHei_idx:self.maxHei_idx]))
            power = numpy.abs(data[c,:,self.minHei_idx:self.maxHei_idx])
            npower = len(power[0,:])
            #print(power.shape)
            # mean amplitude over the analysis heights, per profile
            power = power.mean(axis=1)

            fpower = scipy.signal.filtfilt(b, a, power)
            #print(power.shape)
            #powerdB = 10* numpy.log10(power)
            #th = p95 * self.thfactor
            # threshold relative to the (smoothed) mean noise level
            th = mean_noise * self.thfactor
            index = numpy.where(fpower > th )
            #print("Noise ",mean_noise, p95)
            #print(index)


            # keep the detection only if it spans less than navg of the block
            if index[0].size <= int(self.navg*profiles): #outliers from sats
                indexes = numpy.append(indexes, index[0])
            # also flag abnormally low power (transmitter off)
            index2low = numpy.where(fpower < (th*0.5 )) #outliers from no TX
            if index2low[0].size <= int(self.navg*profiles):
                indexes = numpy.append(indexes, index2low[0])
            #print("sdas ", noise_ref.mean())
            if self.remYagi :
                #print(self.minHJULIA_idx, self.maxHJULIA_idx)
                # JULIA reference power marks when the interfering TX is active
                powerJULIA = (data[c,:,self.minHJULIA_idx:self.maxHJULIA_idx] * numpy.conjugate(data[c,:,self.minHJULIA_idx:self.maxHJULIA_idx])).real
                powerJULIA = powerJULIA.mean(axis=1)
                th_JULIA = powerJULIA.mean()*0.85
                indexJULIA = numpy.where(powerJULIA >= th_JULIA )

                indexesJULIA= numpy.append(indexesJULIA, indexJULIA[0])

                # fig, ax = plt.subplots()
                # ax.plot(powerJULIA)
                # ax.axhline(th_JULIA, color='r')
                # plt.grid()
                # plt.show()

            if self.debug:
                fig, ax = plt.subplots()
                ax.plot(fpower, label="power")
                #ax.plot(fnoise, label="noise ref")
                ax.axhline(th, color='g', label="th")
                #ax.axhline(std, color='b', label="mean")
                ax.legend()
                plt.grid()
                plt.show()

            #print(indexes)

        #outliers_IDs = outliers_IDs.astype(numpy.dtype('int64'))
        #outliers_IDs = numpy.unique(outliers_IDs)

        # print(indexesJULIA)
        # NOTE(review): len(indexesJULIA > 1) equals len(indexesJULIA), so this
        # only checks non-emptiness; 'len(indexesJULIA) > 1' was probably
        # intended -- confirm before changing
        if len(indexesJULIA > 1):
            # derive the rising/falling edges of the JULIA activity and expand
            # them by the Yagi TX profile count
            iJ = indexesJULIA
            locs = [ (iJ[n]-iJ[n-1]) > 5  for n in range(len(iJ))]
            locs_2 = numpy.where(locs)[0]
            #print(locs_2, indexesJULIA[locs_2-1])
            indexesYagi_up = numpy.append(indexesYagi_up, indexesJULIA[locs_2-1])
            indexesYagi_down = numpy.append(indexesYagi_down, indexesJULIA[locs_2])

            indexesYagi_up = numpy.append(indexesYagi_up,indexesJULIA[-1])
            indexesYagi_down = numpy.append(indexesYagi_down,indexesJULIA[0])

            indexesYagi_up = numpy.unique(indexesYagi_up)
            indexesYagi_down = numpy.unique(indexesYagi_down)

            aux_ind = [ numpy.arange( (self.offYagi + k)+1, (self.offYagi + k + self.nTxYagi)+1, 1, dtype=int) for k in indexesYagi_up]
            indexesYagi_up = (numpy.asarray(aux_ind)).flatten()

            aux_ind2 = [ numpy.arange( (k - self.nTxYagi)+1, k+1 , 1, dtype=int) for k in indexesYagi_down]
            indexesYagi_down = (numpy.asarray(aux_ind2)).flatten()

            indexesYagi = numpy.append(indexesYagi,indexesYagi_up)
            indexesYagi = numpy.append(indexesYagi,indexesYagi_down)

            indexesYagi = indexesYagi[ (indexesYagi >= 0) & (indexesYagi<profiles)]
            indexesYagi = numpy.unique(indexesYagi)

        #print("indexes: " ,indexes)
        outs_lines = numpy.unique(indexes)
        #print(outs_lines)

        # group the outlier histogram
        my_bins = numpy.linspace(0,int(profiles), div, endpoint=True)

        hist, bins = numpy.histogram(outs_lines,bins=my_bins)
        #print("hist: ",hist)
        hist_outliers_indexes = numpy.where(hist >= self.thHistOutlier)[0] # it is an outlier
        # print(hist_outliers_indexes)
        # NOTE(review): same pattern as above -- len(x>0) only checks that the
        # array is non-empty, not that it has positive entries
        if len(hist_outliers_indexes>0):
            hist_outliers_indexes = numpy.append(hist_outliers_indexes,hist_outliers_indexes[-1]+1)

        bins_outliers_indexes = [int(i)+1 for i in (bins[hist_outliers_indexes])] #
        outlier_loc_index = []
        #print("out indexes ", bins_outliers_indexes)

        # if len(bins_outliers_indexes) <= 2:
        #     extprof = 0
        # else:
        #     extprof = self.profileMargin
        # expand every flagged bin by profileMargin profiles on each side
        extprof = self.profileMargin
        outlier_loc_index = [e for n in range(len(bins_outliers_indexes)) for e in range(bins_outliers_indexes[n]-extprof,bins_outliers_indexes[n] + extprof) ]
        outlier_loc_index = numpy.asarray(outlier_loc_index)
        # if len(outlier_loc_index)>1:
        #     ipmax = numpy.where(fpower==fpower.max())[0]
        #     print("pmax: ",ipmax)



        #print("outliers Ids: ", outlier_loc_index, outlier_loc_index.shape)
        # clip to valid profile indexes
        outlier_loc_index = outlier_loc_index[ (outlier_loc_index >= 0) & (outlier_loc_index<profiles)]
        #print("outliers final: ", outlier_loc_index)

        if self.debug:
            # blocking diagnostic plots, one row per channel
            x, y = numpy.meshgrid(numpy.arange(profiles), self.heightList)
            fig, ax = plt.subplots(nChannels,2,figsize=(8, 6))

            for i in range(nChannels):
                dat = data[i,:,:].real
                dat = 10* numpy.log10((data[i,:,:] * numpy.conjugate(data[i,:,:])).real)
                m = numpy.nanmean(dat)
                o = numpy.nanstd(dat)
                if nChannels>1:
                    c = ax[i][0].pcolormesh(x, y, dat.T, cmap ='jet', vmin = 60, vmax = 70)
                    ax[i][0].vlines(outs_lines,650,700, linestyles='dashed', label = 'outs', color='w')
                    #fig.colorbar(c)
                    ax[i][0].vlines(outlier_loc_index,700,750, linestyles='dashed', label = 'outs', color='r')
                    ax[i][1].hist(outs_lines,bins=my_bins)
                    if self.remYagi :
                        ax[0].vlines(indexesYagi,750,850, linestyles='dashed', label = 'yagi', color='m')
                else:
                    c = ax[0].pcolormesh(x, y, dat.T, cmap ='jet', vmin = 60, vmax = (70+2*self.cohFactor))
                    ax[0].vlines(outs_lines,650,700, linestyles='dashed', label = 'outs', color='w')
                    #fig.colorbar(c)
                    ax[0].vlines(outlier_loc_index,700,750, linestyles='dashed', label = 'outs', color='r')
                    ax[1].hist(outs_lines,bins=my_bins)
                    if self.remYagi :
                        ax[0].vlines(indexesYagi,750,850, linestyles='dashed', label = 'yagi', color='m')

            plt.show()

        # NOTE(review): both comparisons use '<', so Yagi indexes are applied
        # only when currentTime is BEFORE the [startTime, endTime] window;
        # 'startTime < currentTime < endTime' may have been intended -- confirm
        if self.remYagi and (self.currentTime < self.startTime and self.currentTime < self.endTime):
            outlier_loc_index = numpy.append(outlier_loc_index,indexesYagi)

        self.outliers_IDs_list = numpy.unique(outlier_loc_index)
        #print("outs list: ", self.outliers_IDs_list)
        return self.__buffer_data
r1548 | |||
def fillBuffer(self, data, datatime): | |||
if self.__profIndex == 0: | |||
self.__buffer_data = data.copy() | |||
r1506 | else: | ||
r1548 | self.__buffer_data = numpy.concatenate((self.__buffer_data,data), axis=1)#en perfiles | ||
self.__profIndex += 1 | |||
self.__buffer_times.append(datatime) | |||
r1506 | |||
r1548 | def getData(self, data, datatime=None): | ||
r1506 | |||
r1548 | if self.__profIndex == 0: | ||
self.__initime = datatime | |||
r1506 | |||
r1548 | self.__dataReady = False | ||
self.fillBuffer(data, datatime) | |||
dataBlock = None | |||
if self.__profIndex == self.n: | |||
#print("apnd : ",data) | |||
dataBlock = self.filterSatsProfiles() | |||
self.__dataReady = True | |||
return dataBlock | |||
if dataBlock is None: | |||
return None, None | |||
r1506 | |||
r1548 | |||
return dataBlock | |||
def releaseBlock(self): | |||
if self.n % self.lenProfileOut != 0: | |||
raise ValueError("lenProfileOut %d must be submultiple of nProfiles %d" %(self.lenProfileOut, self.n)) | |||
return None | |||
data = self.buffer[:,self.init_prof:self.end_prof:,:] #ch, prof, alt | |||
self.init_prof = self.end_prof | |||
self.end_prof += self.lenProfileOut | |||
#print("data release shape: ",dataOut.data.shape, self.end_prof) | |||
self.n_prof_released += 1 | |||
return data | |||
r1590 | def run(self, dataOut, n=None, navg=0.9, nProfilesOut=1, profile_margin=50, th_hist_outlier=15,minHei=None,nBins=10, | ||
maxHei=None, minRef=None, maxRef=None, debug=False, remYagi=False, nProfYagi = 0, offYagi=0, minHJULIA=None, maxHJULIA=None, | |||
r1657 | idate=None,startH=None,endH=None, thfactor=1): | ||
r1506 | |||
if not self.isConfig: | |||
r1548 | #print("init p idx: ", dataOut.profileIndex ) | ||
r1590 | self.setup(dataOut,n=n, navg=navg,profileMargin=profile_margin,thHistOutlier=th_hist_outlier,minHei=minHei, | ||
nBins=10, maxHei=maxHei, minRef=minRef, maxRef=maxRef, debug=debug, remYagi=remYagi, nProfYagi = nProfYagi, | |||
r1657 | offYagi=offYagi, minHJULIA=minHJULIA,maxHJULIA=maxHJULIA,idate=idate,startH=startH,endH=endH, thfactor=thfactor) | ||
r1590 | |||
r1506 | self.isConfig = True | ||
r1548 | dataBlock = None | ||
r1590 | self.currentTime = datetime.datetime.fromtimestamp(dataOut.utctime) | ||
r1548 | |||
if not dataOut.buffer_empty: #hay datos acumulados | |||
if self.init_prof == 0: | |||
self.n_prof_released = 0 | |||
self.lenProfileOut = nProfilesOut | |||
dataOut.flagNoData = False | |||
#print("tp 2 ",dataOut.data.shape) | |||
self.init_prof = 0 | |||
self.end_prof = self.lenProfileOut | |||
dataOut.nProfiles = self.lenProfileOut | |||
if nProfilesOut == 1: | |||
dataOut.flagDataAsBlock = False | |||
else: | |||
dataOut.flagDataAsBlock = True | |||
#print("prof: ",self.init_prof) | |||
dataOut.flagNoData = False | |||
if numpy.isin(self.n_prof_released, self.outliers_IDs_list): | |||
#print("omitting: ", self.n_prof_released) | |||
dataOut.flagNoData = True | |||
dataOut.ippSeconds = self._ipp | |||
dataOut.utctime = self.first_utcBlock + self.init_prof*self._ipp | |||
# print("time: ", dataOut.utctime, self.first_utcBlock, self.init_prof,self._ipp,dataOut.ippSeconds) | |||
#dataOut.data = self.releaseBlock() | |||
#########################################################3 | |||
if self.n % self.lenProfileOut != 0: | |||
raise ValueError("lenProfileOut %d must be submultiple of nProfiles %d" %(self.lenProfileOut, self.n)) | |||
return None | |||
r1553 | dataOut.data = None | ||
if nProfilesOut == 1: | |||
dataOut.data = self.buffer[:,self.end_prof-1,:] #ch, prof, alt | |||
else: | |||
dataOut.data = self.buffer[:,self.init_prof:self.end_prof,:] #ch, prof, alt | |||
r1548 | |||
self.init_prof = self.end_prof | |||
self.end_prof += self.lenProfileOut | |||
#print("data release shape: ",dataOut.data.shape, self.end_prof, dataOut.flagNoData) | |||
self.n_prof_released += 1 | |||
if self.end_prof >= (self.n +self.lenProfileOut): | |||
self.init_prof = 0 | |||
self.__profIndex = 0 | |||
self.buffer = None | |||
dataOut.buffer_empty = True | |||
self.outliers_IDs_list = [] | |||
self.n_prof_released = 0 | |||
dataOut.flagNoData = False #enviar ultimo aunque sea outlier :( | |||
#print("cleaning...", dataOut.buffer_empty) | |||
r1566 | dataOut.profileIndex = self.__profIndex | ||
r1548 | #################################################################### | ||
return dataOut | |||
r1506 | |||
r1548 | #print("tp 223 ",dataOut.data.shape) | ||
dataOut.flagNoData = True | |||
try: | |||
#dataBlock = self.getData(dataOut.data.reshape(self.nChannels,1,self.nHeights), dataOut.utctime) | |||
dataBlock = self.getData(numpy.reshape(dataOut.data,(self.nChannels,1,self.nHeights)), dataOut.utctime) | |||
self.__count_exec +=1 | |||
except Exception as e: | |||
print("Error getting profiles data",self.__count_exec ) | |||
print(e) | |||
sys.exit() | |||
if self.__dataReady: | |||
#print("omitting: ", len(self.outliers_IDs_list)) | |||
self.__count_exec = 0 | |||
#dataOut.data = | |||
#self.buffer = numpy.flip(dataBlock, axis=1) | |||
self.buffer = dataBlock | |||
self.first_utcBlock = self.__initime | |||
dataOut.utctime = self.__initime | |||
dataOut.nProfiles = self.__profIndex | |||
#dataOut.flagNoData = False | |||
self.init_prof = 0 | |||
self.__profIndex = 0 | |||
self.__initime = None | |||
dataBlock = None | |||
self.__buffer_times = [] | |||
dataOut.error = False | |||
dataOut.useInputBuffer = True | |||
dataOut.buffer_empty = False | |||
#print("1 ch: {} prof: {} hs: {}".format(int(dataOut.nChannels),int(dataOut.nProfiles),int(dataOut.nHeights))) | |||
#print(self.__count_exec) | |||
r1506 | |||
r1465 | return dataOut | ||
r1579 | |||
class remHeightsIppInterf(Operation):
    """
    Zero out height bins contaminated by a second, interleaved transmitter
    (different IPP) during a configured time window.

    The interference heights are derived from the two IPPs, the two pulse
    widths (tx1, tx2) and the range resolution dh1; units presumably match
    dataOut.heightList (km) -- TODO confirm against callers.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.isConfig = False

        self.heights_indx = None   # 2-D array of height indices to blank
        self.heightsList = []      # start height of each interference window
        self.ipp1 = None
        self.ipp2 = None
        self.tx1 = None
        self.tx2 = None
        self.dh1 = None

    def setup(self, dataOut, ipp1=None, ipp2=None, tx1=None, tx2=None, dh1=None,
              idate=None, startH=None, endH=None):
        """Precompute the contaminated height indices and the active time window."""
        self.ipp1 = ipp1
        self.ipp2 = ipp2
        self.tx1 = tx1
        self.tx2 = tx2
        self.dh1 = dh1

        _maxIpp1R = dataOut.heightList.max()
        _n_repeats = int(_maxIpp1R / ipp2)          # interference repeats every ipp2 in range
        _init_hIntf = (tx1 + ipp2 / 2) + dh1        # first contaminated height
        _n_hIntf = int(tx2 / dh1)                   # contaminated bins per repeat
        self.heightsList = [_init_hIntf + k * ipp2 for k in range(_n_repeats)]

        heiList = dataOut.heightList
        # For each interference start height, take a window of (_n_hIntf + 2)
        # consecutive height bins beginning one bin below the nearest index.
        # (A previous duplicate computation of heights_indx that was
        # immediately overwritten has been removed.)
        self.heights_indx = [numpy.asarray([k for k in range(_n_hIntf + 2)]) + (getHei_index(h, h, heiList)[0] - 1)
                             for h in self.heightsList]
        self.heights_indx = numpy.asarray(self.heights_indx)

        self.isConfig = True
        self.startTime = datetime.datetime.combine(idate, startH)
        self.endTime = datetime.datetime.combine(idate, endH)

        log.warning("Heights set to zero (km): ", self.name)
        log.warning(str((dataOut.heightList[self.heights_indx].flatten())), self.name)
        log.warning("Be careful with the selection of heights for noise calculation!")

    def run(self, dataOut, ipp1=None, ipp2=None, tx1=None, tx2=None, dh1=None, idate=None,
            startH=None, endH=None):
        """Blank the precomputed interference heights while inside the time window."""
        # All kwargs are mandatory; locals() also holds self/dataOut, which are never None.
        if None in locals().values():
            log.warning('Missing kwargs, invalid values """None""" ', self.name)
            return dataOut

        if not self.isConfig:
            self.setup(dataOut, ipp1=ipp1, ipp2=ipp2, tx1=tx1, tx2=tx2, dh1=dh1,
                       idate=idate, startH=startH, endH=endH)

        dataOut.flagProfilesByRange = False
        currentTime = datetime.datetime.fromtimestamp(dataOut.utctime)
        if currentTime < self.startTime or currentTime > self.endTime:
            return dataOut

        for ch in range(dataOut.data.shape[0]):
            for hk in self.heights_indx.flatten():
                if dataOut.data.ndim < 3:
                    # single profile: data is (channels, heights)
                    dataOut.data[ch, hk] = 0.0 + 0.0j
                else:
                    # block: data is (channels, profiles, heights)
                    dataOut.data[ch, :, hk] = 0.0 + 0.0j

        dataOut.flagProfilesByRange = True

        return dataOut
r1660 | |||
class profiles2Block(Operation):
    '''
    Written by: Joab Apaza

    Accumulates single profiles into one block, either a fixed number of
    profiles (`n`) or a time span (`timeInterval`).

    Out:
        block
    '''

    # class-level defaults kept for backward compatibility; real state is
    # per-instance (initialized in __init__)
    isConfig = False
    __buffer_data = []
    __buffer_times = []
    __profIndex = 0
    __byTime = False
    __initime = None
    __lastdatatime = None
    buffer = None
    n = None
    __dataReady = False
    __nChannels = None
    __nHeis = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.isConfig = False
        # shadow the class-level defaults so instances never share mutable state
        self.__buffer_data = []
        self.__buffer_times = []
        self.__profIndex = 0
        self.__byTime = False
        self.__initime = None
        self.__lastdatatime = None
        self.buffer = None
        self.n = None
        self.__dataReady = False
        self.__nChannels = None
        self.__nHeis = None

    def setup(self, n=None, timeInterval=None):
        """Configure block size by profile count (`n`) or by `timeInterval`."""
        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval  # * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999                          # sentinel: profile count unbounded in time mode
            self.__byTime = True

        self.__profIndex = 0

    def fillBuffer(self, data, datatime):
        """Append one profile (concatenated along the profile axis) and its timestamp."""
        if self.__profIndex == 0:
            self.__buffer_data = data.copy()
        else:
            self.__buffer_data = numpy.concatenate((self.__buffer_data, data), axis=1)  # along profiles
        self.__profIndex += 1
        self.__buffer_times.append(datatime)

    def getData(self, data, datatime=None):
        """
        Feed one profile into the accumulator.

        Returns (block, times) when a block is complete, otherwise (None, None).
        """
        if self.__initime is None:
            self.__initime = datatime

        if data.ndim < 3:
            data = data.reshape(self.__nChannels, 1, self.__nHeis)

        if self.__byTime:
            dataBlock = self.byTime(data, datatime)
        else:
            dataBlock = self.byProfiles(data, datatime)

        self.__lastdatatime = datatime

        if dataBlock is None:
            return None, None
        return dataBlock, self.__buffer_times

    def byProfiles(self, data, datatime):
        """Accumulate until `n` profiles are buffered; then return the block."""
        self.__dataReady = False
        dataBlock = None

        self.fillBuffer(data, datatime)

        if self.__profIndex == self.n:
            dataBlock = self.__buffer_data
            self.__dataReady = True

        return dataBlock

    def byTime(self, data, datatime):
        """Accumulate until `timeInterval` has elapsed; then return the block."""
        self.__dataReady = False
        dataBlock = None

        self.fillBuffer(data, datatime)

        if (datatime - self.__initime) >= self.__integrationtime:
            dataBlock = self.__buffer_data
            self.n = self.__profIndex
            self.__dataReady = True

        return dataBlock

    def run(self, dataOut, n=None, timeInterval=None, **kwargs):

        if not self.isConfig:
            self.setup(n=n, timeInterval=timeInterval, **kwargs)
            self.__nChannels = dataOut.nChannels
            self.__nHeis = len(dataOut.heightList)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            # data already arrives as a block [nChannels, nProfiles, nHeis]
            raise ValueError("The data is already a block")
        else:
            dataBlock, timeBlock = self.getData(dataOut.data, dataOut.utctime)
            dataOut.flagNoData = True

            if self.__dataReady:
                dataOut.data = dataBlock
                dataOut.flagDataAsBlock = True
                dataOut.utctime = timeBlock[-1]
                dataOut.nProfiles = self.__profIndex
                dataOut.flagNoData = False
                # reset accumulator state; __buffer_times is cleared here too
                # (it previously grew without bound across blocks)
                self.__profIndex = 0
                self.__initime = None
                self.__buffer_times = []

        return dataOut