diff --git a/schainpy/controller.py b/schainpy/controller.py index 8514d8e..ede0310 100644 --- a/schainpy/controller.py +++ b/schainpy/controller.py @@ -57,7 +57,7 @@ def MPProject(project, n=cpu_count()): nFiles = len(files) if nFiles == 0: continue - skip = int(math.ceil(nFiles / n)) + skip = int(math.ceil(nFiles / n)) while nFiles > cursor * skip: rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor, skip=skip) @@ -81,11 +81,11 @@ def MPProject(project, n=cpu_count()): time.sleep(3) def wait(context): - + time.sleep(1) c = zmq.Context() receiver = c.socket(zmq.SUB) - receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id)) + receiver.connect('ipc:///tmp/schain_{}_pub'.format(self.id)) receiver.setsockopt(zmq.SUBSCRIBE, self.id.encode()) msg = receiver.recv_multipart()[1] context.terminate() @@ -262,7 +262,7 @@ class ParameterConf(): parmElement.set('name', self.name) parmElement.set('value', self.value) parmElement.set('format', self.format) - + def readXml(self, parmElement): self.id = parmElement.get('id') @@ -417,7 +417,7 @@ class OperationConf(): self.name = opElement.get('name') self.type = opElement.get('type') self.priority = opElement.get('priority') - self.project_id = str(project_id) + self.project_id = str(project_id) # Compatible with old signal chain version # Use of 'run' method instead 'init' @@ -476,7 +476,7 @@ class ProcUnitConf(): self.id = None self.datatype = None self.name = None - self.inputId = None + self.inputId = None self.opConfObjList = [] self.procUnitObj = None self.opObjDict = {} @@ -497,7 +497,7 @@ class ProcUnitConf(): return self.id - def updateId(self, new_id): + def updateId(self, new_id): ''' new_id = int(parentId) * 10 + (int(self.id) % 10) new_inputId = int(parentId) * 10 + (int(self.inputId) % 10) @@ -556,7 +556,7 @@ class ProcUnitConf(): id sera el topico a publicar inputId sera el topico a subscribirse ''' - + # Compatible with old signal chain version if datatype == None and name == None: raise ValueError('datatype or name should be defined') @@ -581,7 +581,7 @@ class ProcUnitConf(): self.lock = lock self.opConfObjList = [] - self.addOperation(name='run', optype='self') + self.addOperation(name='run', optype='self') def removeOperations(self): @@ -679,28 +679,32 @@ class ProcUnitConf(): ''' className = eval(self.name) + #print(self.name) kwargs = self.getKwargs() + #print(kwargs) + #print("mark_a") procUnitObj = className(self.id, self.inputId, self.project_id, self.err_queue, self.lock, 'ProcUnit', **kwargs) + #print("mark_b") log.success('creating process...', self.name) for opConfObj in self.opConfObjList: - + if opConfObj.type == 'self' and opConfObj.name == 'run': continue elif opConfObj.type == 'self': opObj = getattr(procUnitObj, opConfObj.name) else: opObj = opConfObj.createObject() - + log.success('adding operation: {}, type:{}'.format( opConfObj.name, opConfObj.type), self.name) - + procUnitObj.addOperation(opConfObj, opObj) - + procUnitObj.start() self.procUnitObj = procUnitObj - + def close(self): for opConfObj in self.opConfObjList: @@ -732,8 +736,8 @@ class ReadUnitConf(ProcUnitConf): def getElementName(self): - return self.ELEMENTNAME - + return self.ELEMENTNAME + def setup(self, project_id, id, name, datatype, err_queue, path='', startDate='', endDate='', startTime='', endTime='', server=None, **kwargs): @@ -745,7 +749,7 @@ class ReadUnitConf(ProcUnitConf): kwargs deben ser trasmitidos en la instanciacion ''' - + # Compatible with old signal chain version if datatype == None and name == None: raise 
ValueError('datatype or name should be defined') @@ -768,12 +772,13 @@ class ReadUnitConf(ProcUnitConf): self.datatype = datatype if path != '': self.path = os.path.abspath(path) + print (self.path) self.startDate = startDate self.endDate = endDate self.startTime = startTime self.endTime = endTime self.server = server - self.err_queue = err_queue + self.err_queue = err_queue self.addRunOperation(**kwargs) def update(self, **kwargs): @@ -804,7 +809,7 @@ class ReadUnitConf(ProcUnitConf): def addRunOperation(self, **kwargs): - opObj = self.addOperation(name='run', optype='self') + opObj = self.addOperation(name='run', optype='self') if self.server is None: opObj.addParameter( @@ -942,7 +947,7 @@ class Project(Process): print('*' * 19) print(' ') self.id = str(id) - self.description = description + self.description = description self.email = email self.alarm = alarm if name: @@ -977,7 +982,7 @@ class Project(Process): readUnitConfObj = ReadUnitConf() readUnitConfObj.setup(self.id, idReadUnit, name, datatype, self.err_queue, **kwargs) self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj - + return readUnitConfObj def addProcUnit(self, inputId='0', datatype=None, name=None): @@ -994,7 +999,7 @@ class Project(Process): idProcUnit = self.__getNewId() procUnitConfObj = ProcUnitConf() - input_proc = self.procUnitConfObjDict[inputId] + input_proc = self.procUnitConfObjDict[inputId] procUnitConfObj.setup(self.id, idProcUnit, name, datatype, inputId, self.err_queue, input_proc.lock) self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj @@ -1152,14 +1157,14 @@ class Project(Process): t = Thread(target=self.__monitor, args=(self.err_queue, self.ctx)) t.start() - + def __monitor(self, queue, ctx): import socket - + procs = 0 err_msg = '' - + while True: msg = queue.get() if '#_start_#' in msg: @@ -1168,11 +1173,11 @@ class Project(Process): procs -=1 else: err_msg = msg - - if procs == 0 or 'Traceback' in err_msg: + + if procs == 0 or 'Traceback' in err_msg: break time.sleep(0.1) - + if '|' in err_msg: name, err = err_msg.split('|') if 'SchainWarning' in err: @@ -1181,9 +1186,9 @@ class Project(Process): log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), name) else: log.error(err, name) - else: + else: name, err = self.name, err_msg - + time.sleep(2) for conf in self.procUnitConfObjDict.values(): @@ -1191,7 +1196,7 @@ class Project(Process): if confop.type == 'external': confop.opObj.terminate() conf.procUnitObj.terminate() - + ctx.term() message = ''.join(err) @@ -1217,7 +1222,7 @@ class Project(Process): subtitle += '[End time = %s]\n' % readUnitConfObj.endTime a = Alarm( - modes=self.alarm, + modes=self.alarm, email=self.email, message=message, subject=subject, @@ -1266,7 +1271,7 @@ class Project(Process): if not os.path.exists('/tmp/schain'): os.mkdir('/tmp/schain') - + self.ctx = zmq.Context() xpub = self.ctx.socket(zmq.XPUB) xpub.bind('ipc:///tmp/schain/{}_pub'.format(self.id)) @@ -1282,9 +1287,9 @@ class Project(Process): def run(self): log.success('Starting {}: {}'.format(self.name, self.id), tag='') - self.start_time = time.time() - self.createObjects() - self.setProxy() + self.start_time = time.time() + self.createObjects() + self.setProxy() log.success('{} Done (Time: {}s)'.format( self.name, time.time()-self.start_time), '') diff --git a/schainpy/model/data/jrodata.py b/schainpy/model/data/jrodata.py index dbaea8f..2ee7936 100644 --- a/schainpy/model/data/jrodata.py +++ b/schainpy/model/data/jrodata.py @@ -114,7 +114,7 @@ class GenericData(object): 
flagNoData = True def copy(self, inputObj=None): - + if inputObj == None: return copy.deepcopy(self) @@ -548,7 +548,7 @@ class Spectra(JROData): deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor) velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - + if self.nmodes: return velrange/self.nmodes else: @@ -1104,7 +1104,7 @@ class PlotterData(object): MAXNUMY = 100 def __init__(self, code, throttle_value, exp_code, buffering=True, snr=False): - + self.key = code self.throttle = throttle_value self.exp_code = exp_code @@ -1139,7 +1139,7 @@ class PlotterData(object): return len(self.__times) def __getitem__(self, key): - + if key not in self.data: raise KeyError(log.error('Missing key: {}'.format(key))) if 'spc' in key or not self.buffering: @@ -1172,7 +1172,7 @@ class PlotterData(object): elif 'spc_moments' == plot: plot = 'moments' self.data[plot] = {} - + if 'spc' in self.data or 'rti' in self.data or 'cspc' in self.data or 'moments' in self.data: self.data['noise'] = {} self.data['rti'] = {} @@ -1180,7 +1180,7 @@ class PlotterData(object): self.plottypes.append('noise') if 'rti' not in self.plottypes: self.plottypes.append('rti') - + def shape(self, key): ''' Get the shape of the one-element data for the given key @@ -1196,17 +1196,17 @@ class PlotterData(object): ''' Update data object with new dataOut ''' - + if tm in self.__times: return self.profileIndex = dataOut.profileIndex self.tm = tm self.type = dataOut.type self.parameters = getattr(dataOut, 'parameters', []) - + if hasattr(dataOut, 'meta'): self.meta.update(dataOut.meta) - + self.pairs = dataOut.pairsList self.interval = dataOut.getTimeInterval() self.localtime = dataOut.useLocalTime @@ -1217,7 +1217,7 @@ class PlotterData(object): self.__heights.append(dataOut.heightList) self.__all_heights.update(dataOut.heightList) self.__times.append(tm) - + for plot in self.plottypes: if plot in ('spc', 'spc_moments'): z = dataOut.data_spc/dataOut.normFactor @@ -1250,8 +1250,8 @@ class PlotterData(object): if plot == 'scope': buffer = dataOut.data self.flagDataAsBlock = dataOut.flagDataAsBlock - self.nProfiles = dataOut.nProfiles - + self.nProfiles = dataOut.nProfiles + if plot == 'spc': self.data['spc'] = buffer elif plot == 'cspc': @@ -1326,7 +1326,7 @@ class PlotterData(object): else: meta['xrange'] = [] - meta.update(self.meta) + meta.update(self.meta) ret['metadata'] = meta return json.dumps(ret) diff --git a/schainpy/model/data/jroheaderIO.py b/schainpy/model/data/jroheaderIO.py index 4d1eeca..8971c52 100644 --- a/schainpy/model/data/jroheaderIO.py +++ b/schainpy/model/data/jroheaderIO.py @@ -218,7 +218,7 @@ class SystemHeader(Header): structure = SYSTEM_STRUCTURE def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0): - + self.size = 24 self.nSamples = nSamples self.nProfiles = nProfiles @@ -903,4 +903,4 @@ def get_procflag_dtype(index): def get_dtype_width(index): - return DTYPE_WIDTH[index] \ No newline at end of file + return DTYPE_WIDTH[index] diff --git a/schainpy/model/graphics/jroplot_base.py b/schainpy/model/graphics/jroplot_base.py index d6c2607..2bbf382 100644 --- a/schainpy/model/graphics/jroplot_base.py +++ b/schainpy/model/graphics/jroplot_base.py @@ -228,7 +228,7 @@ class Plot(Operation): self.__throttle_plot = apply_throttle(self.throttle) self.data = PlotterData( self.CODE, self.throttle, self.exp_code, self.buffering, snr=self.showSNR) - + if self.plot_server: if not self.plot_server.startswith('tcp://'): self.plot_server = 
'tcp://{}'.format(self.plot_server) @@ -246,7 +246,7 @@ class Plot(Operation): self.setup() - self.time_label = 'LT' if self.localtime else 'UTC' + self.time_label = 'LT' if self.localtime else 'UTC' if self.width is None: self.width = 8 @@ -305,7 +305,7 @@ class Plot(Operation): cmap = plt.get_cmap(self.colormap) cmap.set_bad(self.bgcolor, 1.) self.cmaps.append(cmap) - + for fig in self.figures: fig.canvas.mpl_connect('key_press_event', self.OnKeyPress) fig.canvas.mpl_connect('scroll_event', self.OnBtnScroll) @@ -474,11 +474,11 @@ class Plot(Operation): xmax += time.timezone else: xmax = self.xmax - + ymin = self.ymin if self.ymin else numpy.nanmin(self.y) ymax = self.ymax if self.ymax else numpy.nanmax(self.y) #Y = numpy.array([1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000]) - + #i = 1 if numpy.where( # abs(ymax-ymin) <= Y)[0][0] < 0 else numpy.where(abs(ymax-ymin) <= Y)[0][0] #ystep = Y[i] / 10. @@ -492,14 +492,14 @@ class Plot(Operation): ystep = ystep/5 ystep = ystep/(10**digD) - else: + else: ystep = ((ymax + (10**(dig)))//10**(dig))*(10**(dig)) ystep = ystep/5 - + if self.xaxis is not 'time': - + dig = int(numpy.log10(xmax)) - + if dig <= 0: digD = len(str(xmax)) - 2 xdec = xmax*(10**digD) @@ -508,11 +508,11 @@ class Plot(Operation): xstep = ((xdec + (10**(dig)))//10**(dig))*(10**(dig)) xstep = xstep*0.5 xstep = xstep/(10**digD) - - else: + + else: xstep = ((xmax + (10**(dig)))//10**(dig))*(10**(dig)) xstep = xstep/5 - + for n, ax in enumerate(self.axes): if ax.firsttime: ax.set_facecolor(self.bgcolor) @@ -610,7 +610,7 @@ class Plot(Operation): if self.save: self.save_figure(n) - + if self.plot_server: self.send_to_server() # t = Thread(target=self.send_to_server) @@ -643,11 +643,10 @@ class Plot(Operation): '{}{}_{}.png'.format( self.CODE, label, - self.getDateTime(self.data.max_time).strftime( - '%Y%m%d_%H%M%S' - ), + self.getDateTime(self.data.max_time).strftime('%Y%m%d_%H%M%S'), ) ) + log.log('Saving figure: {}'.format(figname), self.name) if not os.path.isdir(os.path.dirname(figname)): os.makedirs(os.path.dirname(figname)) @@ -718,7 +717,7 @@ class Plot(Operation): self.ncols: number of cols self.nplots: number of plots (channels or pairs) self.ylabel: label for Y axes - self.titles: list of axes title + self.titles: list of axes title ''' raise NotImplementedError @@ -728,18 +727,18 @@ class Plot(Operation): Must be defined in the child class ''' raise NotImplementedError - + def run(self, dataOut, **kwargs): ''' Main plotting routine ''' - + if self.isConfig is False: self.__setup(**kwargs) if dataOut.type == 'Parameters': t = dataOut.utctimeInit else: - t = dataOut.utctime + t = dataOut.utctime if dataOut.useLocalTime: self.getDateTime = datetime.datetime.fromtimestamp @@ -749,15 +748,15 @@ class Plot(Operation): self.getDateTime = datetime.datetime.utcfromtimestamp if self.localtime: t -= time.timezone - + if 'buffer' in self.plot_type: if self.xmin is None: self.tmin = t else: self.tmin = ( self.getDateTime(t).replace( - hour=self.xmin, - minute=0, + hour=self.xmin, + minute=0, second=0) - self.getDateTime(0)).total_seconds() self.data.setup() @@ -779,7 +778,7 @@ class Plot(Operation): if dataOut.useLocalTime and not self.localtime: tm += time.timezone - if self.xaxis is 'time' and self.data and (tm - self.tmin) >= self.xrange*60*60: + if self.xaxis is 'time' and self.data and (tm - self.tmin) >= self.xrange*60*60: self.save_counter = self.save_period self.__plot() self.xmin += self.xrange @@ -807,4 +806,3 @@ class Plot(Operation): self.__plot() 
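# Editor's note (illustrative sketch, not part of the patch): the jroplot_base.py hunks above
# keep comparisons such as `if self.xaxis is not 'time'` and `if self.xaxis is 'time'`.
# `is` tests object identity, not equality, so matching a string literal this way is
# unreliable and raises a SyntaxWarning on Python 3.8+. A minimal sketch of the
# equality-based form, assuming self.xaxis holds a plain string like 'time' or 'velocity':
#
#     if self.xaxis != 'time':              # value comparison instead of identity
#         dig = int(numpy.log10(xmax))      # same step calculation as in the hunk above
#         ...
#     else:
#         ...                               # time axis handled with date locators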
if self.data and self.pause: figpause(10) - diff --git a/schainpy/model/io/jroIO_kamisr.py b/schainpy/model/io/jroIO_kamisr.py index da7adb1..a9aa8fb 100644 --- a/schainpy/model/io/jroIO_kamisr.py +++ b/schainpy/model/io/jroIO_kamisr.py @@ -21,9 +21,10 @@ except: from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader from schainpy.model.data.jrodata import Voltage -from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation +from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator from numpy import imag +@MPDecorator class AMISRReader(ProcessingUnit): ''' classdocs @@ -33,9 +34,9 @@ class AMISRReader(ProcessingUnit): ''' Constructor ''' - + ProcessingUnit.__init__(self) - + self.set = None self.subset = None self.extension_file = '.h5' @@ -50,40 +51,41 @@ class AMISRReader(ProcessingUnit): self.flagIsNewFile = 0 self.filename = '' self.amisrFilePointer = None - - - self.dataset = None - - - + + + #self.dataset = None + + + self.profileIndex = 0 - - + + self.beamCodeByFrame = None self.radacTimeByFrame = None - + self.dataset = None - - - - + + + + self.__firstFile = True - + self.buffer = None - - + + self.timezone = 'ut' - + self.__waitForNewFile = 20 - self.__filename_online = None + self.__filename_online = None #Is really necessary create the output object in the initializer self.dataOut = Voltage() - + self.dataOut.error=False + def setup(self,path=None, - startDate=None, - endDate=None, - startTime=None, + startDate=None, + endDate=None, + startTime=None, endTime=None, walk=True, timezone='ut', @@ -92,41 +94,42 @@ class AMISRReader(ProcessingUnit): nCode = 0, nBaud = 0, online=False): - + + #print ("T",path) + self.timezone = timezone self.all = all self.online = online - + self.code = code self.nCode = int(nCode) self.nBaud = int(nBaud) - - - + + + #self.findFiles() if not(online): #Busqueda de archivos offline self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk) else: self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk) - + if not(self.filenameList): print("There is no files into the folder: %s"%(path)) - sys.exit(-1) - + self.fileIndex = -1 - - self.readNextFile(online) - + + self.readNextFile(online) + ''' Add code - ''' + ''' self.isConfig = True - + pass - - + + def readAMISRHeader(self,fp): header = 'Raw11/Data/RadacHeader' self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE @@ -142,26 +145,26 @@ class AMISRReader(ProcessingUnit): self.rangeFromFile = fp.get('Raw11/Data/Samples/Range') self.frequency = fp.get('Rx/Frequency') txAus = fp.get('Raw11/Data/Pulsewidth') - - + + self.nblocks = self.pulseCount.shape[0] #nblocks - + self.nprofiles = self.pulseCount.shape[1] #nprofile self.nsa = self.nsamplesPulse[0,0] #ngates self.nchannels = self.beamCode.shape[1] self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created - + #filling radar controller header parameters self.__ippKm = self.ippSeconds *.15*1e6 # in km self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km self.__txB = 0 nWindows=1 - self.__nSamples = self.nsa + self.__nSamples = self.nsa self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km - self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000 - + 
self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000 + #for now until understand why the code saved is different (code included even though code not in tuf file) #self.__codeType = 0 # self.__nCode = None @@ -173,20 +176,20 @@ class AMISRReader(ProcessingUnit): self.__nCode = self.nCode self.__nBaud = self.nBaud #self.__code = 0 - + #filling system header parameters self.__nSamples = self.nsa - self.newProfiles = self.nprofiles/self.nchannels + self.newProfiles = self.nprofiles/self.nchannels self.__channelList = list(range(self.nchannels)) - + self.__frequency = self.frequency[0][0] - - + + def createBuffers(self): - - pass - + + pass + def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''): self.path = path self.startDate = startDate @@ -194,35 +197,35 @@ class AMISRReader(ProcessingUnit): self.startTime = startTime self.endTime = endTime self.walk = walk - + def __checkPath(self): if os.path.exists(self.path): self.status = 1 else: self.status = 0 print('Path:%s does not exists'%self.path) - + return - - + + def __selDates(self, amisr_dirname_format): try: year = int(amisr_dirname_format[0:4]) month = int(amisr_dirname_format[4:6]) dom = int(amisr_dirname_format[6:8]) thisDate = datetime.date(year,month,dom) - + if (thisDate>=self.startDate and thisDate <= self.endDate): return amisr_dirname_format except: return None - - + + def __findDataForDates(self,online=False): - + if not(self.status): return None - + pat = '\d+.\d+' dirnameList = [re.search(pat,x) for x in os.listdir(self.path)] dirnameList = [x for x in dirnameList if x!=None] @@ -237,7 +240,7 @@ class AMISRReader(ProcessingUnit): else: self.status = 0 return None - + def __getTimeFromData(self): startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime) endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime) @@ -251,33 +254,35 @@ class AMISRReader(ProcessingUnit): filename = self.filenameList[i] fp = h5py.File(filename,'r') time_str = fp.get('Time/RadacTimeString') - - startDateTimeStr_File = time_str[0][0].split('.')[0] + + startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0] + #startDateTimeStr_File = "2019-12-16 09:21:11" junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S') startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec) - - endDateTimeStr_File = time_str[-1][-1].split('.')[0] + + #endDateTimeStr_File = "2019-12-16 11:10:11" + endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0] junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S') endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec) - + fp.close() - + + #print("check time", startDateTime_File) if self.timezone == 'lt': startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300) endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300) - if (endDateTime_File>=startDateTime_Reader and endDateTime_File=endDateTime_Reader): break - - + + filter_filenameList.sort() self.filenameList = filter_filenameList return 1 - + def __filterByGlob1(self, dirName): filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file) filter_files.sort() @@ -285,24 +290,24 @@ class AMISRReader(ProcessingUnit): filterDict.setdefault(dirName) filterDict[dirName] = filter_files return filterDict - + def __getFilenameList(self, fileListInKeys, dirList): for value in fileListInKeys: dirName 
= list(value.keys())[0] for file in value[dirName]: filename = os.path.join(dirName, file) self.filenameList.append(filename) - - + + def __selectDataForTimes(self, online=False): #aun no esta implementado el filtro for tiempo if not(self.status): return None - + dirList = [os.path.join(self.path,x) for x in self.dirnameList] - + fileListInKeys = [self.__filterByGlob1(x) for x in dirList] - + self.__getFilenameList(fileListInKeys, dirList) if not(online): #filtro por tiempo @@ -315,11 +320,11 @@ class AMISRReader(ProcessingUnit): else: self.status = 0 return None - + else: #get the last file - 1 self.filenameList = [self.filenameList[-2]] - + new_dirnameList = [] for dirname in self.dirnameList: junk = numpy.array([dirname in x for x in self.filenameList]) @@ -328,27 +333,27 @@ class AMISRReader(ProcessingUnit): new_dirnameList.append(dirname) self.dirnameList = new_dirnameList return 1 - + def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59),walk=True): - + if endDate ==None: startDate = datetime.datetime.utcnow().date() endDate = datetime.datetime.utcnow().date() - + self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk) - + self.__checkPath() - + self.__findDataForDates(online=True) - + self.dirnameList = [self.dirnameList[-1]] - + self.__selectDataForTimes(online=True) - + return - - + + def searchFilesOffLine(self, path, startDate, @@ -356,20 +361,20 @@ class AMISRReader(ProcessingUnit): startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59), walk=True): - + self.__setParameters(path, startDate, endDate, startTime, endTime, walk) - + self.__checkPath() - + self.__findDataForDates() - + self.__selectDataForTimes() - + for i in range(len(self.filenameList)): print("%s" %(self.filenameList[i])) - - return - + + return + def __setNextFileOffline(self): idFile = self.fileIndex @@ -378,12 +383,13 @@ class AMISRReader(ProcessingUnit): if not(idFile < len(self.filenameList)): self.flagNoMoreFiles = 1 print("No more Files") + self.dataOut.error = True return 0 filename = self.filenameList[idFile] amisrFilePointer = h5py.File(filename,'r') - + break self.flagIsNewFile = 1 @@ -395,8 +401,8 @@ class AMISRReader(ProcessingUnit): print("Setting the file: %s"%self.filename) return 1 - - + + def __setNextFileOnline(self): filename = self.filenameList[0] if self.__filename_online != None: @@ -411,54 +417,56 @@ class AMISRReader(ProcessingUnit): self.__selectDataForTimes(online=True) filename = self.filenameList[0] wait += 1 - + self.__filename_online = filename - + self.amisrFilePointer = h5py.File(filename,'r') self.flagIsNewFile = 1 self.filename = filename print("Setting the file: %s"%self.filename) return 1 - - + + def readData(self): buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data') re = buffer[:,:,:,0] im = buffer[:,:,:,1] dataset = re + im*1j + self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime') timeset = self.radacTime[:,0] + return dataset,timeset - + def reshapeData(self): - #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa, + #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa, channels = self.beamCodeByPulse[0,:] nchan = self.nchannels #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader nblocks = self.nblocks nsamples = self.nsa - + #Dimensions : nChannels, nProfiles, nSamples - new_block = numpy.empty((nblocks, nchan, self.newProfiles, 
nsamples), dtype="complex64") + new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64") ############################################ - + for thisChannel in range(nchan): new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:] - + new_block = numpy.transpose(new_block, (1,0,2,3)) new_block = numpy.reshape(new_block, (nchan,-1, nsamples)) - - return new_block - + + return new_block + def updateIndexes(self): - + pass - + def fillJROHeader(self): - + #fill radar controller header - self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm, + self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm, txA=self.__txA, txB=0, nWindows=1, @@ -469,161 +477,173 @@ class AMISRReader(ProcessingUnit): nCode=self.__nCode, nBaud=self.__nBaud, code = self.__code, fClock=1) - - - + #fill system header self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples, nProfiles=self.newProfiles, nChannels=len(self.__channelList), adcResolution=14, - pciDioBusWith=32) - + pciDioBusWidth=32) + self.dataOut.type = "Voltage" - + self.dataOut.data = None - + self.dataOut.dtype = numpy.dtype([('real','endDateTime_Reader): return 0 - + self.jrodataset = self.reshapeData() #----self.updateIndexes() self.profileIndex = 0 - + return 1 - - + + def __hasNotDataInBuffer(self): if self.profileIndex >= (self.newProfiles*self.nblocks): return 1 return 0 - - + + def getData(self): - + if self.flagNoMoreFiles: self.dataOut.flagNoData = True return 0 - + if self.__hasNotDataInBuffer(): if not (self.readNextFile(self.online)): return 0 - - if self.dataset is None: # setear esta condicion cuando no hayan datos por leers - self.dataOut.flagNoData = True + + if self.dataset is None: # setear esta condicion cuando no hayan datos por leer + self.dataOut.flagNoData = True return 0 - + #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1)) - + self.dataOut.data = self.jrodataset[:,self.profileIndex,:] - + + #print("R_t",self.timeset) + #self.dataOut.utctime = self.jrotimeset[self.profileIndex] #verificar basic header de jro data y ver si es compatible con este valor #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels) indexprof = numpy.mod(self.profileIndex, self.newProfiles) indexblock = self.profileIndex/self.newProfiles - #print indexblock, indexprof - self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels) + #print (indexblock, indexprof) + diffUTC = 1.8e4 #UTC diference from peru in seconds --Joab + diffUTC = 0 + t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC # + #cambio posible 18/02/2020 + + + + #print("utc :",indexblock," __ ",t_comp) + #print(numpy.shape(self.timeset)) + self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp + #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp + #print(self.dataOut.utctime) self.dataOut.profileIndex = self.profileIndex self.dataOut.flagNoData = False # if indexprof == 0: # print self.dataOut.utctime - + self.profileIndex += 1 - + return self.dataOut.data - - + + def run(self, **kwargs): ''' This method will be called many times so here you should put all your code ''' - + if not self.isConfig: self.setup(**kwargs) self.isConfig = True - + self.getData() diff --git a/schainpy/model/io/jroIO_param.py b/schainpy/model/io/jroIO_param.py index cdb91da..252a815 100644 --- a/schainpy/model/io/jroIO_param.py 
+++ b/schainpy/model/io/jroIO_param.py @@ -183,7 +183,7 @@ class ParamReader(JRODataReader,ProcessingUnit): except IOError: traceback.print_exc() raise IOError("The file %s can't be opened" %(filename)) - + #In case has utctime attribute grp2 = grp1['utctime'] # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time @@ -497,7 +497,7 @@ class ParamWriter(Operation): setType = None def __init__(self): - + Operation.__init__(self) return @@ -530,9 +530,9 @@ class ParamWriter(Operation): dsDict['variable'] = self.dataList[i] #--------------------- Conditionals ------------------------ #There is no data - + if dataAux is None: - + return 0 if isinstance(dataAux, (int, float, numpy.integer, numpy.float)): @@ -704,7 +704,7 @@ class ParamWriter(Operation): return False def setNextFile(self): - + ext = self.ext path = self.path setFile = self.setFile @@ -717,7 +717,7 @@ class ParamWriter(Operation): if os.path.exists(fullpath): filesList = os.listdir( fullpath ) - filesList = [k for k in filesList if 'M' in k] + ##filesList = [k for k in filesList if 'M' in k] if len( filesList ) > 0: filesList = sorted( filesList, key=str.lower ) filen = filesList[-1] @@ -785,7 +785,7 @@ class ParamWriter(Operation): for j in range(dsInfo['dsNumber']): dsInfo = dsList[i] tableName = dsInfo['dsName'] - + if dsInfo['nDim'] == 3: shape = dsInfo['shape'].astype(int) @@ -954,7 +954,7 @@ class ParamWriter(Operation): self.dataOut = dataOut if not(self.isConfig): - self.setup(dataOut, path=path, blocksPerFile=blocksPerFile, + self.setup(dataOut, path=path, blocksPerFile=blocksPerFile, metadataList=metadataList, dataList=dataList, mode=mode, setType=setType) @@ -963,7 +963,7 @@ class ParamWriter(Operation): self.putData() return - + @MPDecorator class ParameterReader(Reader, ProcessingUnit): @@ -992,43 +992,43 @@ class ParameterReader(Reader, ProcessingUnit): self.set_kwargs(**kwargs) if not self.ext.startswith('.'): - self.ext = '.{}'.format(self.ext) + self.ext = '.{}'.format(self.ext) if self.online: log.log("Searching files in online mode...", self.name) for nTries in range(self.nTries): fullpath = self.searchFilesOnLine(self.path, self.startDate, - self.endDate, self.expLabel, self.ext, self.walk, + self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) try: fullpath = next(fullpath) except: fullpath = None - + if fullpath: break log.warning( 'Waiting {} sec for a valid file in {}: try {} ...'.format( - self.delay, self.path, nTries + 1), + self.delay, self.path, nTries + 1), self.name) time.sleep(self.delay) if not(fullpath): raise schainpy.admin.SchainError( - 'There isn\'t any valid file in {}'.format(self.path)) + 'There isn\'t any valid file in {}'.format(self.path)) pathname, filename = os.path.split(fullpath) self.year = int(filename[1:5]) self.doy = int(filename[5:8]) - self.set = int(filename[8:11]) - 1 + self.set = int(filename[8:11]) - 1 else: log.log("Searching files in {}".format(self.path), self.name) - self.filenameList = self.searchFilesOffLine(self.path, self.startDate, + self.filenameList = self.searchFilesOffLine(self.path, self.startDate, self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) - + self.setNextFile() return @@ -1036,11 +1036,11 @@ class ParameterReader(Reader, ProcessingUnit): def readFirstHeader(self): '''Read metadata and data''' - self.__readMetadata() + self.__readMetadata() self.__readData() self.__setBlockList() self.blockIndex = 0 - + return def __setBlockList(self): @@ -1099,7 +1099,7 @@ class ParameterReader(Reader, 
ProcessingUnit): else: data = gp[name].value listMetaname.append(name) - listMetadata.append(data) + listMetadata.append(data) elif self.metadata: metadata = json.loads(self.metadata) listShapes = {} @@ -1115,7 +1115,7 @@ class ParameterReader(Reader, ProcessingUnit): self.listShapes = listShapes self.listMetaname = listMetaname - self.listMeta = listMetadata + self.listMeta = listMetadata return @@ -1123,7 +1123,7 @@ class ParameterReader(Reader, ProcessingUnit): listdataname = [] listdata = [] - + if 'Data' in self.fp: grp = self.fp['Data'] for item in list(grp.items()): @@ -1137,7 +1137,7 @@ class ParameterReader(Reader, ProcessingUnit): for i in range(dim): array.append(grp[name]['table{:02d}'.format(i)].value) array = numpy.array(array) - + listdata.append(array) elif self.metadata: metadata = json.loads(self.metadata) @@ -1160,7 +1160,7 @@ class ParameterReader(Reader, ProcessingUnit): self.listDataname = listdataname self.listData = listdata return - + def getData(self): for i in range(len(self.listMeta)): @@ -1230,7 +1230,7 @@ class ParameterWriter(Operation): lastTime = None def __init__(self): - + Operation.__init__(self) return @@ -1257,7 +1257,7 @@ class ParameterWriter(Operation): dsDict['nDim'] = len(dataAux.shape) dsDict['shape'] = dataAux.shape dsDict['dsNumber'] = dataAux.shape[0] - + dsList.append(dsDict) tableList.append((self.dataList[i], dsDict['nDim'])) @@ -1274,7 +1274,7 @@ class ParameterWriter(Operation): self.lastTime = currentTime self.currentDay = dataDay return False - + timeDiff = currentTime - self.lastTime #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora @@ -1292,7 +1292,7 @@ class ParameterWriter(Operation): self.dataOut = dataOut if not(self.isConfig): - self.setup(path=path, blocksPerFile=blocksPerFile, + self.setup(path=path, blocksPerFile=blocksPerFile, metadataList=metadataList, dataList=dataList, setType=setType) @@ -1301,9 +1301,9 @@ class ParameterWriter(Operation): self.putData() return - + def setNextFile(self): - + ext = self.ext path = self.path setFile = self.setFile @@ -1369,17 +1369,17 @@ class ParameterWriter(Operation): return def writeData(self, fp): - + grp = fp.create_group("Data") dtsets = [] data = [] - + for dsInfo in self.dsList: if dsInfo['nDim'] == 0: ds = grp.create_dataset( - dsInfo['variable'], + dsInfo['variable'], (self.blocksPerFile, ), - chunks=True, + chunks=True, dtype=numpy.float64) dtsets.append(ds) data.append((dsInfo['variable'], -1)) @@ -1387,7 +1387,7 @@ class ParameterWriter(Operation): sgrp = grp.create_group(dsInfo['variable']) for i in range(dsInfo['dsNumber']): ds = sgrp.create_dataset( - 'table{:02d}'.format(i), + 'table{:02d}'.format(i), (self.blocksPerFile, ) + dsInfo['shape'][1:], chunks=True) dtsets.append(ds) @@ -1395,7 +1395,7 @@ class ParameterWriter(Operation): fp.flush() log.log('Creating file: {}'.format(fp.filename), self.name) - + self.ds = dtsets self.data = data self.firsttime = True diff --git a/schainpy/model/proc/jroproc_base.py b/schainpy/model/proc/jroproc_base.py index 75e7ffe..9f914c4 100644 --- a/schainpy/model/proc/jroproc_base.py +++ b/schainpy/model/proc/jroproc_base.py @@ -4,8 +4,8 @@ Author : Sergio Cortez Jan 2018 Abstract: Base class for processing units and operations. A decorator provides multiprocessing features and interconnect the processes created. - The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated. 
- The decorator handle also the methods inside the processing unit to be called from the main script (not as operations) (OPERATION -> type ='self'). + The argument (kwargs) sent from the controller is parsed and filtered via the decorator for each processing unit or operation instantiated. + The decorator handle also the methods inside the processing unit to be called from the main script (not as operations) (OPERATION -> type ='self'). Based on: $Author: murco $ @@ -33,14 +33,14 @@ class ProcessingUnit(object): """ Update - Jan 2018 - MULTIPROCESSING - All the "call" methods present in the previous base were removed. + All the "call" methods present in the previous base were removed. The majority of operations are independant processes, thus - the decorator is in charge of communicate the operation processes + the decorator is in charge of communicate the operation processes with the proccessing unit via IPC. The constructor does not receive any argument. The remaining methods are related with the operations to execute. - + """ proc_type = 'processing' @@ -62,7 +62,7 @@ class ProcessingUnit(object): def addOperation(self, conf, operation): """ - This method is used in the controller, and update the dictionary containing the operations to execute. The dict + This method is used in the controller, and update the dictionary containing the operations to execute. The dict posses the id of the operation process (IPC purposes) Agrega un objeto del tipo "Operation" (opObj) a la lista de objetos "self.objectList" y retorna el @@ -79,7 +79,7 @@ class ProcessingUnit(object): self.operations.append( (operation, conf.type, conf.id, conf.getKwargs())) - + if 'plot' in self.name.lower(): self.plots.append(operation.CODE) @@ -181,7 +181,7 @@ class Operation(object): return class InputQueue(Thread): - + ''' Class to hold input data for Proccessing Units and external Operations, ''' @@ -212,26 +212,26 @@ class InputQueue(Thread): def get(self): if not self.islocked and self.size/1000000 > 512: - self.lock.n.value += 1 + self.lock.n.value += 1 self.islocked = True self.lock.clear() elif self.islocked and self.size/1000000 <= 512: self.islocked = False self.lock.n.value -= 1 if self.lock.n.value == 0: - self.lock.set() - + self.lock.set() + obj = self.queue.get() self.size -= sys.getsizeof(obj) return pickle.loads(obj) - + def MPDecorator(BaseClass): """ Multiprocessing class decorator This function add multiprocessing features to a BaseClass. Also, it handle - the communication beetween processes (readers, procUnits and operations). + the communication beetween processes (readers, procUnits and operations). """ class MPClass(BaseClass, Process): @@ -248,11 +248,11 @@ def MPDecorator(BaseClass): self.t = time.time() self.name = BaseClass.__name__ self.__doc__ = BaseClass.__doc__ - + if 'plot' in self.name.lower() and not self.name.endswith('_'): self.name = '{}{}'.format(self.CODE.upper(), 'Plot') - - self.start_time = time.time() + + self.start_time = time.time() self.id = args[0] self.inputId = args[1] self.project_id = args[2] @@ -269,21 +269,21 @@ def MPDecorator(BaseClass): ''' self.queue.start() - + def listen(self): ''' This function waits for objects ''' - - return self.queue.get() + + return self.queue.get() def set_publisher(self): ''' - This function create a zmq socket for publishing objects. + This function create a zmq socket for publishing objects. 
''' time.sleep(0.5) - + c = zmq.Context() self.sender = c.socket(zmq.PUB) self.sender.connect( @@ -293,12 +293,11 @@ def MPDecorator(BaseClass): ''' This function publish an object, to an specific topic. It blocks publishing when receiver queue is full to avoid data loss - ''' - + ''' + if self.inputId is None: self.lock.wait() self.sender.send_multipart([str(id).encode(), pickle.dumps(data)]) - def runReader(self): ''' Run fuction for read units @@ -308,13 +307,13 @@ def MPDecorator(BaseClass): try: BaseClass.run(self, **self.kwargs) except: - err = traceback.format_exc() + err = traceback.format_exc() if 'No more files' in err: log.warning('No more files to read', self.name) else: self.err_queue.put('{}|{}'.format(self.name, err)) - self.dataOut.error = True - + self.dataOut.error = True + for op, optype, opId, kwargs in self.operations: if optype == 'self' and not self.dataOut.flagNoData: op(**kwargs) @@ -327,8 +326,7 @@ def MPDecorator(BaseClass): continue self.publish(self.dataOut, self.id) - - if self.dataOut.error: + if self.dataOut.error: break time.sleep(0.5) @@ -339,7 +337,7 @@ def MPDecorator(BaseClass): ''' while True: - self.dataIn = self.listen() + self.dataIn = self.listen() if self.dataIn.flagNoData and self.dataIn.error is None: continue @@ -352,23 +350,23 @@ def MPDecorator(BaseClass): elif self.dataIn.error: self.dataOut.error = self.dataIn.error self.dataOut.flagNoData = True - + for op, optype, opId, kwargs in self.operations: if optype == 'self' and not self.dataOut.flagNoData: op(**kwargs) elif optype == 'other' and not self.dataOut.flagNoData: self.dataOut = op.run(self.dataOut, **kwargs) - elif optype == 'external' and not self.dataOut.flagNoData: + elif optype == 'external' and not self.dataOut.flagNoData: self.publish(self.dataOut, opId) - + self.publish(self.dataOut, self.id) for op, optype, opId, kwargs in self.operations: - if optype == 'external' and self.dataOut.error: + if optype == 'external' and self.dataOut.error: self.publish(self.dataOut, opId) - + if self.dataOut.error: break - + time.sleep(0.5) def runOp(self): @@ -376,7 +374,7 @@ def MPDecorator(BaseClass): Run function for external operations (this operations just receive data ex: plots, writers, publishers) ''' - + while True: dataOut = self.listen() @@ -388,21 +386,20 @@ def MPDecorator(BaseClass): self.err_queue.put('{}|{}'.format(self.name, traceback.format_exc())) dataOut.error = True else: - break + break def run(self): if self.typeProc is "ProcUnit": if self.inputId is not None: self.subscribe() - + self.set_publisher() if 'Reader' not in BaseClass.__name__: self.runProc() else: self.runReader() - elif self.typeProc is "Operation": self.subscribe() diff --git a/schainpy/model/proc/jroproc_parameters.py b/schainpy/model/proc/jroproc_parameters.py index 3a209b0..116facc 100755 --- a/schainpy/model/proc/jroproc_parameters.py +++ b/schainpy/model/proc/jroproc_parameters.py @@ -8,12 +8,12 @@ import copy import sys import importlib import itertools -from multiprocessing import Pool, TimeoutError +from multiprocessing import Pool, TimeoutError from multiprocessing.pool import ThreadPool import time from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters -from .jroproc_base import ProcessingUnit, Operation, MPDecorator +from .jroproc_base import ProcessingUnit, Operation, MPDecorator from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon from scipy import asarray as ar,exp from scipy.optimize import curve_fit @@ -47,13 +47,13 @@ def 
_unpickle_method(func_name, obj, cls): @MPDecorator class ParametersProc(ProcessingUnit): - + METHODS = {} nSeconds = None def __init__(self): ProcessingUnit.__init__(self) - + # self.objectDict = {} self.buffer = None self.firstdatatime = None @@ -62,14 +62,14 @@ class ParametersProc(ProcessingUnit): self.setupReq = False #Agregar a todas las unidades de proc def __updateObjFromInput(self): - + self.dataOut.inputUnit = self.dataIn.type - + self.dataOut.timeZone = self.dataIn.timeZone self.dataOut.dstFlag = self.dataIn.dstFlag self.dataOut.errorCount = self.dataIn.errorCount self.dataOut.useLocalTime = self.dataIn.useLocalTime - + self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy() self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy() self.dataOut.channelList = self.dataIn.channelList @@ -91,27 +91,27 @@ class ParametersProc(ProcessingUnit): self.dataOut.ippSeconds = self.dataIn.ippSeconds # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter self.dataOut.timeInterval1 = self.dataIn.timeInterval - self.dataOut.heightList = self.dataIn.getHeiRange() + self.dataOut.heightList = self.dataIn.getHeiRange() self.dataOut.frequency = self.dataIn.frequency # self.dataOut.noise = self.dataIn.noise - + def run(self): #---------------------- Voltage Data --------------------------- - + if self.dataIn.type == "Voltage": self.__updateObjFromInput() self.dataOut.data_pre = self.dataIn.data.copy() self.dataOut.flagNoData = False self.dataOut.utctimeInit = self.dataIn.utctime - self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds + self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds return - + #---------------------- Spectra Data --------------------------- - + if self.dataIn.type == "Spectra": self.dataOut.data_pre = (self.dataIn.data_spc, self.dataIn.data_cspc) @@ -125,243 +125,244 @@ class ParametersProc(ProcessingUnit): self.dataOut.spc_noise = self.dataIn.getNoise() self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1)) # self.dataOut.normFactor = self.dataIn.normFactor - self.dataOut.pairsList = self.dataIn.pairsList + self.dataOut.pairsList = self.dataIn.pairsList self.dataOut.groupList = self.dataIn.pairsList - self.dataOut.flagNoData = False - + self.dataOut.flagNoData = False + if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels self.dataOut.ChanDist = self.dataIn.ChanDist - else: self.dataOut.ChanDist = None - + else: self.dataOut.ChanDist = None + #if hasattr(self.dataIn, 'VelRange'): #Velocities range # self.dataOut.VelRange = self.dataIn.VelRange #else: self.dataOut.VelRange = None - + if hasattr(self.dataIn, 'RadarConst'): #Radar Constant self.dataOut.RadarConst = self.dataIn.RadarConst - + if hasattr(self.dataIn, 'NPW'): #NPW self.dataOut.NPW = self.dataIn.NPW - + if hasattr(self.dataIn, 'COFA'): #COFA self.dataOut.COFA = self.dataIn.COFA - - - + + + #---------------------- Correlation Data --------------------------- - + if self.dataIn.type == "Correlation": acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions() - + self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:]) self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:]) self.dataOut.groupList = (acf_pairs, ccf_pairs) - + self.dataOut.abscissaList = self.dataIn.lagRange self.dataOut.noise = self.dataIn.noise 
self.dataOut.data_SNR = self.dataIn.SNR self.dataOut.flagNoData = False self.dataOut.nAvg = self.dataIn.nAvg - + #---------------------- Parameters Data --------------------------- - + if self.dataIn.type == "Parameters": self.dataOut.copy(self.dataIn) self.dataOut.flagNoData = False - + return True - + self.__updateObjFromInput() self.dataOut.utctimeInit = self.dataIn.utctime self.dataOut.paramInterval = self.dataIn.timeInterval - + + return def target(tups): - + obj, args = tups - + return obj.FitGau(args) - - + + class SpectralFilters(Operation): - + '''This class allows the Rainfall / Wind Selection for CLAIRE RADAR - + LimitR : It is the limit in m/s of Rainfall LimitW : It is the limit in m/s for Winds - + Input: - + self.dataOut.data_pre : SPC and CSPC self.dataOut.spc_range : To select wind and rainfall velocities - + Affected: - + self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind - self.dataOut.spcparam_range : Used in SpcParamPlot + self.dataOut.spcparam_range : Used in SpcParamPlot self.dataOut.SPCparam : Used in PrecipitationProc - - + + ''' - + def __init__(self): Operation.__init__(self) self.i=0 - - def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5): - - - #Limite de vientos + + def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5): + + + #Limite de vientos LimitR = PositiveLimit LimitN = NegativeLimit - + self.spc = dataOut.data_pre[0].copy() self.cspc = dataOut.data_pre[1].copy() - + self.Num_Hei = self.spc.shape[2] self.Num_Bin = self.spc.shape[1] self.Num_Chn = self.spc.shape[0] - + VelRange = dataOut.spc_range[2] TimeRange = dataOut.spc_range[1] FrecRange = dataOut.spc_range[0] - + Vmax= 2*numpy.max(dataOut.spc_range[2]) Tmax= 2*numpy.max(dataOut.spc_range[1]) Fmax= 2*numpy.max(dataOut.spc_range[0]) - + Breaker1R=VelRange[numpy.abs(VelRange-(-LimitN)).argmin()] Breaker1R=numpy.where(VelRange == Breaker1R) - - Delta = self.Num_Bin/2 - Breaker1R[0] - - + + Delta = self.Num_Bin/2 - Breaker1R[0] + + '''Reacomodando SPCrange''' VelRange=numpy.roll(VelRange,-(int(self.Num_Bin/2)) ,axis=0) - + VelRange[-(int(self.Num_Bin/2)):]+= Vmax - + FrecRange=numpy.roll(FrecRange,-(int(self.Num_Bin/2)),axis=0) - + FrecRange[-(int(self.Num_Bin/2)):]+= Fmax - + TimeRange=numpy.roll(TimeRange,-(int(self.Num_Bin/2)),axis=0) - + TimeRange[-(int(self.Num_Bin/2)):]+= Tmax - + ''' ------------------ ''' - + Breaker2R=VelRange[numpy.abs(VelRange-(LimitR)).argmin()] Breaker2R=numpy.where(VelRange == Breaker2R) - - + + SPCroll = numpy.roll(self.spc,-(int(self.Num_Bin/2)) ,axis=1) - + SPCcut = SPCroll.copy() for i in range(self.Num_Chn): - + SPCcut[i,0:int(Breaker2R[0]),:] = dataOut.noise[i] SPCcut[i,-int(Delta):,:] = dataOut.noise[i] - + SPCcut[i]=SPCcut[i]- dataOut.noise[i] SPCcut[ numpy.where( SPCcut<0 ) ] = 1e-20 - + SPCroll[i]=SPCroll[i]-dataOut.noise[i] SPCroll[ numpy.where( SPCroll<0 ) ] = 1e-20 - + SPC_ch1 = SPCroll - + SPC_ch2 = SPCcut - + SPCparam = (SPC_ch1, SPC_ch2, self.spc) - dataOut.SPCparam = numpy.asarray(SPCparam) - - + dataOut.SPCparam = numpy.asarray(SPCparam) + + dataOut.spcparam_range=numpy.zeros([self.Num_Chn,self.Num_Bin+1]) - + dataOut.spcparam_range[2]=VelRange dataOut.spcparam_range[1]=TimeRange dataOut.spcparam_range[0]=FrecRange return dataOut - + class GaussianFit(Operation): - + ''' - Function that fit of one and two generalized gaussians (gg) based - on the PSD shape across an "power band" identified from a cumsum of + Function that fit of one and two generalized gaussians (gg) based + on the PSD shape across an "power band" 
identified from a cumsum of the measured spectrum - noise. - + Input: self.dataOut.data_pre : SelfSpectra - + Output: self.dataOut.SPCparam : SPC_ch1, SPC_ch2 - + ''' def __init__(self): Operation.__init__(self) self.i=0 - - + + def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points """This routine will find a couple of generalized Gaussians to a power spectrum input: spc output: Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise """ - + self.spc = dataOut.data_pre[0].copy() self.Num_Hei = self.spc.shape[2] self.Num_Bin = self.spc.shape[1] self.Num_Chn = self.spc.shape[0] Vrange = dataOut.abscissaList - + GauSPC = numpy.empty([self.Num_Chn,self.Num_Bin,self.Num_Hei]) SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei]) SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei]) SPC_ch1[:] = numpy.NaN SPC_ch2[:] = numpy.NaN - + start_time = time.time() - + noise_ = dataOut.spc_noise[0].copy() - - - pool = Pool(processes=self.Num_Chn) + + + pool = Pool(processes=self.Num_Chn) args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)] - objs = [self for __ in range(self.Num_Chn)] - attrs = list(zip(objs, args)) + objs = [self for __ in range(self.Num_Chn)] + attrs = list(zip(objs, args)) gauSPC = pool.map(target, attrs) dataOut.SPCparam = numpy.asarray(SPCparam) - + ''' Parameters: 1. Amplitude 2. Shift 3. Width 4. Power ''' - + def FitGau(self, X): - + Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X - + SPCparam = [] SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei]) SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei]) SPC_ch1[:] = 0#numpy.NaN SPC_ch2[:] = 0#numpy.NaN - - - + + + for ht in range(self.Num_Hei): - - + + spc = numpy.asarray(self.spc)[ch,:,ht] - + ############################################# # normalizing spc and noise # This part differs from gg1 @@ -369,60 +370,60 @@ class GaussianFit(Operation): #spc = spc / spc_norm_max pnoise = pnoise #/ spc_norm_max ############################################# - + fatspectra=1.0 - + wnoise = noise_ #/ spc_norm_max #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used - #if wnoise>1.1*pnoise: # to be tested later + #if wnoise>1.1*pnoise: # to be tested later # wnoise=pnoise - noisebl=wnoise*0.9; + noisebl=wnoise*0.9; noisebh=wnoise*1.1 spc=spc-wnoise - + minx=numpy.argmin(spc) - #spcs=spc.copy() + #spcs=spc.copy() spcs=numpy.roll(spc,-minx) cum=numpy.cumsum(spcs) tot_noise=wnoise * self.Num_Bin #64; - + snr = sum(spcs)/tot_noise snrdB=10.*numpy.log10(snr) - + if snrdB < SNRlimit : snr = numpy.NaN SPC_ch1[:,ht] = 0#numpy.NaN SPC_ch1[:,ht] = 0#numpy.NaN SPCparam = (SPC_ch1,SPC_ch2) continue - - + + #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4: # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None - - cummax=max(cum); + + cummax=max(cum); epsi=0.08*fatspectra # cumsum to narrow down the energy region - cumlo=cummax*epsi; + cumlo=cummax*epsi; cumhi=cummax*(1-epsi) powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum-12: # when SNR is strong pick the peak with least shift (LOS velocity) error if oneG: choice=0 @@ -480,10 +481,10 @@ class GaussianFit(Operation): w1=lsq2[0][1]; w2=lsq2[0][5] a1=lsq2[0][2]; a2=lsq2[0][6] p1=lsq2[0][3]; p2=lsq2[0][7] - s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1; + s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1; s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2; gp1=a1*w1*s1; 
gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling - + if gp1>gp2: if a1>0.7*a2: choice=1 @@ -498,157 +499,157 @@ class GaussianFit(Operation): choice=numpy.argmax([a1,a2])+1 #else: #choice=argmin([std2a,std2b])+1 - + else: # with low SNR go to the most energetic peak choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]]) - - - shift0=lsq2[0][0]; + + + shift0=lsq2[0][0]; vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0]) - shift1=lsq2[0][4]; + shift1=lsq2[0][4]; vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0]) - + max_vel = 1.0 - + #first peak will be 0, second peak will be 1 if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range shift0=lsq2[0][0] width0=lsq2[0][1] Amplitude0=lsq2[0][2] p0=lsq2[0][3] - + shift1=lsq2[0][4] width1=lsq2[0][5] Amplitude1=lsq2[0][6] p1=lsq2[0][7] - noise=lsq2[0][8] + noise=lsq2[0][8] else: shift1=lsq2[0][0] width1=lsq2[0][1] Amplitude1=lsq2[0][2] p1=lsq2[0][3] - + shift0=lsq2[0][4] width0=lsq2[0][5] Amplitude0=lsq2[0][6] - p0=lsq2[0][7] - noise=lsq2[0][8] - + p0=lsq2[0][7] + noise=lsq2[0][8] + if Amplitude0<0.05: # in case the peak is noise - shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN] + shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN] if Amplitude1<0.05: - shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN] - - + shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN] + + SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0 SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1 SPCparam = (SPC_ch1,SPC_ch2) - - + + return GauSPC - + def y_model1(self,x,state): shift0,width0,amplitude0,power0,noise=state model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0) - + model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0) - + model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0) return model0+model0u+model0d+noise - - def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist + + def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0) - + model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0) - + model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0) model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1) - + model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1) - + model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1) return model0+model0u+model0d+model1+model1u+model1d+noise - - def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is. + + def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is. return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented - + def misfit2(self,state,y_data,x,num_intg): return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.) 
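# Editor's note (illustrative sketch, not part of the patch): GaussianFit fits one or two
# generalized Gaussians to each self-spectrum and wraps the peak at +/- Num_Bin to account
# for Nyquist aliasing, as in y_model1/y_model2 above. A minimal standalone version of the
# single-peak model, assuming `x` is an array of spectral bin indices and `nbins` plays the
# role of self.Num_Bin:
import numpy

def generalized_gaussian_peak(x, shift, width, amplitude, power, noise, nbins):
    """Single generalized-Gaussian peak plus its two Nyquist-aliased copies."""
    peak = amplitude * numpy.exp(-0.5 * numpy.abs((x - shift) / width) ** power)
    alias_up = amplitude * numpy.exp(-0.5 * numpy.abs((x - shift - nbins) / width) ** power)
    alias_dn = amplitude * numpy.exp(-0.5 * numpy.abs((x - shift + nbins) / width) ** power)
    return peak + alias_up + alias_dn + noise

# Example (hypothetical values): generalized_gaussian_peak(numpy.arange(64), shift=20,
# width=5.0, amplitude=1.0, power=2.0, noise=0.1, nbins=64)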
- - + + class PrecipitationProc(Operation): - + ''' Operator that estimates Reflectivity factor (Z), and estimates rainfall Rate (R) - - Input: + + Input: self.dataOut.data_pre : SelfSpectra - - Output: - - self.dataOut.data_output : Reflectivity factor, rainfall Rate - - - Parameters affected: + + Output: + + self.dataOut.data_output : Reflectivity factor, rainfall Rate + + + Parameters affected: ''' - + def __init__(self): Operation.__init__(self) self.i=0 - - + + def gaus(self,xSamples,Amp,Mu,Sigma): return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) )) - - - + + + def Moments(self, ySamples, xSamples): Pot = numpy.nansum( ySamples ) # Potencia, momento 0 yNorm = ySamples / Pot - + Vr = numpy.nansum( yNorm * xSamples ) # Velocidad radial, mu, corrimiento doppler, primer momento - Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento + Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento Desv = Sigma2**0.5 # Desv. Estandar, Ancho espectral - - return numpy.array([Pot, Vr, Desv]) - - def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118, + + return numpy.array([Pot, Vr, Desv]) + + def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118, tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km = 0.93, Altitude=3350): - - + + Velrange = dataOut.spcparam_range[2] FrecRange = dataOut.spcparam_range[0] - + dV= Velrange[1]-Velrange[0] dF= FrecRange[1]-FrecRange[0] - + if radar == "MIRA35C" : - + self.spc = dataOut.data_pre[0].copy() self.Num_Hei = self.spc.shape[2] self.Num_Bin = self.spc.shape[1] self.Num_Chn = self.spc.shape[0] Ze = self.dBZeMODE2(dataOut) - + else: - + self.spc = dataOut.SPCparam[1].copy() #dataOut.data_pre[0].copy() # - + """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX""" - - self.spc[:,:,0:7]= numpy.NaN - + + self.spc[:,:,0:7]= numpy.NaN + """##########################################""" - + self.Num_Hei = self.spc.shape[2] self.Num_Bin = self.spc.shape[1] self.Num_Chn = self.spc.shape[0] - + ''' Se obtiene la constante del RADAR ''' - + self.Pt = Pt self.Gt = Gt self.Gr = Gr @@ -657,30 +658,30 @@ class PrecipitationProc(Operation): self.tauW = tauW self.ThetaT = ThetaT self.ThetaR = ThetaR - + Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) ) Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR) RadarConstant = 10e-26 * Numerator / Denominator # - + ''' ============================= ''' - - self.spc[0] = (self.spc[0]-dataOut.noise[0]) - self.spc[1] = (self.spc[1]-dataOut.noise[1]) - self.spc[2] = (self.spc[2]-dataOut.noise[2]) - + + self.spc[0] = (self.spc[0]-dataOut.noise[0]) + self.spc[1] = (self.spc[1]-dataOut.noise[1]) + self.spc[2] = (self.spc[2]-dataOut.noise[2]) + self.spc[ numpy.where(self.spc < 0)] = 0 - - SPCmean = (numpy.mean(self.spc,0) - numpy.mean(dataOut.noise)) + + SPCmean = (numpy.mean(self.spc,0) - numpy.mean(dataOut.noise)) SPCmean[ numpy.where(SPCmean < 0)] = 0 - + ETAn = numpy.zeros([self.Num_Bin,self.Num_Hei]) ETAv = numpy.zeros([self.Num_Bin,self.Num_Hei]) ETAd = numpy.zeros([self.Num_Bin,self.Num_Hei]) - + Pr = SPCmean[:,:] - + VelMeteoro = numpy.mean(SPCmean,axis=0) - + D_range = numpy.zeros([self.Num_Bin,self.Num_Hei]) SIGMA = numpy.zeros([self.Num_Bin,self.Num_Hei]) N_dist = numpy.zeros([self.Num_Bin,self.Num_Hei]) @@ -689,102 +690,102 @@ class PrecipitationProc(Operation): Z = numpy.zeros(self.Num_Hei) Ze = 
numpy.zeros(self.Num_Hei) RR = numpy.zeros(self.Num_Hei) - + Range = dataOut.heightList*1000. - + for R in range(self.Num_Hei): - + h = Range[R] + Altitude #Range from ground to radar pulse altitude del_V[R] = 1 + 3.68 * 10**-5 * h + 1.71 * 10**-9 * h**2 #Density change correction for velocity - + D_range[:,R] = numpy.log( (9.65 - (Velrange[0:self.Num_Bin] / del_V[R])) / 10.3 ) / -0.6 #Diameter range [m]x10**-3 - + '''NOTA: ETA(n) dn = ETA(f) df - + dn = 1 Diferencial de muestreo df = ETA(n) / ETA(f) - + ''' - + ETAn[:,R] = RadarConstant * Pr[:,R] * (Range[R] )**2 #Reflectivity (ETA) - + ETAv[:,R]=ETAn[:,R]/dV - + ETAd[:,R]=ETAv[:,R]*6.18*exp(-0.6*D_range[:,R]) - + SIGMA[:,R] = Km * (D_range[:,R] * 1e-3 )**6 * numpy.pi**5 / Lambda**4 #Equivalent Section of drops (sigma) - - N_dist[:,R] = ETAn[:,R] / SIGMA[:,R] - + + N_dist[:,R] = ETAn[:,R] / SIGMA[:,R] + DMoments = self.Moments(Pr[:,R], Velrange[0:self.Num_Bin]) - + try: popt01,pcov = curve_fit(self.gaus, Velrange[0:self.Num_Bin] , Pr[:,R] , p0=DMoments) - except: + except: popt01=numpy.zeros(3) popt01[1]= DMoments[1] - + if popt01[1]<0 or popt01[1]>20: popt01[1]=numpy.NaN - - + + V_mean[R]=popt01[1] - + Z[R] = numpy.nansum( N_dist[:,R] * (D_range[:,R])**6 )#*10**-18 - + RR[R] = 0.0006*numpy.pi * numpy.nansum( D_range[:,R]**3 * N_dist[:,R] * Velrange[0:self.Num_Bin] ) #Rainfall rate - + Ze[R] = (numpy.nansum( ETAn[:,R]) * Lambda**4) / ( 10**-18*numpy.pi**5 * Km) - - - + + + RR2 = (Z/200)**(1/1.6) dBRR = 10*numpy.log10(RR) dBRR2 = 10*numpy.log10(RR2) - + dBZe = 10*numpy.log10(Ze) dBZ = 10*numpy.log10(Z) - + dataOut.data_output = RR[8] dataOut.data_param = numpy.ones([3,self.Num_Hei]) dataOut.channelList = [0,1,2] - + dataOut.data_param[0]=dBZ dataOut.data_param[1]=V_mean dataOut.data_param[2]=RR return dataOut - + def dBZeMODE2(self, dataOut): # Processing for MIRA35C - + NPW = dataOut.NPW COFA = dataOut.COFA - + SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]]) RadarConst = dataOut.RadarConst #frequency = 34.85*10**9 - + ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei])) data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN - + ETA = numpy.sum(SNR,1) - + ETA = numpy.where(ETA is not 0. , ETA, numpy.NaN) - + Ze = numpy.ones([self.Num_Chn, self.Num_Hei] ) - + for r in range(self.Num_Hei): - + Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2) #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2) - + return Ze - + # def GetRadarConstant(self): -# -# """ +# +# """ # Constants: -# +# # Pt: Transmission Power dB 5kW 5000 # Gt: Transmission Gain dB 24.7 dB 295.1209 # Gr: Reception Gain dB 18.5 dB 70.7945 @@ -793,63 +794,63 @@ class PrecipitationProc(Operation): # tauW: Width of transmission pulse s 4us 4e-6 # ThetaT: Transmission antenna bean angle rad 0.1656317 rad 0.1656317 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087 -# +# # """ -# +# # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) ) # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR) # RadarConstant = Numerator / Denominator -# +# # return RadarConstant - - - -class FullSpectralAnalysis(Operation): - + + + +class FullSpectralAnalysis(Operation): + """ Function that implements Full Spectral Analisys technique. 
- - Input: + + Input: self.dataOut.data_pre : SelfSpectra and CrossSPectra data self.dataOut.groupList : Pairlist of channels self.dataOut.ChanDist : Physical distance between receivers - - - Output: - - self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind - - + + + Output: + + self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind + + Parameters affected: Winds, height range, SNR - + """ def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRlimit=7): - - self.indice=int(numpy.random.rand()*1000) - + + self.indice=int(numpy.random.rand()*1000) + spc = dataOut.data_pre[0].copy() cspc = dataOut.data_pre[1] - + """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX""" - + SNRspc = spc.copy() SNRspc[:,:,0:7]= numpy.NaN - + """##########################################""" - - + + nChannel = spc.shape[0] nProfiles = spc.shape[1] nHeights = spc.shape[2] - + pairsList = dataOut.groupList if dataOut.ChanDist is not None : ChanDist = dataOut.ChanDist else: ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]]) - + FrecRange = dataOut.spc_range[0] - + ySamples=numpy.ones([nChannel,nProfiles]) phase=numpy.ones([nChannel,nProfiles]) CSPCSamples=numpy.ones([nChannel,nProfiles],dtype=numpy.complex_) @@ -857,82 +858,82 @@ class FullSpectralAnalysis(Operation): PhaseSlope=numpy.ones(nChannel) PhaseInter=numpy.ones(nChannel) data_SNR=numpy.zeros([nProfiles]) - + data = dataOut.data_pre noise = dataOut.noise - + dataOut.data_SNR = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0] - + dataOut.data_SNR[numpy.where( dataOut.data_SNR <0 )] = 1e-20 - - + + data_output=numpy.ones([spc.shape[0],spc.shape[2]])*numpy.NaN - + velocityX=[] velocityY=[] velocityV=[] PhaseLine=[] - + dbSNR = 10*numpy.log10(dataOut.data_SNR) dbSNR = numpy.average(dbSNR,0) - + for Height in range(nHeights): - + [Vzon,Vmer,Vver, GaussCenter, PhaseSlope, FitGaussCSPC]= self.WindEstimation(spc, cspc, pairsList, ChanDist, Height, noise, dataOut.spc_range, dbSNR[Height], SNRlimit) PhaseLine = numpy.append(PhaseLine, PhaseSlope) - + if abs(Vzon)<100. and abs(Vzon)> 0.: velocityX=numpy.append(velocityX, Vzon)#Vmag - + else: velocityX=numpy.append(velocityX, numpy.NaN) - + if abs(Vmer)<100. 
and abs(Vmer) > 0.: velocityY=numpy.append(velocityY, -Vmer)#Vang - + else: velocityY=numpy.append(velocityY, numpy.NaN) - + if dbSNR[Height] > SNRlimit: velocityV=numpy.append(velocityV, -Vver)#FirstMoment[Height]) else: velocityV=numpy.append(velocityV, numpy.NaN) - - + + '''Nota: Cambiar el signo de numpy.array(velocityX) cuando se intente procesar datos de BLTR''' data_output[0] = numpy.array(velocityX) #self.moving_average(numpy.array(velocityX) , N=1) data_output[1] = numpy.array(velocityY) #self.moving_average(numpy.array(velocityY) , N=1) data_output[2] = velocityV#FirstMoment - + xFrec=FrecRange[0:spc.shape[1]] - + dataOut.data_output=data_output - + return dataOut - - + + def moving_average(self,x, N=2): return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):] - + def gaus(self,xSamples,Amp,Mu,Sigma): return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) )) - - - + + + def Moments(self, ySamples, xSamples): Pot = numpy.nansum( ySamples ) # Potencia, momento 0 yNorm = ySamples / Pot Vr = numpy.nansum( yNorm * xSamples ) # Velocidad radial, mu, corrimiento doppler, primer momento - Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento + Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento Desv = Sigma2**0.5 # Desv. Estandar, Ancho espectral - + return numpy.array([Pot, Vr, Desv]) - + def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit): - - + + ySamples=numpy.ones([spc.shape[0],spc.shape[1]]) phase=numpy.ones([spc.shape[0],spc.shape[1]]) CSPCSamples=numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_) @@ -943,84 +944,84 @@ class FullSpectralAnalysis(Operation): xVel =AbbsisaRange[2][0:spc.shape[1]] Vv=numpy.empty(spc.shape[2])*0 SPCav = numpy.average(spc, axis=0)-numpy.average(noise) #spc[0]-noise[0]# - - SPCmoments = self.Moments(SPCav[:,Height], xVel ) + + SPCmoments = self.Moments(SPCav[:,Height], xVel ) CSPCmoments = [] cspcNoise = numpy.empty(3) - + '''Getting Eij and Nij''' - + Xi01=ChanDist[0][0] Eta01=ChanDist[0][1] - + Xi02=ChanDist[1][0] Eta02=ChanDist[1][1] - + Xi12=ChanDist[2][0] Eta12=ChanDist[2][1] - + z = spc.copy() z = numpy.where(numpy.isfinite(z), z, numpy.NAN) - - for i in range(spc.shape[0]): - + + for i in range(spc.shape[0]): + '''****** Line of Data SPC ******''' zline=z[i,:,Height].copy() - noise[i] # Se resta ruido - + '''****** SPC is normalized ******''' SmoothSPC =self.moving_average(zline.copy(),N=1) # Se suaviza el ruido - FactNorm = SmoothSPC/numpy.nansum(SmoothSPC) # SPC Normalizado y suavizado - + FactNorm = SmoothSPC/numpy.nansum(SmoothSPC) # SPC Normalizado y suavizado + xSamples = xFrec # Se toma el rango de frecuncias ySamples[i] = FactNorm # Se toman los valores de SPC normalizado - + for i in range(spc.shape[0]): - + '''****** Line of Data CSPC ******''' cspcLine = ( cspc[i,:,Height].copy())# - noise[i] ) # no! 
Se resta el ruido SmoothCSPC =self.moving_average(cspcLine,N=1) # Se suaviza el ruido cspcNorm = SmoothCSPC/numpy.nansum(SmoothCSPC) # CSPC normalizado y suavizado - + '''****** CSPC is normalized with respect to Briggs and Vincent ******''' chan_index0 = pairsList[i][0] chan_index1 = pairsList[i][1] - - CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2 + + CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2 CSPCNorm = cspcNorm / numpy.sqrt(CSPCFactor) - + CSPCSamples[i] = CSPCNorm - + coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor) - + #coherence[i]= self.moving_average(coherence[i],N=1) - + phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi - + CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPCSamples[0]), xSamples), self.Moments(numpy.abs(CSPCSamples[1]), xSamples), - self.Moments(numpy.abs(CSPCSamples[2]), xSamples)]) - - + self.Moments(numpy.abs(CSPCSamples[2]), xSamples)]) + + popt=[1e-10,0,1e-10] - popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10] + popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10] FitGauss01, FitGauss02, FitGauss12 = numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0 - + CSPCMask01 = numpy.abs(CSPCSamples[0]) CSPCMask02 = numpy.abs(CSPCSamples[1]) CSPCMask12 = numpy.abs(CSPCSamples[2]) - + mask01 = ~numpy.isnan(CSPCMask01) mask02 = ~numpy.isnan(CSPCMask02) mask12 = ~numpy.isnan(CSPCMask12) - + #mask = ~numpy.isnan(CSPCMask01) CSPCMask01 = CSPCMask01[mask01] CSPCMask02 = CSPCMask02[mask02] CSPCMask12 = CSPCMask12[mask12] #CSPCMask01 = numpy.ma.masked_invalid(CSPCMask01) - - - + + + '''***Fit Gauss CSPC01***''' if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3 : try: @@ -1034,87 +1035,87 @@ class FullSpectralAnalysis(Operation): FitGauss01=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[0])) FitGauss02=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[1])) FitGauss12=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[2])) - - + + CSPCopt = numpy.vstack([popt01,popt02,popt12]) - + '''****** Getting fij width ******''' - - yMean = numpy.average(ySamples, axis=0) # ySamples[0] - + + yMean = numpy.average(ySamples, axis=0) # ySamples[0] + '''******* Getting fitting Gaussian *******''' - meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, velocidad radial (frecuencia) - sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Varianza, Ancho espectral (frecuencia) - + meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, velocidad radial (frecuencia) + sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Varianza, Ancho espectral (frecuencia) + yMoments = self.Moments(yMean, xSamples) - + if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3: # and abs(meanGauss/sigma2) > 0.00001: try: popt,pcov = curve_fit(self.gaus,xSamples,yMean,p0=yMoments) FitGauss=self.gaus(xSamples,*popt) - + except :#RuntimeError: FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) - - + + else: FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) - - - + + + '''****** Getting Fij ******''' Fijcspc = CSPCopt[:,2]/2*3 - - + + GaussCenter = popt[1] #xFrec[GCpos] #Punto en Eje X de la Gaussiana donde se encuentra el centro ClosestCenter = xSamples[numpy.abs(xSamples-GaussCenter).argmin()] PointGauCenter = numpy.where(xSamples==ClosestCenter)[0][0] 
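# --- Editorial sketch (not part of the patch): the nearest-sample idiom used
# just above to locate the Gaussian center and just below to locate the e^-1
# point and the Fij half-width, on a made-up frequency axis with made-up fit
# parameters (the real values come from curve_fit on the normalized CSPC).
import numpy

x_samples    = numpy.linspace(-5.0, 5.0, 64)                   # discrete frequency axis
fit_gauss    = 3.0 * numpy.exp(-0.5 * ((x_samples - 0.7) / 1.2) ** 2)
gauss_center = 0.7                                             # popt[1] from the fit

# sample closest to the fitted center
point_center = numpy.abs(x_samples - gauss_center).argmin()

# sample whose fitted amplitude is closest to the e^-1 level
pe_minus1 = fit_gauss.max() * numpy.exp(-1)
point_fij = numpy.abs(fit_gauss - pe_minus1).argmin()

# Fij: spectral half-width between the two sample positions
fij = abs(x_samples[point_fij] - x_samples[point_center])
print(point_center, point_fij, fij)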
- - #Punto e^-1 hubicado en la Gaussiana + + #Punto e^-1 hubicado en la Gaussiana PeMinus1 = numpy.max(FitGauss)* numpy.exp(-1) FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # El punto mas cercano a "Peminus1" dentro de "FitGauss" PointFij = numpy.where(FitGauss==FijClosest)[0][0] - + if xSamples[PointFij] > xSamples[PointGauCenter]: Fij = xSamples[PointFij] - xSamples[PointGauCenter] - + else: Fij = xSamples[PointGauCenter] - xSamples[PointFij] - - + + '''****** Taking frequency ranges from SPCs ******''' - - + + #GaussCenter = popt[1] #Primer momento 01 GauWidth = popt[2] *3/2 #Ancho de banda de Gau01 Range = numpy.empty(2) Range[0] = GaussCenter - GauWidth - Range[1] = GaussCenter + GauWidth - #Punto en Eje X de la Gaussiana donde se encuentra ancho de banda (min:max) + Range[1] = GaussCenter + GauWidth + #Punto en Eje X de la Gaussiana donde se encuentra ancho de banda (min:max) ClosRangeMin = xSamples[numpy.abs(xSamples-Range[0]).argmin()] ClosRangeMax = xSamples[numpy.abs(xSamples-Range[1]).argmin()] - + PointRangeMin = numpy.where(xSamples==ClosRangeMin)[0][0] PointRangeMax = numpy.where(xSamples==ClosRangeMax)[0][0] - + Range=numpy.array([ PointRangeMin, PointRangeMax ]) - + FrecRange = xFrec[ Range[0] : Range[1] ] VelRange = xVel[ Range[0] : Range[1] ] - - + + '''****** Getting SCPC Slope ******''' - + for i in range(spc.shape[0]): - + if len(FrecRange)>5 and len(FrecRange) m): ss1 = m - - valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1 + + valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1 power = ((spec2[valid] - n0)*fwindow[valid]).sum() fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power) - snr = (spec2.mean()-n0)/n0 - - if (snr < 1.e-20) : + snr = (spec2.mean()-n0)/n0 + + if (snr < 1.e-20) : snr = 1.e-20 - + vec_power[ind] = power vec_fd[ind] = fd vec_w[ind] = w vec_snr[ind] = snr - + moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w)) return moments - + #------------------ Get SA Parameters -------------------------- - + def GetSAParameters(self): #SA en frecuencia pairslist = self.dataOut.groupList num_pairs = len(pairslist) - + vel = self.dataOut.abscissaList spectra = self.dataOut.data_pre cspectra = self.dataIn.data_cspc - delta_v = vel[1] - vel[0] - + delta_v = vel[1] - vel[0] + #Calculating the power spectrum spc_pow = numpy.sum(spectra, 3)*delta_v #Normalizing Spectra norm_spectra = spectra/spc_pow #Calculating the norm_spectra at peak - max_spectra = numpy.max(norm_spectra, 3) - + max_spectra = numpy.max(norm_spectra, 3) + #Normalizing Cross Spectra norm_cspectra = numpy.zeros(cspectra.shape) - + for i in range(num_chan): norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:]) - + max_cspectra = numpy.max(norm_cspectra,2) max_cspectra_index = numpy.argmax(norm_cspectra, 2) - + for i in range(num_pairs): cspc_par[i,:,:] = __calculateMoments(norm_cspectra) #------------------- Get Lags ---------------------------------- - + class SALags(Operation): ''' Function GetMoments() @@ -1339,19 +1340,19 @@ class SALags(Operation): self.dataOut.data_SNR self.dataOut.groupList self.dataOut.nChannels - + Affected: self.dataOut.data_param - + ''' - def run(self, dataOut): + def run(self, dataOut): data_acf = dataOut.data_pre[0] data_ccf = dataOut.data_pre[1] normFactor_acf = dataOut.normFactor[0] normFactor_ccf = dataOut.normFactor[1] pairs_acf = dataOut.groupList[0] pairs_ccf = 
dataOut.groupList[1] - + nHeights = dataOut.nHeights absc = dataOut.abscissaList noise = dataOut.noise @@ -1362,97 +1363,97 @@ class SALags(Operation): for l in range(len(pairs_acf)): data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:] - + for l in range(len(pairs_ccf)): data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:] - + dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights)) dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc) dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc) return - + # def __getPairsAutoCorr(self, pairsList, nChannels): -# +# # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan -# -# for l in range(len(pairsList)): +# +# for l in range(len(pairsList)): # firstChannel = pairsList[l][0] # secondChannel = pairsList[l][1] -# -# #Obteniendo pares de Autocorrelacion +# +# #Obteniendo pares de Autocorrelacion # if firstChannel == secondChannel: # pairsAutoCorr[firstChannel] = int(l) -# +# # pairsAutoCorr = pairsAutoCorr.astype(int) -# +# # pairsCrossCorr = range(len(pairsList)) # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr) -# +# # return pairsAutoCorr, pairsCrossCorr - + def __calculateTaus(self, data_acf, data_ccf, lagRange): - + lag0 = data_acf.shape[1]/2 #Funcion de Autocorrelacion mean_acf = stats.nanmean(data_acf, axis = 0) - + #Obtencion Indice de TauCross ind_ccf = data_ccf.argmax(axis = 1) #Obtencion Indice de TauAuto ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int') ccf_lag0 = data_ccf[:,lag0,:] - + for i in range(ccf_lag0.shape[0]): ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0) - + #Obtencion de TauCross y TauAuto tau_ccf = lagRange[ind_ccf] tau_acf = lagRange[ind_acf] - + Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0]) - + tau_ccf[Nan1,Nan2] = numpy.nan tau_acf[Nan1,Nan2] = numpy.nan tau = numpy.vstack((tau_ccf,tau_acf)) - + return tau - + def __calculateLag1Phase(self, data, lagTRange): data1 = stats.nanmean(data, axis = 0) lag1 = numpy.where(lagTRange == 0)[0][0] + 1 phase = numpy.angle(data1[lag1,:]) - + return phase - + class SpectralFitting(Operation): ''' Function GetMoments() - + Input: Output: Variables modified: ''' - - def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None): - - + + def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None): + + if path != None: sys.path.append(path) self.dataOut.library = importlib.import_module(file) - + #To be inserted as a parameter groupArray = numpy.array(groupList) -# groupArray = numpy.array([[0,1],[2,3]]) +# groupArray = numpy.array([[0,1],[2,3]]) self.dataOut.groupList = groupArray - + nGroups = groupArray.shape[0] nChannels = self.dataIn.nChannels nHeights=self.dataIn.heightList.size - + #Parameters Array self.dataOut.data_param = None - + #Set constants constants = self.dataOut.library.setConstants(self.dataIn) self.dataOut.constants = constants @@ -1461,24 +1462,24 @@ class SpectralFitting(Operation): ippSeconds = self.dataIn.ippSeconds K = self.dataIn.nIncohInt pairsArray = numpy.array(self.dataIn.pairsList) - + #List of possible combinations listComb = itertools.combinations(numpy.arange(groupArray.shape[1]),2) indCross = numpy.zeros(len(list(listComb)), dtype = 'int') - + if getSNR: listChannels = groupArray.reshape((groupArray.size)) listChannels.sort() noise = self.dataIn.getNoise() self.dataOut.data_SNR = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels]) - - for i in range(nGroups): + + for i in range(nGroups): coord = 
groupArray[i,:] - + #Input data array data = self.dataIn.data_spc[coord,:,:]/(M*N) data = data.reshape((data.shape[0]*data.shape[1],data.shape[2])) - + #Cross Spectra data array for Covariance Matrixes ind = 0 for pairs in listComb: @@ -1487,9 +1488,9 @@ class SpectralFitting(Operation): ind += 1 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N) dataCross = dataCross**2/K - + for h in range(nHeights): - + #Input d = data[:,h] @@ -1498,7 +1499,7 @@ class SpectralFitting(Operation): ind = 0 for pairs in listComb: #Coordinates in Covariance Matrix - x = pairs[0] + x = pairs[0] y = pairs[1] #Channel Index S12 = dataCross[ind,:,h] @@ -1512,15 +1513,15 @@ class SpectralFitting(Operation): LT=L.T dp = numpy.dot(LT,d) - + #Initial values data_spc = self.dataIn.data_spc[coord,:,h] - + if (h>0)and(error1[3]<5): p0 = self.dataOut.data_param[i,:,h-1] else: p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i)) - + try: #Least Squares minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True) @@ -1533,30 +1534,30 @@ class SpectralFitting(Operation): minp = p0*numpy.nan error0 = numpy.nan error1 = p0*numpy.nan - + #Save if self.dataOut.data_param is None: self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan - + self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1)) self.dataOut.data_param[i,:,h] = minp return - + def __residFunction(self, p, dp, LT, constants): fm = self.dataOut.library.modelFunction(p, constants) fmp=numpy.dot(LT,fm) - + return dp-fmp def __getSNR(self, z, noise): - + avg = numpy.average(z, axis=1) SNR = (avg.T-noise)/noise SNR = SNR.T return SNR - + def __chisq(p,chindex,hindex): #similar to Resid but calculates CHI**2 [LT,d,fm]=setupLTdfm(p,chindex,hindex) @@ -1564,53 +1565,53 @@ class SpectralFitting(Operation): fmp=numpy.dot(LT,fm) chisq=numpy.dot((dp-fmp).T,(dp-fmp)) return chisq - + class WindProfiler(Operation): - + __isConfig = False - + __initime = None __lastdatatime = None __integrationtime = None - + __buffer = None - + __dataReady = False - + __firstdata = None - + n = None - - def __init__(self): + + def __init__(self): Operation.__init__(self) - + def __calculateCosDir(self, elev, azim): zen = (90 - elev)*numpy.pi/180 azim = azim*numpy.pi/180 - cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2))) + cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2))) cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2) - + signX = numpy.sign(numpy.cos(azim)) signY = numpy.sign(numpy.sin(azim)) - + cosDirX = numpy.copysign(cosDirX, signX) cosDirY = numpy.copysign(cosDirY, signY) return cosDirX, cosDirY - + def __calculateAngles(self, theta_x, theta_y, azimuth): - + dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2) zenith_arr = numpy.arccos(dir_cosw) azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180 - + dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr) dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr) - + return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly): - -# + +# if horOnly: A = numpy.c_[dir_cosu,dir_cosv] else: @@ -1624,37 +1625,37 @@ class WindProfiler(Operation): listPhi = phi.tolist() maxid = listPhi.index(max(listPhi)) minid = listPhi.index(min(listPhi)) - - rango = list(range(len(phi))) + + rango = list(range(len(phi))) # rango = 
numpy.delete(rango,maxid) - + heiRang1 = heiRang*math.cos(phi[maxid]) heiRangAux = heiRang*math.cos(phi[minid]) indOut = (heiRang1 < heiRangAux[0]).nonzero() heiRang1 = numpy.delete(heiRang1,indOut) - + velRadial1 = numpy.zeros([len(phi),len(heiRang1)]) SNR1 = numpy.zeros([len(phi),len(heiRang1)]) - + for i in rango: x = heiRang*math.cos(phi[i]) y1 = velRadial[i,:] f1 = interpolate.interp1d(x,y1,kind = 'cubic') - + x1 = heiRang1 y11 = f1(x1) - + y2 = SNR[i,:] f2 = interpolate.interp1d(x,y2,kind = 'cubic') y21 = f2(x1) - + velRadial1[i,:] = y11 SNR1[i,:] = y21 - + return heiRang1, velRadial1, SNR1 def __calculateVelUVW(self, A, velRadial): - + #Operacion Matricial # velUVW = numpy.zeros((velRadial.shape[1],3)) # for ind in range(velRadial.shape[1]): @@ -1662,27 +1663,27 @@ class WindProfiler(Operation): # velUVW = velUVW.transpose() velUVW = numpy.zeros((A.shape[0],velRadial.shape[1])) velUVW[:,:] = numpy.dot(A,velRadial) - - + + return velUVW - + # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0): - + def techniqueDBS(self, kwargs): """ Function that implements Doppler Beam Swinging (DBS) technique. - + Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth, Direction correction (if necessary), Ranges and SNR - + Output: Winds estimation (Zonal, Meridional and Vertical) - + Parameters affected: Winds, height range, SNR """ velRadial0 = kwargs['velRadial'] heiRang = kwargs['heightList'] SNR0 = kwargs['SNR'] - + if 'dirCosx' in kwargs and 'dirCosy' in kwargs: theta_x = numpy.array(kwargs['dirCosx']) theta_y = numpy.array(kwargs['dirCosy']) @@ -1690,7 +1691,7 @@ class WindProfiler(Operation): elev = numpy.array(kwargs['elevation']) azim = numpy.array(kwargs['azimuth']) theta_x, theta_y = self.__calculateCosDir(elev, azim) - azimuth = kwargs['correctAzimuth'] + azimuth = kwargs['correctAzimuth'] if 'horizontalOnly' in kwargs: horizontalOnly = kwargs['horizontalOnly'] else: horizontalOnly = False @@ -1705,22 +1706,22 @@ class WindProfiler(Operation): param = param[arrayChannel,:,:] theta_x = theta_x[arrayChannel] theta_y = theta_y[arrayChannel] - - azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth) - heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0) + + azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth) + heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0) A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly) - + #Calculo de Componentes de la velocidad con DBS winds = self.__calculateVelUVW(A,velRadial1) - + return winds, heiRang1, SNR1 - + def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None): - + nPairs = len(pairs_ccf) posx = numpy.asarray(posx) posy = numpy.asarray(posy) - + #Rotacion Inversa para alinear con el azimuth if azimuth!= None: azimuth = azimuth*math.pi/180 @@ -1729,126 +1730,126 @@ class WindProfiler(Operation): else: posx1 = posx posy1 = posy - + #Calculo de Distancias distx = numpy.zeros(nPairs) disty = numpy.zeros(nPairs) dist = numpy.zeros(nPairs) ang = numpy.zeros(nPairs) - + for i in range(nPairs): distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]] - disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]] + disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]] dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2) ang[i] = 
numpy.arctan2(disty[i],distx[i]) - + return distx, disty, dist, ang - #Calculo de Matrices + #Calculo de Matrices # nPairs = len(pairs) # ang1 = numpy.zeros((nPairs, 2, 1)) # dist1 = numpy.zeros((nPairs, 2, 1)) -# +# # for j in range(nPairs): # dist1[j,0,0] = dist[pairs[j][0]] # dist1[j,1,0] = dist[pairs[j][1]] # ang1[j,0,0] = ang[pairs[j][0]] # ang1[j,1,0] = ang[pairs[j][1]] -# +# # return distx,disty, dist1,ang1 - + def __calculateVelVer(self, phase, lagTRange, _lambda): Ts = lagTRange[1] - lagTRange[0] velW = -_lambda*phase/(4*math.pi*Ts) - + return velW - + def __calculateVelHorDir(self, dist, tau1, tau2, ang): nPairs = tau1.shape[0] nHeights = tau1.shape[1] - vel = numpy.zeros((nPairs,3,nHeights)) + vel = numpy.zeros((nPairs,3,nHeights)) dist1 = numpy.reshape(dist, (dist.size,1)) - + angCos = numpy.cos(ang) angSin = numpy.sin(ang) - - vel0 = dist1*tau1/(2*tau2**2) + + vel0 = dist1*tau1/(2*tau2**2) vel[:,0,:] = (vel0*angCos).sum(axis = 1) vel[:,1,:] = (vel0*angSin).sum(axis = 1) - + ind = numpy.where(numpy.isinf(vel)) vel[ind] = numpy.nan - + return vel - + # def __getPairsAutoCorr(self, pairsList, nChannels): -# +# # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan -# -# for l in range(len(pairsList)): +# +# for l in range(len(pairsList)): # firstChannel = pairsList[l][0] # secondChannel = pairsList[l][1] -# -# #Obteniendo pares de Autocorrelacion +# +# #Obteniendo pares de Autocorrelacion # if firstChannel == secondChannel: # pairsAutoCorr[firstChannel] = int(l) -# +# # pairsAutoCorr = pairsAutoCorr.astype(int) -# +# # pairsCrossCorr = range(len(pairsList)) # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr) -# +# # return pairsAutoCorr, pairsCrossCorr - + # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor): def techniqueSA(self, kwargs): - - """ + + """ Function that implements Spaced Antenna (SA) technique. 
- + Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth, Direction correction (if necessary), Ranges and SNR - + Output: Winds estimation (Zonal, Meridional and Vertical) - + Parameters affected: Winds """ position_x = kwargs['positionX'] position_y = kwargs['positionY'] azimuth = kwargs['azimuth'] - + if 'correctFactor' in kwargs: correctFactor = kwargs['correctFactor'] else: correctFactor = 1 - + groupList = kwargs['groupList'] pairs_ccf = groupList[1] tau = kwargs['tau'] _lambda = kwargs['_lambda'] - + #Cross Correlation pairs obtained # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels) # pairsArray = numpy.array(pairsList)[pairsCrossCorr] # pairsSelArray = numpy.array(pairsSelected) # pairs = [] -# +# # #Wind estimation pairs obtained # for i in range(pairsSelArray.shape[0]/2): # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0] # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0] # pairs.append((ind1,ind2)) - + indtau = tau.shape[0]/2 tau1 = tau[:indtau,:] tau2 = tau[indtau:-1,:] # tau1 = tau1[pairs,:] # tau2 = tau2[pairs,:] phase1 = tau[-1,:] - + #--------------------------------------------------------------------- - #Metodo Directo + #Metodo Directo distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth) winds = self.__calculateVelHorDir(dist, tau1, tau2, ang) winds = stats.nanmean(winds, axis=0) @@ -1864,97 +1865,97 @@ class WindProfiler(Operation): winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda) winds = correctFactor*winds return winds - + def __checkTime(self, currentTime, paramInterval, outputInterval): - + dataTime = currentTime + paramInterval deltaTime = dataTime - self.__initime - + if deltaTime >= outputInterval or deltaTime < 0: self.__dataReady = True - return - + return + def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax): ''' Function that implements winds estimation technique with detected meteors. 
- + Input: Detected meteors, Minimum meteor quantity to wind estimation - + Output: Winds estimation (Zonal and Meridional) - + Parameters affected: Winds - ''' + ''' #Settings nInt = (heightMax - heightMin)/2 nInt = int(nInt) - winds = numpy.zeros((2,nInt))*numpy.nan - + winds = numpy.zeros((2,nInt))*numpy.nan + #Filter errors error = numpy.where(arrayMeteor[:,-1] == 0)[0] finalMeteor = arrayMeteor[error,:] - + #Meteor Histogram finalHeights = finalMeteor[:,2] hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax)) nMeteorsPerI = hist[0] heightPerI = hist[1] - + #Sort of meteors indSort = finalHeights.argsort() finalMeteor2 = finalMeteor[indSort,:] - + # Calculating winds ind1 = 0 - ind2 = 0 - + ind2 = 0 + for i in range(nInt): nMet = nMeteorsPerI[i] ind1 = ind2 ind2 = ind1 + nMet - + meteorAux = finalMeteor2[ind1:ind2,:] - + if meteorAux.shape[0] >= meteorThresh: vel = meteorAux[:, 6] zen = meteorAux[:, 4]*numpy.pi/180 azim = meteorAux[:, 3]*numpy.pi/180 - + n = numpy.cos(zen) # m = (1 - n**2)/(1 - numpy.tan(azim)**2) # l = m*numpy.tan(azim) l = numpy.sin(zen)*numpy.sin(azim) m = numpy.sin(zen)*numpy.cos(azim) - + A = numpy.vstack((l, m)).transpose() A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose()) windsAux = numpy.dot(A1, vel) - + winds[0,i] = windsAux[0] winds[1,i] = windsAux[1] - + return winds, heightPerI[:-1] - + def techniqueNSM_SA(self, **kwargs): metArray = kwargs['metArray'] heightList = kwargs['heightList'] timeList = kwargs['timeList'] - + rx_location = kwargs['rx_location'] groupList = kwargs['groupList'] azimuth = kwargs['azimuth'] dfactor = kwargs['dfactor'] k = kwargs['k'] - + azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth) d = dist*dfactor #Phase calculation metArray1 = self.__getPhaseSlope(metArray, heightList, timeList) - + metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities - + velEst = numpy.zeros((heightList.size,2))*numpy.nan azimuth1 = azimuth1*numpy.pi/180 - + for i in range(heightList.size): h = heightList[i] indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0] @@ -1967,71 +1968,71 @@ class WindProfiler(Operation): A = numpy.asmatrix(A) A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose() velHor = numpy.dot(A1,velAux) - + velEst[i,:] = numpy.squeeze(velHor) return velEst - + def __getPhaseSlope(self, metArray, heightList, timeList): meteorList = [] #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2 #Putting back together the meteor matrix utctime = metArray[:,0] uniqueTime = numpy.unique(utctime) - + phaseDerThresh = 0.5 ippSeconds = timeList[1] - timeList[0] sec = numpy.where(timeList>1)[0][0] nPairs = metArray.shape[1] - 6 nHeights = len(heightList) - + for t in uniqueTime: metArray1 = metArray[utctime==t,:] # phaseDerThresh = numpy.pi/4 #reducir Phase thresh tmet = metArray1[:,1].astype(int) hmet = metArray1[:,2].astype(int) - + metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1)) metPhase[:,:] = numpy.nan metPhase[:,hmet,tmet] = metArray1[:,6:].T - + #Delete short trails metBool = ~numpy.isnan(metPhase[0,:,:]) heightVect = numpy.sum(metBool, axis = 1) metBool[heightVect phaseDerThresh)) metPhase[phDerAux] = numpy.nan - + #--------------------------METEOR DETECTION ----------------------------------------- indMet = numpy.where(numpy.any(metBool,axis=1))[0] - + for p in numpy.arange(nPairs): phase = metPhase[p,:,:] phDer = metDer[p,:,:] - + for h in indMet: height = 
heightList[h] phase1 = phase[h,:] #82 phDer1 = phDer[h,:] - + phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap - + indValid = numpy.where(~numpy.isnan(phase1))[0] initMet = indValid[0] endMet = 0 - + for i in range(len(indValid)-1): - + #Time difference inow = indValid[i] inext = indValid[i+1] idiff = inext - inow #Phase difference - phDiff = numpy.abs(phase1[inext] - phase1[inow]) - + phDiff = numpy.abs(phase1[inext] - phase1[inow]) + if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor sizeTrail = inow - initMet + 1 if sizeTrail>3*sec: #Too short meteors @@ -2047,43 +2048,43 @@ class WindProfiler(Operation): vel = slope#*height*1000/(k*d) estAux = numpy.array([utctime,p,height, vel, rsq]) meteorList.append(estAux) - initMet = inext + initMet = inext metArray2 = numpy.array(meteorList) - + return metArray2 - + def __calculateAzimuth1(self, rx_location, pairslist, azimuth0): - + azimuth1 = numpy.zeros(len(pairslist)) dist = numpy.zeros(len(pairslist)) - + for i in range(len(rx_location)): ch0 = pairslist[i][0] ch1 = pairslist[i][1] - + diffX = rx_location[ch0][0] - rx_location[ch1][0] diffY = rx_location[ch0][1] - rx_location[ch1][1] azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi dist[i] = numpy.sqrt(diffX**2 + diffY**2) - + azimuth1 -= azimuth0 return azimuth1, dist - + def techniqueNSM_DBS(self, **kwargs): metArray = kwargs['metArray'] heightList = kwargs['heightList'] - timeList = kwargs['timeList'] + timeList = kwargs['timeList'] azimuth = kwargs['azimuth'] theta_x = numpy.array(kwargs['theta_x']) theta_y = numpy.array(kwargs['theta_y']) - + utctime = metArray[:,0] cmet = metArray[:,1].astype(int) hmet = metArray[:,3].astype(int) SNRmet = metArray[:,4] vmet = metArray[:,5] spcmet = metArray[:,6] - + nChan = numpy.max(cmet) + 1 nHeights = len(heightList) @@ -2099,20 +2100,20 @@ class WindProfiler(Operation): thisH = (h1met>=hmin) & (h1met8) & (vmet<50) & (spcmet<10) indthisH = numpy.where(thisH) - + if numpy.size(indthisH) > 3: - + vel_aux = vmet[thisH] chan_aux = cmet[thisH] cosu_aux = dir_cosu[chan_aux] cosv_aux = dir_cosv[chan_aux] cosw_aux = dir_cosw[chan_aux] - - nch = numpy.size(numpy.unique(chan_aux)) + + nch = numpy.size(numpy.unique(chan_aux)) if nch > 1: A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True) velEst[i,:] = numpy.dot(A,vel_aux) - + return velEst def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs): @@ -2123,39 +2124,39 @@ class WindProfiler(Operation): # noise = dataOut.noise heightList = dataOut.heightList SNR = dataOut.data_SNR - + if technique == 'DBS': - - kwargs['velRadial'] = param[:,1,:] #Radial velocity + + kwargs['velRadial'] = param[:,1,:] #Radial velocity kwargs['heightList'] = heightList kwargs['SNR'] = SNR - + dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function dataOut.utctimeInit = dataOut.utctime dataOut.outputInterval = dataOut.paramInterval - + elif technique == 'SA': - + #Parameters # position_x = kwargs['positionX'] # position_y = kwargs['positionY'] # azimuth = kwargs['azimuth'] -# +# # if kwargs.has_key('crosspairsList'): # pairs = kwargs['crosspairsList'] # else: -# pairs = None -# +# pairs = None +# # if kwargs.has_key('correctFactor'): # correctFactor = kwargs['correctFactor'] # else: # correctFactor = 1 - + # tau = dataOut.data_param # _lambda = dataOut.C/dataOut.frequency # pairsList = dataOut.groupList # nChannels = dataOut.nChannels - + kwargs['groupList'] = dataOut.groupList kwargs['tau'] = 
dataOut.data_param kwargs['_lambda'] = dataOut.C/dataOut.frequency @@ -2163,30 +2164,30 @@ class WindProfiler(Operation): dataOut.data_output = self.techniqueSA(kwargs) dataOut.utctimeInit = dataOut.utctime dataOut.outputInterval = dataOut.timeInterval - - elif technique == 'Meteors': + + elif technique == 'Meteors': dataOut.flagNoData = True self.__dataReady = False - + if 'nHours' in kwargs: nHours = kwargs['nHours'] - else: + else: nHours = 1 - + if 'meteorsPerBin' in kwargs: meteorThresh = kwargs['meteorsPerBin'] else: meteorThresh = 6 - + if 'hmin' in kwargs: hmin = kwargs['hmin'] else: hmin = 70 if 'hmax' in kwargs: hmax = kwargs['hmax'] else: hmax = 110 - + dataOut.outputInterval = nHours*3600 - + if self.__isConfig == False: # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03) #Get Initial LTC time @@ -2194,29 +2195,29 @@ class WindProfiler(Operation): self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds() self.__isConfig = True - + if self.__buffer is None: self.__buffer = dataOut.data_param self.__firstdata = copy.copy(dataOut) else: self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param)) - + self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready - + if self.__dataReady: dataOut.utctimeInit = self.__initime - + self.__initime += dataOut.outputInterval #to erase time offset - + dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax) dataOut.flagNoData = False self.__buffer = None - + elif technique == 'Meteors1': dataOut.flagNoData = True self.__dataReady = False - + if 'nMins' in kwargs: nMins = kwargs['nMins'] else: nMins = 20 @@ -2231,7 +2232,7 @@ class WindProfiler(Operation): if 'mode' in kwargs: mode = kwargs['mode'] if 'theta_x' in kwargs: - theta_x = kwargs['theta_x'] + theta_x = kwargs['theta_x'] if 'theta_y' in kwargs: theta_y = kwargs['theta_y'] else: mode = 'SA' @@ -2244,10 +2245,10 @@ class WindProfiler(Operation): freq = 50e6 lamb = C/freq k = 2*numpy.pi/lamb - + timeList = dataOut.abscissaList heightList = dataOut.heightList - + if self.__isConfig == False: dataOut.outputInterval = nMins*60 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03) @@ -2258,20 +2259,20 @@ class WindProfiler(Operation): self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds() self.__isConfig = True - + if self.__buffer is None: self.__buffer = dataOut.data_param self.__firstdata = copy.copy(dataOut) else: self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param)) - + self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready - + if self.__dataReady: dataOut.utctimeInit = self.__initime self.__initime += dataOut.outputInterval #to erase time offset - + metArray = self.__buffer if mode == 'SA': dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList) @@ -2282,71 +2283,71 @@ class WindProfiler(Operation): self.__buffer = None return - + class EWDriftsEstimation(Operation): - - def __init__(self): - Operation.__init__(self) - + + def __init__(self): + Operation.__init__(self) + def __correctValues(self, heiRang, phi, velRadial, SNR): listPhi = phi.tolist() maxid = 
listPhi.index(max(listPhi)) minid = listPhi.index(min(listPhi)) - - rango = list(range(len(phi))) + + rango = list(range(len(phi))) # rango = numpy.delete(rango,maxid) - + heiRang1 = heiRang*math.cos(phi[maxid]) heiRangAux = heiRang*math.cos(phi[minid]) indOut = (heiRang1 < heiRangAux[0]).nonzero() heiRang1 = numpy.delete(heiRang1,indOut) - + velRadial1 = numpy.zeros([len(phi),len(heiRang1)]) SNR1 = numpy.zeros([len(phi),len(heiRang1)]) - + for i in rango: x = heiRang*math.cos(phi[i]) y1 = velRadial[i,:] f1 = interpolate.interp1d(x,y1,kind = 'cubic') - + x1 = heiRang1 y11 = f1(x1) - + y2 = SNR[i,:] f2 = interpolate.interp1d(x,y2,kind = 'cubic') y21 = f2(x1) - + velRadial1[i,:] = y11 SNR1[i,:] = y21 - + return heiRang1, velRadial1, SNR1 def run(self, dataOut, zenith, zenithCorrection): heiRang = dataOut.heightList velRadial = dataOut.data_param[:,3,:] SNR = dataOut.data_SNR - + zenith = numpy.array(zenith) - zenith -= zenithCorrection + zenith -= zenithCorrection zenith *= numpy.pi/180 - + heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR) - + alp = zenith[0] bet = zenith[1] - + w_w = velRadial1[0,:] w_e = velRadial1[1,:] - - w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp)) - u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp)) - + + w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp)) + u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp)) + winds = numpy.vstack((u,w)) - + dataOut.heightList = heiRang1 dataOut.data_output = winds dataOut.data_SNR = SNR1 - + dataOut.utctimeInit = dataOut.utctime dataOut.outputInterval = dataOut.timeInterval return @@ -2359,11 +2360,11 @@ class NonSpecularMeteorDetection(Operation): data_acf = dataOut.data_pre[0] data_ccf = dataOut.data_pre[1] pairsList = dataOut.groupList[1] - + lamb = dataOut.C/dataOut.frequency tSamp = dataOut.ippSeconds*dataOut.nCohInt paramInterval = dataOut.paramInterval - + nChannels = data_acf.shape[0] nLags = data_acf.shape[1] nProfiles = data_acf.shape[2] @@ -2373,7 +2374,7 @@ class NonSpecularMeteorDetection(Operation): heightList = dataOut.heightList ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg utctime = dataOut.utctime - + dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds) #------------------------ SNR -------------------------------------- @@ -2385,7 +2386,7 @@ class NonSpecularMeteorDetection(Operation): SNR[i] = (power[i]-noise[i])/noise[i] SNRm = numpy.nanmean(SNR, axis = 0) SNRdB = 10*numpy.log10(SNR) - + if mode == 'SA': dataOut.groupList = dataOut.groupList[1] nPairs = data_ccf.shape[0] @@ -2393,22 +2394,22 @@ class NonSpecularMeteorDetection(Operation): phase = numpy.zeros(data_ccf[:,0,:,:].shape) # phase1 = numpy.copy(phase) coh1 = numpy.zeros(data_ccf[:,0,:,:].shape) - + for p in range(nPairs): ch0 = pairsList[p][0] ch1 = pairsList[p][1] ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:]) - phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter -# phase1[p,:,:] = numpy.angle(ccf) #median filter - coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter -# coh1[p,:,:] = numpy.abs(ccf) #median filter + phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter +# phase1[p,:,:] = numpy.angle(ccf) #median filter + coh1[p,:,:] 
= ndimage.median_filter(numpy.abs(ccf), 5) #median filter +# coh1[p,:,:] = numpy.abs(ccf) #median filter coh = numpy.nanmax(coh1, axis = 0) # struc = numpy.ones((5,1)) # coh = ndimage.morphology.grey_dilation(coh, size=(10,1)) #---------------------- Radial Velocity ---------------------------- phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0) velRad = phaseAux*lamb/(4*numpy.pi*tSamp) - + if allData: boolMetFin = ~numpy.isnan(SNRm) # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0) @@ -2416,31 +2417,31 @@ class NonSpecularMeteorDetection(Operation): #------------------------ Meteor mask --------------------------------- # #SNR mask # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB)) -# +# # #Erase small objects -# boolMet1 = self.__erase_small(boolMet, 2*sec, 5) -# +# boolMet1 = self.__erase_small(boolMet, 2*sec, 5) +# # auxEEJ = numpy.sum(boolMet1,axis=0) # indOver = auxEEJ>nProfiles*0.8 #Use this later # indEEJ = numpy.where(indOver)[0] # indNEEJ = numpy.where(~indOver)[0] -# +# # boolMetFin = boolMet1 -# +# # if indEEJ.size > 0: -# boolMet1[:,indEEJ] = False #Erase heights with EEJ -# +# boolMet1[:,indEEJ] = False #Erase heights with EEJ +# # boolMet2 = coh > cohThresh # boolMet2 = self.__erase_small(boolMet2, 2*sec,5) -# +# # #Final Meteor mask # boolMetFin = boolMet1|boolMet2 - + #Coherence mask boolMet1 = coh > 0.75 struc = numpy.ones((30,1)) boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc) - + #Derivative mask derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0) boolMet2 = derPhase < 0.2 @@ -2457,7 +2458,7 @@ class NonSpecularMeteorDetection(Operation): tmet = coordMet[0] hmet = coordMet[1] - + data_param = numpy.zeros((tmet.size, 6 + nPairs)) data_param[:,0] = utctime data_param[:,1] = tmet @@ -2466,7 +2467,7 @@ class NonSpecularMeteorDetection(Operation): data_param[:,4] = velRad[tmet,hmet] data_param[:,5] = coh[tmet,hmet] data_param[:,6:] = phase[:,tmet,hmet].T - + elif mode == 'DBS': dataOut.groupList = numpy.arange(nChannels) @@ -2474,7 +2475,7 @@ class NonSpecularMeteorDetection(Operation): phase = numpy.angle(data_acf[:,1,:,:]) # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1)) velRad = phase*lamb/(4*numpy.pi*tSamp) - + #Spectral width # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1)) # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1)) @@ -2489,24 +2490,24 @@ class NonSpecularMeteorDetection(Operation): #SNR boolMet1 = (SNRdB>SNRthresh) #SNR mask boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5)) - + #Radial velocity boolMet2 = numpy.abs(velRad) < 20 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5)) - + #Spectral Width boolMet3 = spcWidth < 30 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5)) # boolMetFin = self.__erase_small(boolMet1, 10,5) boolMetFin = boolMet1&boolMet2&boolMet3 - + #Creating data_param coordMet = numpy.where(boolMetFin) cmet = coordMet[0] tmet = coordMet[1] hmet = coordMet[2] - + data_param = numpy.zeros((tmet.size, 7)) data_param[:,0] = utctime data_param[:,1] = cmet @@ -2515,7 +2516,7 @@ class NonSpecularMeteorDetection(Operation): data_param[:,4] = SNR[cmet,tmet,hmet].T data_param[:,5] = velRad[cmet,tmet,hmet].T data_param[:,6] = spcWidth[cmet,tmet,hmet].T - + # self.dataOut.data_param = data_int if len(data_param) == 0: dataOut.flagNoData = True @@ -2525,21 +2526,21 @@ class NonSpecularMeteorDetection(Operation): def __erase_small(self, binArray, threshX, threshY): labarray, 
numfeat = ndimage.measurements.label(binArray) binArray1 = numpy.copy(binArray) - + for i in range(1,numfeat + 1): auxBin = (labarray==i) auxSize = auxBin.sum() - + x,y = numpy.where(auxBin) widthX = x.max() - x.min() widthY = y.max() - y.min() - + #width X: 3 seg -> 12.5*3 - #width Y: - + #width Y: + if (auxSize < 50) or (widthX < threshX) or (widthY < threshY): binArray1[auxBin] = False - + return binArray1 #--------------- Specular Meteor ---------------- @@ -2549,36 +2550,36 @@ class SMDetection(Operation): Function DetectMeteors() Project developed with paper: HOLDSWORTH ET AL. 2004 - + Input: self.dataOut.data_pre - + centerReceiverIndex: From the channels, which is the center receiver - + hei_ref: Height reference for the Beacon signal extraction tauindex: predefinedPhaseShifts: Predefined phase offset for the voltge signals - + cohDetection: Whether to user Coherent detection or not cohDet_timeStep: Coherent Detection calculation time step cohDet_thresh: Coherent Detection phase threshold to correct phases - + noise_timeStep: Noise calculation time step noise_multiple: Noise multiple to define signal threshold - + multDet_timeLimit: Multiple Detection Removal time limit in seconds multDet_rangeLimit: Multiple Detection Removal range limit in km - + phaseThresh: Maximum phase difference between receiver to be consider a meteor - SNRThresh: Minimum SNR threshold of the meteor signal to be consider a meteor - + SNRThresh: Minimum SNR threshold of the meteor signal to be consider a meteor + hmin: Minimum Height of the meteor to use it in the further wind estimations hmax: Maximum Height of the meteor to use it in the further wind estimations azimuth: Azimuth angle correction - + Affected: self.dataOut.data_param - + Rejection Criteria (Errors): 0: No error; analysis OK 1: SNR < SNR threshold @@ -2597,9 +2598,9 @@ class SMDetection(Operation): 14: height ambiguous echo: more then one possible height within 70 to 110 km 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s 16: oscilatory echo, indicating event most likely not an underdense echo - + 17: phase difference in meteor Reestimation - + Data Storage: Meteors for Wind Estimation (8): Utc Time | Range Height @@ -2607,19 +2608,19 @@ class SMDetection(Operation): VelRad errorVelRad Phase0 Phase1 Phase2 Phase3 TypeError - - ''' - + + ''' + def run(self, dataOut, hei_ref = None, tauindex = 0, phaseOffsets = None, - cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25, + cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25, noise_timeStep = 4, noise_multiple = 4, multDet_timeLimit = 1, multDet_rangeLimit = 3, phaseThresh = 20, SNRThresh = 5, hmin = 50, hmax=150, azimuth = 0, channelPositions = None) : - - + + #Getting Pairslist if channelPositions is None: # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T @@ -2629,53 +2630,53 @@ class SMDetection(Operation): heiRang = dataOut.getHeiRange() #Get Beacon signal - No Beacon signal anymore # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex]) -# +# # if hei_ref != None: # newheis = numpy.where(self.dataOut.heightList>hei_ref) -# - - +# + + #****************REMOVING HARDWARE PHASE DIFFERENCES*************** # see if the user put in pre defined phase shifts voltsPShift = dataOut.data_pre.copy() - + # if predefinedPhaseShifts != None: # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180 -# +# # # elif beaconPhaseShifts: # # #get hardware phase shifts using beacon signal # 
# hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10) # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0) -# +# # else: -# hardwarePhaseShifts = numpy.zeros(5) -# +# hardwarePhaseShifts = numpy.zeros(5) +# # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex') # for i in range(self.dataOut.data_pre.shape[0]): # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i]) #******************END OF REMOVING HARDWARE PHASE DIFFERENCES********* - + #Remove DC voltsDC = numpy.mean(voltsPShift,1) voltsDC = numpy.mean(voltsDC,1) for i in range(voltsDC.shape[0]): voltsPShift[i] = voltsPShift[i] - voltsDC[i] - - #Don't considerate last heights, theyre used to calculate Hardware Phase Shift + + #Don't considerate last heights, theyre used to calculate Hardware Phase Shift # voltsPShift = voltsPShift[:,:,:newheis[0][0]] - + #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) ********** #Coherent Detection if cohDetection: #use coherent detection to get the net power cohDet_thresh = cohDet_thresh*numpy.pi/180 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh) - + #Non-coherent detection! powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0) #********** END OF COH/NON-COH POWER CALCULATION********************** - + #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS **************** #Get noise noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval) @@ -2685,7 +2686,7 @@ class SMDetection(Operation): #Meteor echoes detection listMeteors = self.__findMeteors(powerNet, signalThresh) #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION ********** - + #************** REMOVE MULTIPLE DETECTIONS (3.5) *************************** #Parameters heiRange = dataOut.getHeiRange() @@ -2695,7 +2696,7 @@ class SMDetection(Operation): #Multiple detection removals listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit) #************ END OF REMOVE MULTIPLE DETECTIONS ********************** - + #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ******************** #Parameters phaseThresh = phaseThresh*numpy.pi/180 @@ -2706,40 +2707,40 @@ class SMDetection(Operation): #Estimation of decay times (Errors N 7, 8, 11) listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency) #******************* END OF METEOR REESTIMATION ******************* - + #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) ************************** #Calculating Radial Velocity (Error N 15) radialStdThresh = 10 - listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval) + listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval) if len(listMeteors4) > 0: #Setting New Array date = dataOut.utctime arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang) - + #Correcting phase offset if phaseOffsets != None: phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets) - + #Second Pairslist pairsList = [] pairx = (0,1) pairy = (2,3) pairsList.append(pairx) pairsList.append(pairy) - + jph = numpy.array([0,0,0,0]) h = (hmin,hmax) 
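# --- Editorial sketch (not part of the patch): the noise / signal-threshold
# stage called earlier in this run() method (__getNoise + __findMeteors).
# The threshold form noise * noise_multiple is an assumption taken from the
# docstring ("noise_multiple: Noise multiple to define signal threshold");
# the synthetic power array, block length and burst below are made-up values.
import numpy

rng = numpy.random.default_rng(0)
K = 16                                                        # incoherent integrations
power = rng.exponential(1.0, size=(K, 200, 64)).mean(axis=0)  # profiles x heights
power[85:88, 30] += 20.0                                      # short synthetic meteor-like burst

noise_multiple = 4
block = 50                                                    # profiles per noise block
noise = numpy.zeros_like(power)
for start in range(0, power.shape[0], block):
    noise[start:start + block, :] = power[start:start + block, :].mean(axis=0)

signal_thresh = noise * noise_multiple
prof_idx, hei_idx = numpy.where(power > signal_thresh)        # candidate meteor samples
print(prof_idx, hei_idx)                                      # should pick out just the burst samples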
arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph) - + # #Calculate AOA (Error N 3, 4) # #JONES ET AL. 1998 # error = arrayParameters[:,-1] # AOAthresh = numpy.pi/8 # phases = -arrayParameters[:,9:13] # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth) -# +# # #Calculate Heights (Error N 13 and 14) # error = arrayParameters[:,-1] # Ranges = arrayParameters[:,2] @@ -2747,73 +2748,73 @@ class SMDetection(Operation): # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax) # error = arrayParameters[:,-1] #********************* END OF PARAMETERS CALCULATION ************************** - - #***************************+ PASS DATA TO NEXT STEP ********************** + + #***************************+ PASS DATA TO NEXT STEP ********************** # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1])) dataOut.data_param = arrayParameters - + if arrayParameters is None: dataOut.flagNoData = True else: dataOut.flagNoData = True - + return - + def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n): - + minIndex = min(newheis[0]) maxIndex = max(newheis[0]) - + voltage = voltage0[:,:,minIndex:maxIndex+1] nLength = voltage.shape[1]/n nMin = 0 nMax = 0 phaseOffset = numpy.zeros((len(pairslist),n)) - + for i in range(n): nMax += nLength phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0])) phaseCCF = numpy.mean(phaseCCF, axis = 2) - phaseOffset[:,i] = phaseCCF.transpose() + phaseOffset[:,i] = phaseCCF.transpose() nMin = nMax # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist) - + #Remove Outliers factor = 2 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5)) dw = numpy.std(wt,axis = 1) dw = dw.reshape((dw.size,1)) - ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor)) + ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor)) phaseOffset[ind] = numpy.nan - phaseOffset = stats.nanmean(phaseOffset, axis=1) - + phaseOffset = stats.nanmean(phaseOffset, axis=1) + return phaseOffset - + def __shiftPhase(self, data, phaseShift): #this will shift the phase of a complex number - dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j) + dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j) return dataShifted - + def __estimatePhaseDifference(self, array, pairslist): nChannel = array.shape[0] nHeights = array.shape[2] numPairs = len(pairslist) # phaseCCF = numpy.zeros((nChannel, 5, nHeights)) phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2])) - + #Correct phases derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:] indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi) - - if indDer[0].shape[0] > 0: + + if indDer[0].shape[0] > 0: for i in range(indDer[0].shape[0]): signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]]) phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi - + # for j in range(numSides): # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2]) # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux) -# +# #Linear phaseInt = numpy.zeros((numPairs,1)) angAllCCF = phaseCCF[:,[0,1,3,4],0] @@ -2823,16 +2824,16 @@ class SMDetection(Operation): #Phase Differences phaseDiff = phaseInt - phaseCCF[:,2,:] phaseArrival = phaseInt.reshape(phaseInt.size) - + #Dealias phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival)) # indAlias = 
numpy.where(phaseArrival > numpy.pi) # phaseArrival[indAlias] -= 2*numpy.pi # indAlias = numpy.where(phaseArrival < -numpy.pi) # phaseArrival[indAlias] += 2*numpy.pi - + return phaseDiff, phaseArrival - + def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh): #this function will run the coherent detection used in Holdworth et al. 2004 and return the net power #find the phase shifts of each channel over 1 second intervals @@ -2842,25 +2843,25 @@ class SMDetection(Operation): numHeights = volts.shape[2] nChannel = volts.shape[0] voltsCohDet = volts.copy() - + pairsarray = numpy.array(pairslist) indSides = pairsarray[:,1] # indSides = numpy.array(range(nChannel)) # indSides = numpy.delete(indSides, indCenter) -# +# # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0) listBlocks = numpy.array_split(volts, numBlocks, 1) - + startInd = 0 endInd = 0 - + for i in range(numBlocks): startInd = endInd - endInd = endInd + listBlocks[i].shape[1] - + endInd = endInd + listBlocks[i].shape[1] + arrayBlock = listBlocks[i] # arrayBlockCenter = listCenter[i] - + #Estimate the Phase Difference phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist) #Phase Difference RMS @@ -2872,21 +2873,21 @@ class SMDetection(Operation): for j in range(indSides.size): arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose()) voltsCohDet[:,startInd:endInd,:] = arrayBlock - + return voltsCohDet - + def __calculateCCF(self, volts, pairslist ,laglist): - + nHeights = volts.shape[2] - nPoints = volts.shape[1] + nPoints = volts.shape[1] voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex') - + for i in range(len(pairslist)): volts1 = volts[pairslist[i][0]] - volts2 = volts[pairslist[i][1]] - + volts2 = volts[pairslist[i][1]] + for t in range(len(laglist)): - idxT = laglist[t] + idxT = laglist[t] if idxT >= 0: vStacked = numpy.vstack((volts2[idxT:,:], numpy.zeros((idxT, nHeights),dtype='complex'))) @@ -2894,10 +2895,10 @@ class SMDetection(Operation): vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'), volts2[:(nPoints + idxT),:])) voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0) - + vStacked = None return voltsCCF - + def __getNoise(self, power, timeSegment, timeInterval): numProfPerBlock = numpy.ceil(timeSegment/timeInterval) numBlocks = int(power.shape[0]/numProfPerBlock) @@ -2906,100 +2907,100 @@ class SMDetection(Operation): listPower = numpy.array_split(power, numBlocks, 0) noise = numpy.zeros((power.shape[0], power.shape[1])) noise1 = numpy.zeros((power.shape[0], power.shape[1])) - + startInd = 0 endInd = 0 - + for i in range(numBlocks): #split por canal startInd = endInd - endInd = endInd + listPower[i].shape[0] - + endInd = endInd + listPower[i].shape[0] + arrayBlock = listPower[i] noiseAux = numpy.mean(arrayBlock, 0) # noiseAux = numpy.median(noiseAux) # noiseAux = numpy.mean(arrayBlock) - noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux - + noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux + noiseAux1 = numpy.mean(arrayBlock) - noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1 - + noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1 + return noise, noise1 - + def __findMeteors(self, power, thresh): nProf = power.shape[0] nHeights = power.shape[1] listMeteors = [] - + for i in range(nHeights): powerAux = power[:,i] threshAux = thresh[:,i] - + indUPthresh = 
numpy.where(powerAux > threshAux)[0] indDNthresh = numpy.where(powerAux <= threshAux)[0] - + j = 0 - + while (j < indUPthresh.size - 2): if (indUPthresh[j + 2] == indUPthresh[j] + 2): indDNAux = numpy.where(indDNthresh > indUPthresh[j]) indDNthresh = indDNthresh[indDNAux] - + if (indDNthresh.size > 0): indEnd = indDNthresh[0] - 1 indInit = indUPthresh[j] - + meteor = powerAux[indInit:indEnd + 1] indPeak = meteor.argmax() + indInit FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0))) - + listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHEQUEAR!!!!! j = numpy.where(indUPthresh == indEnd)[0] + 1 else: j+=1 else: j+=1 - + return listMeteors - + def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit): - - arrayMeteors = numpy.asarray(listMeteors) + + arrayMeteors = numpy.asarray(listMeteors) listMeteors1 = [] - + while arrayMeteors.shape[0] > 0: FLAs = arrayMeteors[:,4] maxFLA = FLAs.argmax() listMeteors1.append(arrayMeteors[maxFLA,:]) - + MeteorInitTime = arrayMeteors[maxFLA,1] MeteorEndTime = arrayMeteors[maxFLA,3] MeteorHeight = arrayMeteors[maxFLA,0] - + #Check neighborhood maxHeightIndex = MeteorHeight + rangeLimit minHeightIndex = MeteorHeight - rangeLimit minTimeIndex = MeteorInitTime - timeLimit maxTimeIndex = MeteorEndTime + timeLimit - + #Check Heights indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex) indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex) indBoth = numpy.where(numpy.logical_and(indTime,indHeight)) - + arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0) - + return listMeteors1 - + def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency): numHeights = volts.shape[2] nChannel = volts.shape[0] - + thresholdPhase = thresh[0] thresholdNoise = thresh[1] thresholdDB = float(thresh[2]) - + thresholdDB1 = 10**(thresholdDB/10) pairsarray = numpy.array(pairslist) indSides = pairsarray[:,1] - + pairslist1 = list(pairslist) pairslist1.append((0,1)) pairslist1.append((3,4)) @@ -3008,31 +3009,31 @@ class SMDetection(Operation): listPowerSeries = [] listVoltageSeries = [] #volts has the war data - + if frequency == 30e6: timeLag = 45*10**-3 else: timeLag = 15*10**-3 lag = numpy.ceil(timeLag/timeInterval) - + for i in range(len(listMeteors)): - + ###################### 3.6 - 3.7 PARAMETERS REESTIMATION ######################### meteorAux = numpy.zeros(16) - + #Loading meteor Data (mHeight, mStart, mPeak, mEnd) mHeight = listMeteors[i][0] mStart = listMeteors[i][1] mPeak = listMeteors[i][2] mEnd = listMeteors[i][3] - + #get the volt data between the start and end times of the meteor meteorVolts = volts[:,mStart:mEnd+1,mHeight] meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1) - + #3.6. Phase Difference estimation phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist) - + #3.7. 
Phase difference removal & meteor start, peak and end times reestimated #meteorVolts0.- all Channels, all Profiles meteorVolts0 = volts[:,:,mHeight] @@ -3040,15 +3041,15 @@ class SMDetection(Operation): meteorNoise = noise[:,mHeight] meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power - + #Times reestimation mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0] if mStart1.size > 0: mStart1 = mStart1[-1] + 1 - - else: + + else: mStart1 = mPeak - + mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0] if mEndDecayTime1.size == 0: @@ -3056,7 +3057,7 @@ class SMDetection(Operation): else: mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax() - + #meteorVolts1.- all Channels, from start to end meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1] meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1] @@ -3065,17 +3066,17 @@ class SMDetection(Operation): meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1) meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1) ##################### END PARAMETERS REESTIMATION ######################### - + ##################### 3.8 PHASE DIFFERENCE REESTIMATION ######################## # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis - if meteorVolts2.shape[1] > 0: + if meteorVolts2.shape[1] > 0: #Phase Difference re-estimation phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist) meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1]) phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1)) meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting - + #Phase Difference RMS phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1))) powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0) @@ -3090,27 +3091,27 @@ class SMDetection(Operation): #Vectorize meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1] meteorAux[7:11] = phaseDiffint[0:4] - + #Rejection Criterions if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation meteorAux[-1] = 17 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB meteorAux[-1] = 1 - - - else: + + + else: meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd] meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis PowerSeries = 0 - + listMeteors1.append(meteorAux) listPowerSeries.append(PowerSeries) listVoltageSeries.append(meteorVolts1) - - return listMeteors1, listPowerSeries, listVoltageSeries - + + return listMeteors1, listPowerSeries, listVoltageSeries + def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency): - + threshError = 10 #Depending if it is 30 or 50 MHz if frequency == 30e6: @@ -3118,22 +3119,22 @@ class SMDetection(Operation): else: timeLag = 15*10**-3 lag = numpy.ceil(timeLag/timeInterval) - + listMeteors1 = [] - + for i in range(len(listMeteors)): meteorPower = listPower[i] meteorAux = listMeteors[i] - + if meteorAux[-1] == 0: - try: + try: indmax = meteorPower.argmax() indlag = indmax + lag - + y = meteorPower[indlag:] x = 
numpy.arange(0, y.size)*timeLag - + #first guess a = y[0] tau = timeLag @@ -3142,26 +3143,26 @@ class SMDetection(Operation): y1 = self.__exponential_function(x, *popt) #error estimation error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size)) - + decayTime = popt[1] riseTime = indmax*timeInterval meteorAux[11:13] = [decayTime, error] - + #Table items 7, 8 and 11 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s - meteorAux[-1] = 7 + meteorAux[-1] = 7 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than than twice rise time meteorAux[-1] = 8 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time - meteorAux[-1] = 11 - - + meteorAux[-1] = 11 + + except: - meteorAux[-1] = 11 - - + meteorAux[-1] = 11 + + listMeteors1.append(meteorAux) - + return listMeteors1 #Exponential Function @@ -3169,9 +3170,9 @@ class SMDetection(Operation): def __exponential_function(self, x, a, tau): y = a*numpy.exp(-x/tau) return y - + def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval): - + pairslist1 = list(pairslist) pairslist1.append((0,1)) pairslist1.append((3,4)) @@ -3181,33 +3182,33 @@ class SMDetection(Operation): c = 3e8 lag = numpy.ceil(timeLag/timeInterval) freq = 30e6 - + listMeteors1 = [] - + for i in range(len(listMeteors)): meteorAux = listMeteors[i] if meteorAux[-1] == 0: mStart = listMeteors[i][1] - mPeak = listMeteors[i][2] + mPeak = listMeteors[i][2] mLag = mPeak - mStart + lag - + #get the volt data between the start and end times of the meteor meteorVolts = listVolts[i] meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1) #Get CCF allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2]) - + #Method 2 slopes = numpy.zeros(numPairs) time = numpy.array([-2,-1,1,2])*timeInterval angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0]) - + #Correct phases derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1] indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi) - - if indDer[0].shape[0] > 0: + + if indDer[0].shape[0] > 0: for i in range(indDer[0].shape[0]): signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]]) angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi @@ -3216,51 +3217,51 @@ class SMDetection(Operation): for j in range(numPairs): fit = stats.linregress(time, angAllCCF[j,:]) slopes[j] = fit[0] - + #Remove Outlier # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes))) # slopes = numpy.delete(slopes,indOut) # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes))) # slopes = numpy.delete(slopes,indOut) - + radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq) radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq) meteorAux[-2] = radialError meteorAux[-3] = radialVelocity - + #Setting Error #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s - if numpy.abs(radialVelocity) > 200: + if numpy.abs(radialVelocity) > 200: meteorAux[-1] = 15 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity elif radialError > radialStdThresh: meteorAux[-1] = 12 - + listMeteors1.append(meteorAux) return listMeteors1 - + def __setNewArrays(self, listMeteors, date, heiRang): - + #New arrays arrayMeteors = numpy.array(listMeteors) arrayParameters = numpy.zeros((len(listMeteors), 13)) - + #Date inclusion # date = re.findall(r'\((.*?)\)', date) # date = date[0].split(',') # date = map(int, date) -# +# # if len(date)<6: # date.append(0) -# +# # date = [date[0]*10000 + date[1]*100 + date[2], 
date[3]*10000 + date[4]*100 + date[5]] # arrayDate = numpy.tile(date, (len(listMeteors), 1)) arrayDate = numpy.tile(date, (len(listMeteors))) - + #Meteor array # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)] # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors)) - + #Parameters Array arrayParameters[:,0] = arrayDate #Date arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range @@ -3268,13 +3269,13 @@ class SMDetection(Operation): arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases arrayParameters[:,-1] = arrayMeteors[:,-1] #Error - + return arrayParameters - + class CorrectSMPhases(Operation): - + def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None): - + arrayParameters = dataOut.data_param pairsList = [] pairx = (0,1) @@ -3282,49 +3283,49 @@ class CorrectSMPhases(Operation): pairsList.append(pairx) pairsList.append(pairy) jph = numpy.zeros(4) - + phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets) arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets))) - + meteorOps = SMOperations() if channelPositions is None: # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella - + pairslist0, distances = meteorOps.getPhasePairs(channelPositions) h = (hmin,hmax) - + arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph) - + dataOut.data_param = arrayParameters return class SMPhaseCalibration(Operation): - + __buffer = None __initime = None __dataReady = False - + __isConfig = False - + def __checkTime(self, currentTime, initTime, paramInterval, outputInterval): - + dataTime = currentTime + paramInterval deltaTime = dataTime - initTime - + if deltaTime >= outputInterval or deltaTime < 0: return True - + return False - + def __getGammas(self, pairs, d, phases): gammas = numpy.zeros(2) - + for i in range(len(pairs)): - + pairi = pairs[i] phip3 = phases[:,pairi[0]] @@ -3338,7 +3339,7 @@ class SMPhaseCalibration(Operation): jgamma = numpy.angle(numpy.exp(1j*jgamma)) # jgamma[jgamma>numpy.pi] -= 2*numpy.pi # jgamma[jgamma<-numpy.pi] += 2*numpy.pi - + #Revised distribution jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi)) @@ -3347,39 +3348,39 @@ class SMPhaseCalibration(Operation): rmin = -0.5*numpy.pi rmax = 0.5*numpy.pi phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax)) - + meteorsY = phaseHisto[0] phasesX = phaseHisto[1][:-1] width = phasesX[1] - phasesX[0] phasesX += width/2 - + #Gaussian aproximation bpeak = meteorsY.argmax() peak = meteorsY.max() jmin = bpeak - 5 jmax = bpeak + 5 + 1 - + if jmin<0: jmin = 0 jmax = 6 elif jmax > meteorsY.size: jmin = meteorsY.size - 6 jmax = meteorsY.size - + x0 = numpy.array([peak,bpeak,50]) coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax])) - + #Gammas gammas[i] = coeff[0][1] - + return gammas - + def __residualFunction(self, coeffs, y, t): - + return y - self.__gauss_function(t, coeffs) def __gauss_function(self, t, coeffs): - + return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2) def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray): @@ -3400,16 +3401,16 @@ class SMPhaseCalibration(Operation): max_xangle = range_angle[iz]/2 + center_xangle min_yangle = -range_angle[iz]/2 + center_yangle max_yangle = range_angle[iz]/2 + 
center_yangle - + inc_x = (max_xangle-min_xangle)/nstepsx inc_y = (max_yangle-min_yangle)/nstepsy - + alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle penalty = numpy.zeros((nstepsx,nstepsy)) jph_array = numpy.zeros((nchan,nstepsx,nstepsy)) jph = numpy.zeros(nchan) - + # Iterations looking for the offset for iy in range(int(nstepsy)): for ix in range(int(nstepsx)): @@ -3417,46 +3418,46 @@ class SMPhaseCalibration(Operation): d2 = d[pairsList[1][1]] d5 = d[pairsList[0][0]] d4 = d[pairsList[0][1]] - + alp2 = alpha_y[iy] #gamma 1 - alp4 = alpha_x[ix] #gamma 0 - + alp4 = alpha_x[ix] #gamma 0 + alp3 = -alp2*d3/d2 - gammas[1] alp5 = -alp4*d5/d4 - gammas[0] # jph[pairy[1]] = alpha_y[iy] -# jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]] - +# jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]] + # jph[pairx[1]] = alpha_x[ix] # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]] jph[pairsList[0][1]] = alp4 jph[pairsList[0][0]] = alp5 jph[pairsList[1][0]] = alp3 - jph[pairsList[1][1]] = alp2 + jph[pairsList[1][1]] = alp2 jph_array[:,ix,iy] = jph # d = [2.0,2.5,2.5,2.0] - #falta chequear si va a leer bien los meteoros + #falta chequear si va a leer bien los meteoros meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph) error = meteorsArray1[:,-1] ind1 = numpy.where(error==0)[0] penalty[ix,iy] = ind1.size - + i,j = numpy.unravel_index(penalty.argmax(), penalty.shape) phOffset = jph_array[:,i,j] - + center_xangle = phOffset[pairx[1]] center_yangle = phOffset[pairy[1]] - + phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j])) - phOffset = phOffset*180/numpy.pi + phOffset = phOffset*180/numpy.pi return phOffset - - + + def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1): - + dataOut.flagNoData = True - self.__dataReady = False + self.__dataReady = False dataOut.outputInterval = nHours*3600 - + if self.__isConfig == False: # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03) #Get Initial LTC time @@ -3464,19 +3465,19 @@ class SMPhaseCalibration(Operation): self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds() self.__isConfig = True - + if self.__buffer is None: self.__buffer = dataOut.data_param.copy() else: self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param)) - + self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready - + if self.__dataReady: dataOut.utctimeInit = self.__initime self.__initime += dataOut.outputInterval #to erase time offset - + freq = dataOut.frequency c = dataOut.C #m/s lamb = c/freq @@ -3498,13 +3499,13 @@ class SMPhaseCalibration(Operation): pairs.append((1,0)) else: pairs.append((0,1)) - + if distances[3] > distances[2]: pairs.append((3,2)) else: pairs.append((2,3)) # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb] - + meteorsArray = self.__buffer error = meteorsArray[:,-1] boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14) @@ -3512,7 +3513,7 @@ class SMPhaseCalibration(Operation): meteorsArray = meteorsArray[ind1,:] meteorsArray[:,-1] = 0 phases = meteorsArray[:,8:12] - + #Calculate Gammas gammas = self.__getGammas(pairs, distances, phases) # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180 @@ -3522,22 +3523,22 @@ class SMPhaseCalibration(Operation): 
dataOut.data_output = -phasesOff dataOut.flagNoData = False self.__buffer = None - - + + return - + class SMOperations(): - + def __init__(self): - + return - + def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph): - + arrayParameters = arrayParameters0.copy() hmin = h[0] hmax = h[1] - + #Calculate AOA (Error N 3, 4) #JONES ET AL. 1998 AOAthresh = numpy.pi/8 @@ -3545,72 +3546,72 @@ class SMOperations(): phases = -arrayParameters[:,8:12] + jph # phases = numpy.unwrap(phases) arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth) - + #Calculate Heights (Error N 13 and 14) error = arrayParameters[:,-1] Ranges = arrayParameters[:,1] zenith = arrayParameters[:,4] arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax) - + #----------------------- Get Final data ------------------------------------ # error = arrayParameters[:,-1] # ind1 = numpy.where(error==0)[0] # arrayParameters = arrayParameters[ind1,:] - + return arrayParameters - + def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth): - + arrayAOA = numpy.zeros((phases.shape[0],3)) cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions) - + arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth) cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1) arrayAOA[:,2] = cosDirError - + azimuthAngle = arrayAOA[:,0] zenithAngle = arrayAOA[:,1] - + #Setting Error indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0] error[indError] = 0 #Number 3: AOA not fesible indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0] - error[indInvalid] = 3 + error[indInvalid] = 3 #Number 4: Large difference in AOAs obtained from different antenna baselines indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0] - error[indInvalid] = 4 + error[indInvalid] = 4 return arrayAOA, error - + def __getDirectionCosines(self, arrayPhase, pairsList, distances): - + #Initializing some variables ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi ang_aux = ang_aux.reshape(1,ang_aux.size) - + cosdir = numpy.zeros((arrayPhase.shape[0],2)) cosdir0 = numpy.zeros((arrayPhase.shape[0],2)) - - + + for i in range(2): ph0 = arrayPhase[:,pairsList[i][0]] ph1 = arrayPhase[:,pairsList[i][1]] d0 = distances[pairsList[i][0]] d1 = distances[pairsList[i][1]] - - ph0_aux = ph0 + ph1 + + ph0_aux = ph0 + ph1 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux)) # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi -# ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi +# ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi #First Estimation cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1)) - + #Most-Accurate Second Estimation phi1_aux = ph0 - ph1 phi1_aux = phi1_aux.reshape(phi1_aux.size,1) #Direction Cosine 1 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1)) - + #Searching the correct Direction Cosine cosdir0_aux = cosdir0[:,i] cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1) @@ -3619,59 +3620,59 @@ class SMOperations(): indcos = cosDiff.argmin(axis = 1) #Saving Value obtained cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos] - + return cosdir0, cosdir - + def __calculateAOA(self, cosdir, azimuth): cosdirX = cosdir[:,0] cosdirY = cosdir[:,1] - + zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 
deg north, 90 deg east angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose() - + return angles - + def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight): - + Ramb = 375 #Ramb = c/(2*PRF) Re = 6371 #Earth Radius heights = numpy.zeros(Ranges.shape) - + R_aux = numpy.array([0,1,2])*Ramb R_aux = R_aux.reshape(1,R_aux.size) Ranges = Ranges.reshape(Ranges.size,1) - + Ri = Ranges + R_aux hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re - + #Check if there is a height between 70 and 110 km h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1) ind_h = numpy.where(h_bool == 1)[0] - + hCorr = hi[ind_h, :] ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight)) - + hCorr = hi[ind_hCorr][:len(ind_h)] heights[ind_h] = hCorr - + #Setting Error #Number 13: Height unresolvable echo: not valid height within 70 to 110 km - #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km + #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0] error[indError] = 0 - indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0] + indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0] error[indInvalid2] = 14 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0] - error[indInvalid1] = 13 - + error[indInvalid1] = 13 + return heights, error - + def getPhasePairs(self, channelPositions): chanPos = numpy.array(channelPositions) listOper = list(itertools.combinations(list(range(5)),2)) - + distances = numpy.zeros(4) axisX = [] axisY = [] @@ -3679,15 +3680,15 @@ class SMOperations(): distY = numpy.zeros(3) ix = 0 iy = 0 - + pairX = numpy.zeros((2,2)) pairY = numpy.zeros((2,2)) - + for i in range(len(listOper)): pairi = listOper[i] - + posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:]) - + if posDif[0] == 0: axisY.append(pairi) distY[iy] = posDif[1] @@ -3696,7 +3697,7 @@ class SMOperations(): axisX.append(pairi) distX[ix] = posDif[0] ix += 1 - + for i in range(2): if i==0: dist0 = distX @@ -3704,7 +3705,7 @@ class SMOperations(): else: dist0 = distY axis0 = axisY - + side = numpy.argsort(dist0)[:-1] axis0 = numpy.array(axis0)[side,:] chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0]) @@ -3712,7 +3713,7 @@ class SMOperations(): side = axis1[axis1 != chanC] diff1 = chanPos[chanC,i] - chanPos[side[0],i] diff2 = chanPos[chanC,i] - chanPos[side[1],i] - if diff1<0: + if diff1<0: chan2 = side[0] d2 = numpy.abs(diff1) chan1 = side[1] @@ -3722,7 +3723,7 @@ class SMOperations(): d2 = numpy.abs(diff2) chan1 = side[0] d1 = numpy.abs(diff1) - + if i==0: chanCX = chanC chan1X = chan1 @@ -3734,10 +3735,10 @@ class SMOperations(): chan2Y = chan2 distances[2:4] = numpy.array([d1,d2]) # axisXsides = numpy.reshape(axisX[ix,:],4) -# +# # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0]) # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0]) -# +# # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0] # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0] # channel25X = int(pairX[0,ind25X]) @@ -3746,59 +3747,59 @@ class SMOperations(): # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0] # channel25Y = int(pairY[0,ind25Y]) # channel20Y = int(pairY[1,ind20Y]) - + # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)] - pairslist = [(chanCX, 
chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)] - + pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)] + return pairslist, distances # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth): -# +# # arrayAOA = numpy.zeros((phases.shape[0],3)) # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList) -# +# # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth) # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1) # arrayAOA[:,2] = cosDirError -# +# # azimuthAngle = arrayAOA[:,0] # zenithAngle = arrayAOA[:,1] -# +# # #Setting Error # #Number 3: AOA not fesible # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0] -# error[indInvalid] = 3 +# error[indInvalid] = 3 # #Number 4: Large difference in AOAs obtained from different antenna baselines # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0] -# error[indInvalid] = 4 +# error[indInvalid] = 4 # return arrayAOA, error -# +# # def __getDirectionCosines(self, arrayPhase, pairsList): -# +# # #Initializing some variables # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi # ang_aux = ang_aux.reshape(1,ang_aux.size) -# +# # cosdir = numpy.zeros((arrayPhase.shape[0],2)) # cosdir0 = numpy.zeros((arrayPhase.shape[0],2)) -# -# +# +# # for i in range(2): # #First Estimation # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]] # #Dealias # indcsi = numpy.where(phi0_aux > numpy.pi) -# phi0_aux[indcsi] -= 2*numpy.pi +# phi0_aux[indcsi] -= 2*numpy.pi # indcsi = numpy.where(phi0_aux < -numpy.pi) -# phi0_aux[indcsi] += 2*numpy.pi +# phi0_aux[indcsi] += 2*numpy.pi # #Direction Cosine 0 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5) -# +# # #Most-Accurate Second Estimation # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]] # phi1_aux = phi1_aux.reshape(phi1_aux.size,1) # #Direction Cosine 1 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5) -# +# # #Searching the correct Direction Cosine # cosdir0_aux = cosdir0[:,i] # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1) @@ -3807,51 +3808,50 @@ class SMOperations(): # indcos = cosDiff.argmin(axis = 1) # #Saving Value obtained # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos] -# +# # return cosdir0, cosdir -# +# # def __calculateAOA(self, cosdir, azimuth): # cosdirX = cosdir[:,0] # cosdirY = cosdir[:,1] -# +# # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose() -# +# # return angles -# +# # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight): -# +# # Ramb = 375 #Ramb = c/(2*PRF) # Re = 6371 #Earth Radius # heights = numpy.zeros(Ranges.shape) -# +# # R_aux = numpy.array([0,1,2])*Ramb # R_aux = R_aux.reshape(1,R_aux.size) -# +# # Ranges = Ranges.reshape(Ranges.size,1) -# +# # Ri = Ranges + R_aux # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re -# +# # #Check if there is a height between 70 and 110 km # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1) # ind_h = numpy.where(h_bool == 1)[0] -# +# # hCorr = hi[ind_h, :] # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight)) -# -# hCorr = hi[ind_hCorr] +# +# hCorr = hi[ind_hCorr] # heights[ind_h] = 
hCorr -# +# # #Setting Error # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km -# #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km -# -# indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0] +# #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km +# +# indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0] # error[indInvalid2] = 14 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0] -# error[indInvalid1] = 13 -# -# return heights, error - \ No newline at end of file +# error[indInvalid1] = 13 +# +# return heights, error diff --git a/schainpy/model/proc/jroproc_spectra.py b/schainpy/model/proc/jroproc_spectra.py index 1851995..9b21136 100644 --- a/schainpy/model/proc/jroproc_spectra.py +++ b/schainpy/model/proc/jroproc_spectra.py @@ -291,16 +291,16 @@ class SpectraProc(ProcessingUnit): # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList] self.dataOut.channelList = range(len(channelIndexList)) self.__selectPairsByChannel(channelIndexList) - + return 1 - - + + def selectFFTs(self, minFFT, maxFFT ): """ - Selecciona un bloque de datos en base a un grupo de valores de puntos FFTs segun el rango + Selecciona un bloque de datos en base a un grupo de valores de puntos FFTs segun el rango minFFT<= FFT <= maxFFT """ - + if (minFFT > maxFFT): raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT)) @@ -330,20 +330,20 @@ class SpectraProc(ProcessingUnit): self.selectFFTsByIndex(minIndex, maxIndex) return 1 - - + + def setH0(self, h0, deltaHeight = None): - + if not deltaHeight: deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] - + nHeights = self.dataOut.nHeights - + newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight - + self.dataOut.heightList = newHeiRange - - + + def selectHeights(self, minHei, maxHei): """ Selecciona un bloque de datos en base a un grupo de valores de alturas segun el rango @@ -360,7 +360,7 @@ class SpectraProc(ProcessingUnit): 1 si el metodo se ejecuto con exito caso contrario devuelve 0 """ - + if (minHei > maxHei): raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)) @@ -388,7 +388,7 @@ class SpectraProc(ProcessingUnit): maxIndex = len(heights) self.selectHeightsByIndex(minIndex, maxIndex) - + return 1 @@ -436,7 +436,7 @@ class SpectraProc(ProcessingUnit): def selectFFTsByIndex(self, minIndex, maxIndex): """ - + """ if (minIndex < 0) or (minIndex > maxIndex): @@ -459,7 +459,7 @@ class SpectraProc(ProcessingUnit): self.dataOut.data_spc = data_spc self.dataOut.data_cspc = data_cspc self.dataOut.data_dc = data_dc - + self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1]) self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1] self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1] @@ -552,7 +552,7 @@ class SpectraProc(ProcessingUnit): xx_inv = numpy.linalg.inv(xx) xx_aux = xx_inv[0, :] - for ich in range(num_chan): + for ich in range(num_chan): yy = jspectra[ich, ind_vel, :] jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy) @@ -574,12 +574,12 @@ class SpectraProc(ProcessingUnit): return 1 def removeInterference2(self): - + cspc = self.dataOut.data_cspc spc = self.dataOut.data_spc - Heights = numpy.arange(cspc.shape[2]) + Heights = numpy.arange(cspc.shape[2]) realCspc = numpy.abs(cspc) - + for i in range(cspc.shape[0]): LinePower= 
numpy.sum(realCspc[i], axis=0) Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)] @@ -587,17 +587,17 @@ class SpectraProc(ProcessingUnit): InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 ) InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)] InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)] - - + + InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) ) #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax])) if len(InterferenceRange)1): self.osamp = osamp self.code = numpy.repeat(code, repeats=self.osamp, axis=1) @@ -621,7 +620,7 @@ class Decoder(Operation): #Frequency __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex) - + __codeBuffer[:,0:self.nBaud] = self.code self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1)) @@ -670,11 +669,11 @@ class Decoder(Operation): junk = junk.flatten() code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud)) profilesList = range(self.__nProfiles) - - for i in range(self.__nChannels): - for j in profilesList: - self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:] - return self.datadecTime + + for i in range(self.__nChannels): + for j in profilesList: + self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:] + return self.datadecTime def __convolutionByBlockInFreq(self, data): @@ -691,7 +690,7 @@ class Decoder(Operation): return data - + def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None): if dataOut.flagDecodeData: @@ -722,7 +721,7 @@ class Decoder(Operation): self.__nProfiles = dataOut.nProfiles datadec = None - + if mode == 3: mode = 0 @@ -1105,9 +1104,9 @@ class SplitProfiles(Operation): if shape[2] % n != 0: raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2])) - + new_shape = shape[0], shape[1]*n, int(shape[2]/n) - + dataOut.data = numpy.reshape(dataOut.data, new_shape) dataOut.flagNoData = False diff --git a/schainpy/model/utils/jroutils_ftp.py b/schainpy/model/utils/jroutils_ftp.py index a1cc981..252e6fc 100644 --- a/schainpy/model/utils/jroutils_ftp.py +++ b/schainpy/model/utils/jroutils_ftp.py @@ -167,12 +167,12 @@ class Remote(Thread): self.mutex.acquire() # init = time.time() - # + # # while(self.bussy): # sleep(0.1) # if time.time() - init > 2*self.period: # return 0 - + self.fileList = fileList self.mutex.release() return 1 @@ -195,7 +195,7 @@ class Remote(Thread): if self.stopFlag: break - + # self.bussy = True self.mutex.acquire() @@ -399,19 +399,19 @@ class SSHClient(Remote): """ This method is used to set SSH parameters and establish a connection to a remote server - + Inputs: - server - remote server IP Address - - username - remote server Username - + server - remote server IP Address + + username - remote server Username + password - remote server password - + remotefolder - remote server current working directory - + Return: void - - Affects: + + Affects: self.status - in case of error or fail connection this parameter is set to 0 else 1 """ @@ -483,10 +483,10 @@ class SSHClient(Remote): def __execute(self, command): """ __execute a command on remote server - + Input: command - Exmaple 'ls -l' - + Return: 0 in error case else 1 """ @@ -508,10 +508,10 @@ class SSHClient(Remote): def mkdir(self, 
remotefolder): """ mkdir is used to make a new directory in remote server - + Input: remotefolder - directory name - + Return: 0 in error case else 1 """ @@ -529,14 +529,14 @@ class SSHClient(Remote): def cd(self, remotefolder): """ cd is used to change remote working directory on server - + Input: remotefolder - current working directory - + Affects: self.remotefolder - - Return: + + Return: 0 in case of error else 1 """ if not self.status: @@ -580,8 +580,8 @@ class SendToServer(ProcessingUnit): ProcessingUnit.__init__(self, **kwargs) self.isConfig = False - self.clientObj = None - + self.clientObj = None + def setup(self, server, username, password, remotefolder, localfolder, ext='.png', period=60, protocol='ftp', **kwargs): self.clientObj = None @@ -641,11 +641,11 @@ class SendToServer(ProcessingUnit): self.init = time.time() self.setup(**kwargs) self.isConfig = True - + if not self.clientObj.is_alive(): print("[Remote Server]: Restarting connection ") self.setup(**kwargs) - + if time.time() - self.init >= self.period: fullfilenameList = self.findFiles() @@ -706,9 +706,9 @@ class FTP(object): try: self.ftp = ftplib.FTP(self.server) self.ftp.login(self.username,self.password) - self.ftp.cwd(self.remotefolder) + self.ftp.cwd(self.remotefolder) # print 'Connect to FTP Server: Successfully' - + except ftplib.all_errors: print('Error FTP Service') self.status = 1 @@ -1005,4 +1005,4 @@ class SendByFTP(Operation): self.counter = 0 - self.status = 1 \ No newline at end of file + self.status = 1 diff --git a/schainpy/model/utils/jroutils_publish.py b/schainpy/model/utils/jroutils_publish.py index d631130..44a52a5 100644 --- a/schainpy/model/utils/jroutils_publish.py +++ b/schainpy/model/utils/jroutils_publish.py @@ -47,7 +47,7 @@ PLOT_CODES = { def get_plot_code(s): label = s.split('_')[0] codes = [key for key in PLOT_CODES if key in label] - if codes: + if codes: return PLOT_CODES[codes[0]] else: return 24 @@ -69,7 +69,7 @@ class PublishData(Operation): self.counter = 0 self.delay = kwargs.get('delay', 0) self.cnt = 0 - self.verbose = verbose + self.verbose = verbose context = zmq.Context() self.zmq_socket = context.socket(zmq.PUSH) server = kwargs.get('server', 'zmq.pipe') @@ -85,7 +85,7 @@ class PublishData(Operation): def publish_data(self): self.dataOut.finished = False - + if self.verbose: log.log( 'Sending {} - {}'.format(self.dataOut.type, self.dataOut.datatime), @@ -103,12 +103,12 @@ class PublishData(Operation): time.sleep(self.delay) def close(self): - + self.dataOut.finished = True self.zmq_socket.send_pyobj(self.dataOut) time.sleep(0.1) self.zmq_socket.close() - + class ReceiverData(ProcessingUnit): @@ -195,7 +195,7 @@ class SendToFTP(Operation): self.ftp.close() self.ftp = None self.ready = False - return + return try: self.ftp.login(self.username, self.password) @@ -244,8 +244,8 @@ class SendToFTP(Operation): def upload(self, src, dst): log.log('Uploading {} -> {} '.format( - src.split('/')[-1], dst.split('/')[-1]), - self.name, + src.split('/')[-1], dst.split('/')[-1]), + self.name, nl=False ) @@ -273,7 +273,7 @@ class SendToFTP(Operation): fp.close() log.success('OK', tag='') return 1 - + def send_files(self): for x, pattern in enumerate(self.patterns): @@ -282,35 +282,35 @@ class SendToFTP(Operation): srcname = self.find_files(local, ext) src = os.path.join(local, srcname) if os.path.getmtime(src) < time.time() - 30*60: - log.warning('Skipping old file {}'.format(srcname)) + log.warning('Skipping old file {}'.format(srcname)) continue if srcname is None or srcname == 
self.latest[x]: - log.warning('File alreday uploaded {}'.format(srcname)) + log.warning('File already uploaded {}'.format(srcname)) continue - + if 'png' in ext: dstname = self.getftpname(srcname, int(exp_code), int(sub_exp_code)) else: - dstname = srcname - + dstname = srcname + dst = os.path.join(remote, dstname) if self.upload(src, dst): self.times[x] = time.time() self.latest[x] = srcname - else: + else: self.ready = False - break + break def run(self, dataOut, server, username, password, timeout=10, **kwargs): if not self.isConfig: self.setup( - server=server, - username=username, - password=password, - timeout=timeout, + server=server, + username=username, + password=password, + timeout=timeout, **kwargs ) self.isConfig = True
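#setup() above runs only on the first call to run(); once isConfig is set, later calls
#skip the connection setup, and send_files() (defined above) skips files older than
#30 minutes or already listed in self.latest before uploading.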