From 36f30f86830d70571bcb67059d5b7085ff100263 2017-10-20 23:47:16 From: jespinoza Date: 2017-10-20 23:47:16 Subject: [PATCH] fixing merge conflicts --- diff --git a/.gitignore b/.gitignore index e257102..9c1e2ac 100644 --- a/.gitignore +++ b/.gitignore @@ -100,16 +100,16 @@ ENV/ # eclipse .project .pydevproject - # vscode .vscode -schainpy/scripts/ schaingui/node_modules/ +schainpy/scripts/ .svn/ *.png *.pyc -*.xml +schainpy/scripts +.vscode +trash *.log -trash \ No newline at end of file diff --git a/schaincli/README.md b/schaincli/README.md index ced14bb..1cc9847 100644 --- a/schaincli/README.md +++ b/schaincli/README.md @@ -1,4 +1,4 @@ -# schaing +# schain Command Line Interface for SIGNAL CHAIN - jro diff --git a/schaincli/asdasd.py b/schaincli/asdasd.py deleted file mode 100644 index ca5629d..0000000 --- a/schaincli/asdasd.py +++ /dev/null @@ -1,34 +0,0 @@ -from schainpy.controller import Project - -desc = "asdasddsad" - -controller = Project() -controller.setup(id='191', name="asdasd", description=desc) - -readUnitConf = controller.addReadUnit(datatype='VoltageReader', - path="/home/nanosat/schain/schain-cli", - startDate="1970/01/01", - endDate="2017/12/31", - startTime="00:00:00", - endTime="23:59:59", - online=0, - verbose=1, - walk=1, - ) - -procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId()) - -opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other') -opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist') - -opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other') -opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str') -opObj11.addParameter(name='showprofile', value='0', format='int') -opObj11.addParameter(name='xmin', value='0', format='int') -opObj11.addParameter(name='xmax', value='24', format='int') -opObj11.addParameter(name='figpath', value="/home/nanosat/schain/schain-cli/figs", format='str') -opObj11.addParameter(name='wr_period', value='5', format='int') -opObj11.addParameter(name='exp_code', value='22', format='int') - - -controller.start() diff --git a/schaincli/cli.py b/schaincli/cli.py index 5b03de2..46e2cbb 100644 --- a/schaincli/cli.py +++ b/schaincli/cli.py @@ -31,18 +31,17 @@ PREFIX = 'experiment' @click.command() @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str) -@click.option('--xml', '-x', default=None, help='run an XML file', type=click.Path(exists=True, resolve_path=True)) @click.argument('command', default='run', required=True) @click.argument('nextcommand', default=None, required=False, type=str) -def main(command, nextcommand, version, xml): +def main(command, nextcommand, version): """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY \n Available commands.\n --xml: runs a schain XML generated file\n run: runs any python script starting 'experiment_'\n generate: generates a template schain script\n search: return avilable operations, procs or arguments of the give operation/proc\n""" - if xml is not None: - runFromXML(xml) + if command == 'xml': + runFromXML(nextcommand) elif command == 'generate': generate() elif command == 'test': @@ -54,6 +53,7 @@ def main(command, nextcommand, version, xml): else: log.error('Command {} is not defined'.format(command)) + def check_module(possible, instance): def check(x): try: @@ -77,19 +77,23 @@ def search(nextcommand): log.error('There is no Operation/ProcessingUnit to search') elif nextcommand == 'procs': procs 
= paramsFinder.getProcs() - log.success('Current ProcessingUnits are:\n\033[1m{}\033[0m'.format('\n'.join(procs))) + log.success( + 'Current ProcessingUnits are:\n\033[1m{}\033[0m'.format('\n'.join(procs))) elif nextcommand == 'operations': operations = paramsFinder.getOperations() - log.success('Current Operations are:\n\033[1m{}\033[0m'.format('\n'.join(operations))) + log.success('Current Operations are:\n\033[1m{}\033[0m'.format( + '\n'.join(operations))) else: try: args = paramsFinder.getArgs(nextcommand) - log.warning('Use this feature with caution. It may not return all the allowed arguments') + log.warning( + 'Use this feature with caution. It may not return all the allowed arguments') if len(args) == 0: log.success('{} has no arguments'.format(nextcommand)) else: - log.success('Showing arguments of {} are:\n\033[1m{}\033[0m'.format(nextcommand, '\n'.join(args))) + log.success('Showing {} arguments:\n\033[1m{}\033[0m'.format( + nextcommand, '\n'.join(args))) except Exception as e: log.error('Module {} does not exists'.format(nextcommand)) allModules = paramsFinder.getAll() @@ -117,12 +121,18 @@ def runschain(nextcommand): def basicInputs(): inputs = {} - inputs['desc'] = click.prompt('Enter a description', default="A schain project", type=str) - inputs['name'] = click.prompt('Name of the project', default="project", type=str) - inputs['path'] = click.prompt('Data path', default=os.getcwd(), type=click.Path(exists=True, resolve_path=True)) - inputs['startDate'] = click.prompt('Start date', default='1970/01/01', type=str) - inputs['endDate'] = click.prompt('End date', default='2017/12/31', type=str) - inputs['startHour'] = click.prompt('Start hour', default='00:00:00', type=str) + inputs['desc'] = click.prompt( + 'Enter a description', default="A schain project", type=str) + inputs['name'] = click.prompt( + 'Name of the project', default="project", type=str) + inputs['path'] = click.prompt('Data path', default=os.getcwd( + ), type=click.Path(exists=True, resolve_path=True)) + inputs['startDate'] = click.prompt( + 'Start date', default='1970/01/01', type=str) + inputs['endDate'] = click.prompt( + 'End date', default='2017/12/31', type=str) + inputs['startHour'] = click.prompt( + 'Start hour', default='00:00:00', type=str) inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str) inputs['figpath'] = inputs['path'] + '/figs' return inputs @@ -132,7 +142,8 @@ def generate(): inputs = basicInputs() inputs['multiprocess'] = click.confirm('Is this a multiprocess script?') if inputs['multiprocess']: - inputs['nProcess'] = click.prompt('How many process?', default=cpu_count(), type=int) + inputs['nProcess'] = click.prompt( + 'How many process?', default=cpu_count(), type=int) current = templates.multiprocess.format(**inputs) else: current = templates.basic.format(**inputs) diff --git a/schaincli/templates.py b/schaincli/templates.py index ce08f4c..9f1efde 100644 --- a/schaincli/templates.py +++ b/schaincli/templates.py @@ -1,11 +1,10 @@ basic = '''from schainpy.controller import Project desc = "{desc}" +project = Project() +project.setup(id='200', name="{name}", description=desc) -controller = Project() -controller.setup(id='191', name="{name}", description=desc) - -readUnitConf = controller.addReadUnit(datatype='VoltageReader', +voltage_reader = project.addReadUnit(datatype='VoltageReader', path="{path}", startDate="{startDate}", endDate="{endDate}", @@ -16,60 +15,76 @@ readUnitConf = controller.addReadUnit(datatype='VoltageReader', walk=1, ) -procUnitConf1 = 
controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId()) +voltage_proc = project.addProcUnit(datatype='VoltageProc', inputId=voltage_reader.getId()) -opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other') -opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist') +profile = voltage_proc.addOperation(name='ProfileSelector', optype='other') +profile.addParameter(name='profileRangeList', value='120,183', format='intlist') -opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other') -opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str') -opObj11.addParameter(name='showprofile', value='0', format='int') -opObj11.addParameter(name='xmin', value='0', format='int') -opObj11.addParameter(name='xmax', value='24', format='int') -opObj11.addParameter(name='figpath', value="{figpath}", format='str') -opObj11.addParameter(name='wr_period', value='5', format='int') -opObj11.addParameter(name='exp_code', value='22', format='int') +rti = voltage_proc.addOperation(name='RTIPlot', optype='other') +rti.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str') +rti.addParameter(name='showprofile', value='0', format='int') +rti.addParameter(name='xmin', value='0', format='int') +rti.addParameter(name='xmax', value='24', format='int') +rti.addParameter(name='figpath', value="{figpath}", format='str') +rti.addParameter(name='wr_period', value='5', format='int') +rti.addParameter(name='exp_code', value='22', format='int') controller.start() ''' -multiprocess = '''from schainpy.controller import Project, multiSchain - +multiprocess = '''from schainpy.controller import Project, MPProject +from time import sleep desc = "{desc}" -def fiber(cursor, skip, q, day): - controller = Project() - controller.setup(id='191', name="{name}", description=desc) - - readUnitConf = controller.addReadUnit(datatype='SpectraReader', - path="{path}", - startDate=day, - endDate=day, - startTime="{startHour}", - endTime="{endHour}", - online=0, - queue=q, - cursor=cursor, - skip=skip, - verbose=1, - walk=1, - ) - - procUnitConf1 = controller.addProcUnit(datatype='Spectra', inputId=readUnitConf.getId()) - - procUnitConf2 = controller.addProcUnit(datatype='ParametersProc', inputId=readUnitConf.getId()) - opObj11 = procUnitConf2.addOperation(name='SpectralMoments', optype='other') - - opObj12 = procUnitConf2.addOperation(name='PublishData', optype='other') - opObj12.addParameter(name='zeromq', value=1, format='int') - opObj12.addParameter(name='verbose', value=0, format='bool') - - controller.start() - - -if __name__ == '__main__': - multiSchain(fiber, nProcess={nProcess}, startDate="{startDate}", endDate="{endDate}") +#################### +# PLOTTER RECEIVER # +#################### +plotter = Project() +plotter.setup(id='100', name='receiver', description=desc) + +receiver_plot = plotter.addProcUnit(name='PlotterReceiver') +receiver_plot.addParameter(name='throttle', value=20, format='int') +receiver_plot.addParameter(name='plottypes', value='rti', format='str') + +rti = receiver_plot.addOperation(name='PlotRTIData', optype='other') +rti.addParameter(name='zmin', value='-40.0', format='float') +rti.addParameter(name='zmax', value='100.0', format='float') +rti.addParameter(name='decimation', value='200', format='int') +rti.addParameter(name='xmin', value='0.0', format='int') +rti.addParameter(name='colormap', value='jet', format='str') + +plotter.start() + +sleep(2) + +################ +# DATA EMITTER # 
+################ +project = Project() +project.setup(id='200', name="{name}", description=desc) + +spectra_reader = project.addReadUnit(datatype='SpectraReader', + path="{path}", + startDate={startDate}, + endDate={endDate}, + startTime="{startHour}", + endTime="{endHour}", + online=0, + verbose=1, + walk=1, + ) + +spectra_proc = project.addProcUnit(datatype='Spectra', inputId=spectra_reader.getId()) + +parameters_proc = project.addProcUnit(datatype='ParametersProc', inputId=spectra_proc.getId()) +moments = parameters_proc.addOperation(name='SpectralMoments', optype='other') + +publish = parameters_proc.addOperation(name='PublishData', optype='other') +publish.addParameter(name='zeromq', value=1, format='int') +publish.addParameter(name='verbose', value=0, format='bool') + +MPProject(project, 16) ''' diff --git a/schainpy/CHANGELOG.md b/schainpy/CHANGELOG.md index e2ae29a..5f8cfa0 100644 --- a/schainpy/CHANGELOG.md +++ b/schainpy/CHANGELOG.md @@ -1,11 +1,11 @@ ## CHANGELOG: ### 2.3 -* Added high order function `multiSchain` for multiprocessing scripts. +* Added high order function `MPProject` for multiprocessing scripts. * Added two new Processing Units `PublishData` and `ReceiverData` for receiving and sending dataOut through multiple ways (tcp, ipc, inproc). * Added a new graphics Processing Unit `PlotterReceiver`. It is decoupled from normal processing sequence with support for data generated by multiprocessing scripts. * Added support for sending realtime graphic to web server. -* GUI command `schain` is now `schainGUI`. +* GUI command `schain` is now `schainGUI`. * Added a CLI tool named `schain`. * Scripts templates can be now generated with `schain generate`. * Now it is possible to search Processing Units and Operations with `schain search [module]` to get the right name and its allowed parameters. @@ -21,7 +21,7 @@ ### 2.2.6 * Graphics generated by the GUI are now the same as generated by scripts. Issue #1074. * Added support for C extensions. -* function `hildebrand_sehkon` optimized with a C wrapper. +* Function `hildebrand_sehkon` optimized with a C wrapper. * Numpy version updated. * Migration to GIT. 
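For context, a script produced by `schain generate` from the `basic` template above would look roughly like the sketch below once the prompts are answered; the id, data path, dates and figure path are placeholder values, and `MPProject` (defined in controller.py below) takes the place of `project.start()` only in the multiprocess variant.

    from schainpy.controller import Project, MPProject

    desc = "A schain project"  # placeholder description

    project = Project()
    project.setup(id='200', name='project', description=desc)

    # Reader: path and date range stand in for the values entered at the prompts
    reader = project.addReadUnit(datatype='VoltageReader',
                                 path='/data/experiment',   # placeholder
                                 startDate='2017/01/01',    # placeholder
                                 endDate='2017/01/02',      # placeholder
                                 startTime='00:00:00',
                                 endTime='23:59:59',
                                 online=0,
                                 verbose=1,
                                 walk=1)

    proc = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())

    rti = proc.addOperation(name='RTIPlot', optype='other')
    rti.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
    rti.addParameter(name='figpath', value='/data/experiment/figs', format='str')  # placeholder

    project.start()          # single-process run
    # MPProject(project, 4)  # multiprocess variant: split the date range across 4 workers
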
diff --git a/schainpy/controller.py b/schainpy/controller.py index 1fd2a4d..4b3039e 100644 --- a/schainpy/controller.py +++ b/schainpy/controller.py @@ -24,6 +24,7 @@ DTYPES = { 'Spectra': '.pdata' } + def MPProject(project, n=cpu_count()): ''' Project wrapper to run schain in n processes @@ -34,8 +35,8 @@ def MPProject(project, n=cpu_count()): dt1 = op.getParameterValue('startDate') dt2 = op.getParameterValue('endDate') days = (dt2 - dt1).days - - for day in range(days+1): + + for day in range(days + 1): skip = 0 cursor = 0 processes = [] @@ -43,17 +44,17 @@ def MPProject(project, n=cpu_count()): dt_str = dt.strftime('%Y/%m/%d') reader = JRODataReader() paths, files = reader.searchFilesOffLine(path=rconf.path, - startDate=dt, - endDate=dt, - ext=DTYPES[rconf.datatype]) + startDate=dt, + endDate=dt, + ext=DTYPES[rconf.datatype]) nFiles = len(files) if nFiles == 0: continue - skip = int(math.ceil(nFiles/n)) - while nFiles > cursor*skip: - rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor, - skip=skip) - p = project.clone() + skip = int(math.ceil(nFiles / n)) + while nFiles > cursor * skip: + rconf.update(startDate=dt_str, endDate=dt_str, cursor=cursor, + skip=skip) + p = project.clone() p.start() processes.append(p) cursor += 1 @@ -72,6 +73,7 @@ def MPProject(project, n=cpu_count()): time.sleep(3) + class ParameterConf(): id = None @@ -108,7 +110,7 @@ class ParameterConf(): return self.__formated_value if value == '': - raise ValueError, '%s: This parameter value is empty' %self.name + raise ValueError, '%s: This parameter value is empty' % self.name if format == 'list': strList = value.split(',') @@ -174,16 +176,16 @@ class ParameterConf(): new_value = ast.literal_eval(value) if type(new_value) not in (tuple, list): - raise ValueError, '%s has to be a tuple or list of pairs' %value + raise ValueError, '%s has to be a tuple or list of pairs' % value if type(new_value[0]) not in (tuple, list): if len(new_value) != 2: - raise ValueError, '%s has to be a tuple or list of pairs' %value + raise ValueError, '%s has to be a tuple or list of pairs' % value new_value = [new_value] for thisPair in new_value: if len(thisPair) != 2: - raise ValueError, '%s has to be a tuple or list of pairs' %value + raise ValueError, '%s has to be a tuple or list of pairs' % value self.__formated_value = new_value @@ -253,13 +255,14 @@ class ParameterConf(): self.value = parmElement.get('value') self.format = str.lower(parmElement.get('format')) - #Compatible with old signal chain version + # Compatible with old signal chain version if self.format == 'int' and self.name == 'idfigure': self.name = 'id' def printattr(self): - print 'Parameter[%s]: name = %s, value = %s, format = %s' %(self.id, self.name, self.value, self.format) + print 'Parameter[%s]: name = %s, value = %s, format = %s' % (self.id, self.name, self.value, self.format) + class OperationConf(): @@ -279,10 +282,9 @@ class OperationConf(): self.priority = None self.type = 'self' - def __getNewId(self): - return int(self.id)*10 + len(self.parmConfObjList) + 1 + return int(self.id) * 10 + len(self.parmConfObjList) + 1 def updateId(self, new_id): @@ -291,7 +293,7 @@ class OperationConf(): n = 1 for parmObj in self.parmConfObjList: - idParm = str(int(new_id)*10 + n) + idParm = str(int(new_id) * 10 + n) parmObj.updateId(idParm) n += 1 @@ -329,15 +331,14 @@ class OperationConf(): def getParameterValue(self, parameterName): parameterObj = self.getParameterObj(parameterName) - + # if not parameterObj: # return None - + value = parameterObj.getValue() 
return value - def getKwargs(self): kwargs = {} @@ -367,7 +368,7 @@ class OperationConf(): self.parmConfObjList = [] def addParameter(self, name, value, format='str'): - + if value is None: return None id = self.__getNewId() @@ -405,8 +406,8 @@ class OperationConf(): self.type = opElement.get('type') self.priority = opElement.get('priority') - #Compatible with old signal chain version - #Use of 'run' method instead 'init' + # Compatible with old signal chain version + # Use of 'run' method instead 'init' if self.type == 'self' and self.name == 'init': self.name = 'run' @@ -418,8 +419,8 @@ class OperationConf(): parmConfObj = ParameterConf() parmConfObj.readXml(parmElement) - #Compatible with old signal chain version - #If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER + # Compatible with old signal chain version + # If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER if self.type != 'self' and self.name == 'Plot': if parmConfObj.format == 'str' and parmConfObj.name == 'type': self.name = parmConfObj.value @@ -429,22 +430,21 @@ class OperationConf(): def printattr(self): - print '%s[%s]: name = %s, type = %s, priority = %s' %(self.ELEMENTNAME, - self.id, - self.name, - self.type, - self.priority) + print '%s[%s]: name = %s, type = %s, priority = %s' % (self.ELEMENTNAME, + self.id, + self.name, + self.type, + self.priority) for parmConfObj in self.parmConfObjList: parmConfObj.printattr() def createObject(self, plotter_queue=None): - if self.type == 'self': raise ValueError, 'This operation type cannot be created' - if self.type == 'plotter': + if self.type == 'plotter': if not plotter_queue: raise ValueError, 'plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)' @@ -489,11 +489,11 @@ class ProcUnitConf(): def __getPriority(self): - return len(self.opConfObjList)+1 + return len(self.opConfObjList) + 1 def __getNewId(self): - return int(self.id)*10 + len(self.opConfObjList) + 1 + return int(self.id) * 10 + len(self.opConfObjList) + 1 def getElementName(self): @@ -505,18 +505,17 @@ class ProcUnitConf(): def updateId(self, new_id, parentId=parentId): + new_id = int(parentId) * 10 + (int(self.id) % 10) + new_inputId = int(parentId) * 10 + (int(self.inputId) % 10) - new_id = int(parentId)*10 + (int(self.id) % 10) - new_inputId = int(parentId)*10 + (int(self.inputId) % 10) - - #If this proc unit has not inputs + # If this proc unit has not inputs if self.inputId == '0': new_inputId = 0 n = 1 for opConfObj in self.opConfObjList: - idOp = str(int(new_id)*10 + n) + idOp = str(int(new_id) * 10 + n) opConfObj.updateId(idOp) n += 1 @@ -525,7 +524,6 @@ class ProcUnitConf(): self.id = str(new_id) self.inputId = str(new_inputId) - def getInputId(self): return self.inputId @@ -559,18 +557,18 @@ class ProcUnitConf(): def setup(self, id, name, datatype, inputId, parentId=None): - #Compatible with old signal chain version - if datatype==None and name==None: + # Compatible with old signal chain version + if datatype == None and name == None: raise ValueError, 'datatype or name should be defined' - if name==None: + if name == None: if 'Proc' in datatype: name = datatype else: - name = '%sProc' %(datatype) + name = '%sProc' % (datatype) - if datatype==None: - datatype = name.replace('Proc','') + if datatype == None: + datatype = name.replace('Proc', '') self.id = str(id) self.name = name @@ -650,16 +648,15 @@ class ProcUnitConf(): def printattr(self): - print '%s[%s]: name = %s, datatype 
= %s, inputId = %s' %(self.ELEMENTNAME, - self.id, - self.name, - self.datatype, - self.inputId) - + print '%s[%s]: name = %s, datatype = %s, inputId = %s' % (self.ELEMENTNAME, + self.id, + self.name, + self.datatype, + self.inputId) + for opConfObj in self.opConfObjList: opConfObj.printattr() - def getKwargs(self): opObj = self.opConfObjList[0] @@ -675,10 +672,11 @@ class ProcUnitConf(): for opConfObj in self.opConfObjList: - if opConfObj.type=='self' and self.name=='run': + if opConfObj.type == 'self' and self.name == 'run': continue - elif opConfObj.type=='self': - procUnitObj.addOperationKwargs(opConfObj.id, **opConfObj.getKwargs()) + elif opConfObj.type == 'self': + procUnitObj.addOperationKwargs( + opConfObj.id, **opConfObj.getKwargs()) continue opObj = opConfObj.createObject(plotter_queue) @@ -704,10 +702,10 @@ class ProcUnitConf(): kwargs[parmConfObj.name] = parmConfObj.getValue() - sts = self.procUnitObj.call(opType = opConfObj.type, - opName = opConfObj.name, - opId = opConfObj.id) - + sts = self.procUnitObj.call(opType=opConfObj.type, + opName=opConfObj.name, + opId=opConfObj.id) + is_ok = is_ok or sts return is_ok @@ -725,6 +723,7 @@ class ProcUnitConf(): return + class ReadUnitConf(ProcUnitConf): path = None @@ -754,10 +753,9 @@ class ReadUnitConf(ProcUnitConf): def setup(self, id, name, datatype, path='', startDate='', endDate='', startTime='', endTime='', parentId=None, server=None, **kwargs): - #Compatible with old signal chain version - if datatype==None and name==None: + # Compatible with old signal chain version + if datatype == None and name == None: raise ValueError, 'datatype or name should be defined' - if name == None: if 'Reader' in datatype: name = datatype @@ -792,15 +790,16 @@ class ReadUnitConf(ProcUnitConf): if 'Reader' in datatype: self.name = datatype else: - self.name = '%sReader' %(datatype) + self.name = '%sReader' % (datatype) self.datatype = self.name.replace('Reader', '') - attrs = ('path', 'startDate', 'endDate', 'startTime', 'endTime', 'parentId') - + attrs = ('path', 'startDate', 'endDate', + 'startTime', 'endTime', 'parentId') + for attr in attrs: if attr in kwargs: setattr(self, attr, kwargs.pop(attr)) - + self.inputId = '0' self.updateRunOperation(**kwargs) @@ -813,21 +812,26 @@ class ReadUnitConf(ProcUnitConf): def addRunOperation(self, **kwargs): - opObj = self.addOperation(name = 'run', optype = 'self') + opObj = self.addOperation(name='run', optype='self') if self.server is None: - opObj.addParameter(name='datatype', value=self.datatype, format='str') + opObj.addParameter( + name='datatype', value=self.datatype, format='str') opObj.addParameter(name='path', value=self.path, format='str') - opObj.addParameter(name='startDate', value=self.startDate, format='date') - opObj.addParameter(name='endDate', value=self.endDate, format='date') - opObj.addParameter(name='startTime', value=self.startTime, format='time') - opObj.addParameter(name='endTime', value=self.endTime, format='time') - + opObj.addParameter( + name='startDate', value=self.startDate, format='date') + opObj.addParameter( + name='endDate', value=self.endDate, format='date') + opObj.addParameter( + name='startTime', value=self.startTime, format='time') + opObj.addParameter( + name='endTime', value=self.endTime, format='time') + for key, value in kwargs.items(): - opObj.addParameter(name=key, value=value, format=type(value).__name__) + opObj.addParameter(name=key, value=value, + format=type(value).__name__) else: - opObj.addParameter(name='server' , value=self.server, format='str') - + 
opObj.addParameter(name='server', value=self.server, format='str') return opObj @@ -838,16 +842,19 @@ class ReadUnitConf(ProcUnitConf): opObj.addParameter(name='datatype', value=self.datatype, format='str') opObj.addParameter(name='path', value=self.path, format='str') - opObj.addParameter(name='startDate', value=self.startDate, format='date') + opObj.addParameter( + name='startDate', value=self.startDate, format='date') opObj.addParameter(name='endDate', value=self.endDate, format='date') - opObj.addParameter(name='startTime', value=self.startTime, format='time') + opObj.addParameter( + name='startTime', value=self.startTime, format='time') opObj.addParameter(name='endTime', value=self.endTime, format='time') - + for key, value in kwargs.items(): - opObj.addParameter(name=key, value=value, format=type(value).__name__) + opObj.addParameter(name=key, value=value, + format=type(value).__name__) return opObj - + def readXml(self, upElement): self.id = upElement.get('id') @@ -877,6 +884,7 @@ class ReadUnitConf(ProcUnitConf): self.startTime = opConfObj.getParameterValue('startTime') self.endTime = opConfObj.getParameterValue('endTime') + class Project(Process): id = None @@ -905,7 +913,7 @@ class Project(Process): idList = self.procUnitConfObjDict.keys() - id = int(self.id)*10 + id = int(self.id) * 10 while True: id += 1 @@ -938,8 +946,8 @@ class Project(Process): for procKey in keyList: procUnitConfObj = self.procUnitConfObjDict[procKey] - idProcUnit = str(int(self.id)*10 + n) - procUnitConfObj.updateId(idProcUnit, parentId = self.id) + idProcUnit = str(int(self.id) * 10 + n) + procUnitConfObj.updateId(idProcUnit, parentId=self.id) newProcUnitConfObjDict[idProcUnit] = procUnitConfObj n += 1 @@ -949,9 +957,9 @@ class Project(Process): def setup(self, id, name='', description=''): print - print '*'*60 + print '*' * 60 print ' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__ - print '*'*60 + print '*' * 60 print self.id = str(id) self.description = description @@ -974,7 +982,8 @@ class Project(Process): idReadUnit = str(id) readUnitConfObj = ReadUnitConf() - readUnitConfObj.setup(idReadUnit, name, datatype, parentId=self.id, **kwargs) + readUnitConfObj.setup(idReadUnit, name, datatype, + parentId=self.id, **kwargs) self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj @@ -985,7 +994,8 @@ class Project(Process): idProcUnit = self.__getNewId() procUnitConfObj = ProcUnitConf() - procUnitConfObj.setup(idProcUnit, name, datatype, inputId, parentId=self.id) + procUnitConfObj.setup(idProcUnit, name, datatype, + inputId, parentId=self.id) self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj @@ -1059,11 +1069,11 @@ class Project(Process): abs_file = os.path.abspath(filename) if not os.access(os.path.dirname(abs_file), os.W_OK): - print 'No write permission on %s' %os.path.dirname(abs_file) + print 'No write permission on %s' % os.path.dirname(abs_file) return 0 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)): - print 'File %s already exists and it could not be overwriten' %abs_file + print 'File %s already exists and it could not be overwriten' % abs_file return 0 self.makeXml() @@ -1074,7 +1084,7 @@ class Project(Process): return 1 - def readXml(self, filename = None): + def readXml(self, filename=None): if not filename: print 'filename is not defined' @@ -1083,7 +1093,7 @@ class Project(Process): abs_file = os.path.abspath(filename) if not os.path.isfile(abs_file): - print '%s file does not exist' %abs_file + print '%s file does not exist' % 
abs_file return 0 self.projectElement = None @@ -1092,16 +1102,17 @@ class Project(Process): try: self.projectElement = ElementTree().parse(abs_file) except: - print 'Error reading %s, verify file format' %filename + print 'Error reading %s, verify file format' % filename return 0 self.project = self.projectElement.tag self.id = self.projectElement.get('id') self.name = self.projectElement.get('name') - self.description = self.projectElement.get('description') - - readUnitElementList = self.projectElement.iter(ReadUnitConf().getElementName()) + self.description = self.projectElement.get('description') + + readUnitElementList = self.projectElement.iter( + ReadUnitConf().getElementName()) for readUnitElement in readUnitElementList: readUnitConfObj = ReadUnitConf() @@ -1112,7 +1123,8 @@ class Project(Process): self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj - procUnitElementList = self.projectElement.iter(ProcUnitConf().getElementName()) + procUnitElementList = self.projectElement.iter( + ProcUnitConf().getElementName()) for procUnitElement in procUnitElementList: procUnitConfObj = ProcUnitConf() @@ -1129,10 +1141,10 @@ class Project(Process): def printattr(self): - print 'Project[%s]: name = %s, description = %s' %(self.id, - self.name, - self.description) - + print 'Project[%s]: name = %s, description = %s' % (self.id, + self.name, + self.description) + for procUnitConfObj in self.procUnitConfObjDict.values(): procUnitConfObj.printattr() @@ -1154,11 +1166,11 @@ class Project(Process): if int(inputId) == 0: continue - #Get input object + # Get input object puConfINObj = self.procUnitConfObjDict[inputId] puObjIN = puConfINObj.getProcUnitObj() - #Get current object + # Get current object thisPUObj = thisPUConfObj.getProcUnitObj() self.__connect(puObjIN, thisPUObj) @@ -1168,11 +1180,11 @@ class Project(Process): import socket err = traceback.format_exception(sys.exc_info()[0], - sys.exc_info()[1], - sys.exc_info()[2]) - - print '***** Error occurred in %s *****' %(procUnitConfObj.name) - print '***** %s' %err[-1] + sys.exc_info()[1], + sys.exc_info()[2]) + + print '***** Error occurred in %s *****' % (procUnitConfObj.name) + print '***** %s' % err[-1] message = ''.join(err) @@ -1181,30 +1193,33 @@ class Project(Process): if not send_email: return - subject = 'SChain v%s: Error running %s\n' %(schainpy.__version__, procUnitConfObj.name) + subject = 'SChain v%s: Error running %s\n' % ( + schainpy.__version__, procUnitConfObj.name) - subtitle = '%s: %s\n' %(procUnitConfObj.getElementName() ,procUnitConfObj.name) - subtitle += 'Hostname: %s\n' %socket.gethostbyname(socket.gethostname()) - subtitle += 'Working directory: %s\n' %os.path.abspath('./') - subtitle += 'Configuration file: %s\n' %self.filename - subtitle += 'Time: %s\n' %str(datetime.datetime.now()) + subtitle = '%s: %s\n' % ( + procUnitConfObj.getElementName(), procUnitConfObj.name) + subtitle += 'Hostname: %s\n' % socket.gethostbyname( + socket.gethostname()) + subtitle += 'Working directory: %s\n' % os.path.abspath('./') + subtitle += 'Configuration file: %s\n' % self.filename + subtitle += 'Time: %s\n' % str(datetime.datetime.now()) readUnitConfObj = self.getReadUnitObj() if readUnitConfObj: subtitle += '\nInput parameters:\n' - subtitle += '[Data path = %s]\n' %readUnitConfObj.path - subtitle += '[Data type = %s]\n' %readUnitConfObj.datatype - subtitle += '[Start date = %s]\n' %readUnitConfObj.startDate - subtitle += '[End date = %s]\n' %readUnitConfObj.endDate - subtitle += '[Start time = %s]\n' 
%readUnitConfObj.startTime - subtitle += '[End time = %s]\n' %readUnitConfObj.endTime + subtitle += '[Data path = %s]\n' % readUnitConfObj.path + subtitle += '[Data type = %s]\n' % readUnitConfObj.datatype + subtitle += '[Start date = %s]\n' % readUnitConfObj.startDate + subtitle += '[End date = %s]\n' % readUnitConfObj.endDate + subtitle += '[Start time = %s]\n' % readUnitConfObj.startTime + subtitle += '[End time = %s]\n' % readUnitConfObj.endTime adminObj = schainpy.admin.SchainNotify() adminObj.sendAlert(message=message, - subject=subject, - subtitle=subtitle, - filename=self.filename) - + subject=subject, + subtitle=subtitle, + filename=self.filename) + def isPaused(self): return 0 @@ -1255,7 +1270,7 @@ class Project(Process): def run(self): log.success('Starting {}'.format(self.name)) - + self.createObjects() self.connectObjects() @@ -1287,14 +1302,14 @@ class Project(Process): is_ok = False break - #If every process unit finished so end process + # If every process unit finished so end process if not(is_ok): break if not self.runController(): break - #Closing every process + # Closing every process for procKey in keyList: procUnitConfObj = self.procUnitConfObjDict[procKey] procUnitConfObj.close() diff --git a/schainpy/gui/viewcontroller/parametersModel.py b/schainpy/gui/viewcontroller/parametersModel.py index a3f1c17..665c01f 100644 --- a/schainpy/gui/viewcontroller/parametersModel.py +++ b/schainpy/gui/viewcontroller/parametersModel.py @@ -50,7 +50,6 @@ class ProjectParms(): indexDatatype = 2 if 'usrp' in self.datatype.lower(): indexDatatype = 3 - return indexDatatype def getExt(self): @@ -65,7 +64,6 @@ class ProjectParms(): ext = '.fits' if self.datatype.lower() == 'usrp': ext = '.hdf5' - return ext def set(self, project_name, datatype, ext, dpath, online, diff --git a/schainpy/model/__init__.py b/schainpy/model/__init__.py index 3d76a6d..ae3ce46 100644 --- a/schainpy/model/__init__.py +++ b/schainpy/model/__init__.py @@ -5,8 +5,8 @@ # from schainpy.model.utils.jroutils import * # from schainpy.serializer import * +from graphics import * from data import * from io import * from proc import * -from graphics import * -from utils import * \ No newline at end of file +from utils import * diff --git a/schainpy/model/data/jrodata.py b/schainpy/model/data/jrodata.py index bed4822..5cf58e6 100644 --- a/schainpy/model/data/jrodata.py +++ b/schainpy/model/data/jrodata.py @@ -292,11 +292,9 @@ class JROData(GenericData): return fmax def getFmax(self): - PRF = 1./(self.ippSeconds * self.nCohInt) fmax = PRF - return fmax def getVmax(self): diff --git a/schainpy/model/data/jroheaderIO.py b/schainpy/model/data/jroheaderIO.py index c44bef9..c85ae5d 100644 --- a/schainpy/model/data/jroheaderIO.py +++ b/schainpy/model/data/jroheaderIO.py @@ -7,6 +7,7 @@ import sys import numpy import copy import datetime +import inspect SPEED_OF_LIGHT = 299792458 SPEED_OF_LIGHT = 3e8 @@ -82,7 +83,25 @@ class Header(object): def write(self): raise NotImplementedError + + def getAllowedArgs(self): + args = inspect.getargspec(self.__init__).args + try: + args.remove('self') + except: + pass + return args + + def getAsDict(self): + args = self.getAllowedArgs() + asDict = {} + for x in args: + asDict[x] = self[x] + return asDict + def __getitem__(self, name): + return getattr(self, name) + def printInfo(self): message = "#"*50 + "\n" @@ -115,6 +134,7 @@ class BasicHeader(Header): dstFlag = None errorCount = None datatime = None + structure = BASIC_STRUCTURE __LOCALTIME = None def __init__(self, useLocalTime=True): @@ 
-189,16 +209,17 @@ class SystemHeader(Header): nChannels = None adcResolution = None pciDioBusWidth = None - - def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0): + structure = SYSTEM_STRUCTURE + + def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0): self.size = 24 self.nSamples = nSamples self.nProfiles = nProfiles self.nChannels = nChannels self.adcResolution = adcResolution - self.pciDioBusWidth = pciDioBusWith - + self.pciDioBusWidth = pciDioBusWidth + def read(self, fp): self.length = 0 try: @@ -260,15 +281,15 @@ class RadarControllerHeader(Header): line5Function = None fClock = None prePulseBefore = None - prePulserAfter = None + prePulseAfter = None rangeIpp = None rangeTxA = None rangeTxB = None - + structure = RADAR_STRUCTURE __size = None def __init__(self, expType=2, nTx=1, - ippKm=None, txA=0, txB=0, + ipp=None, txA=0, txB=0, nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None, numTaus=0, line6Function=0, line5Function=0, fClock=None, prePulseBefore=0, prePulseAfter=0, @@ -278,10 +299,10 @@ class RadarControllerHeader(Header): # self.size = 116 self.expType = expType self.nTx = nTx - self.ipp = ippKm + self.ipp = ipp self.txA = txA self.txB = txB - self.rangeIpp = ippKm + self.rangeIpp = ipp self.rangeTxA = txA self.rangeTxB = txB @@ -292,7 +313,7 @@ class RadarControllerHeader(Header): self.line5Function = line5Function self.fClock = fClock self.prePulseBefore = prePulseBefore - self.prePulserAfter = prePulseAfter + self.prePulseAfter = prePulseAfter self.nHeights = nHeights self.firstHeight = firstHeight @@ -342,7 +363,7 @@ class RadarControllerHeader(Header): self.line5Function = int(header['nLine5Function'][0]) self.fClock = float(header['fClock'][0]) self.prePulseBefore = int(header['nPrePulseBefore'][0]) - self.prePulserAfter = int(header['nPrePulseAfter'][0]) + self.prePulseAfter = int(header['nPrePulseAfter'][0]) self.rangeIpp = header['sRangeIPP'][0] self.rangeTxA = header['sRangeTxA'][0] self.rangeTxB = header['sRangeTxB'][0] @@ -450,7 +471,7 @@ class RadarControllerHeader(Header): self.line5Function, self.fClock, self.prePulseBefore, - self.prePulserAfter, + self.prePulseAfter, self.rangeIpp, self.rangeTxA, self.rangeTxB) @@ -540,15 +561,18 @@ class ProcessingHeader(Header): nCohInt = None nIncohInt = None totalSpectra = None - + structure = PROCESSING_STRUCTURE flag_dc = None flag_cspc = None - def __init__(self): + def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0,processFlags=0, nCohInt=0, + nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0, + code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False + ): # self.size = 0 - self.dtype = 0 - self.blockSize = 0 + self.dtype = dtype + self.blockSize = blockSize self.profilesPerBlock = 0 self.dataBlocksPerFile = 0 self.nWindows = 0 @@ -572,6 +596,7 @@ class ProcessingHeader(Header): self.flag_decode = False self.flag_deflip = False self.length = 0 + def read(self, fp): self.length = 0 try: diff --git a/schainpy/model/graphics/jroplot_correlation.py b/schainpy/model/graphics/jroplot_correlation.py index 474b860..759f8c6 100644 --- a/schainpy/model/graphics/jroplot_correlation.py +++ b/schainpy/model/graphics/jroplot_correlation.py @@ -8,78 +8,78 @@ from figure import Figure, isRealtime class CorrelationPlot(Figure): isConfig = None __nsubplots = None - + WIDTHPROF = None HEIGHTPROF 
= None PREFIX = 'corr' - - def __init__(self): - + + def __init__(self, **kwargs): + Figure.__init__(self, **kwargs) self.isConfig = False self.__nsubplots = 1 - + self.WIDTH = 280 self.HEIGHT = 250 self.WIDTHPROF = 120 self.HEIGHTPROF = 0 self.counter_imagwr = 0 - + self.PLOT_CODE = 1 self.FTP_WEI = None self.EXP_CODE = None self.SUB_EXP_CODE = None self.PLOT_POS = None - + def getSubplots(self): - + ncol = int(numpy.sqrt(self.nplots)+0.9) nrow = int(self.nplots*1./ncol + 0.9) - + return nrow, ncol - + def setup(self, id, nplots, wintitle, showprofile=False, show=True): - - showprofile = False + + showprofile = False self.__showprofile = showprofile self.nplots = nplots - + ncolspan = 1 colspan = 1 if showprofile: ncolspan = 3 colspan = 2 self.__nsubplots = 2 - + self.createFigure(id = id, wintitle = wintitle, widthplot = self.WIDTH + self.WIDTHPROF, heightplot = self.HEIGHT + self.HEIGHTPROF, show=show) - + nrow, ncol = self.getSubplots() - + counter = 0 for y in range(nrow): for x in range(ncol): - + if counter >= self.nplots: break - + self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1) - + if showprofile: self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1) - + counter += 1 - + def run(self, dataOut, id, wintitle="", channelList=None, showprofile=False, xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, server=None, folder=None, username=None, password=None, ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False): - + """ - + Input: dataOut : id : @@ -93,15 +93,15 @@ class CorrelationPlot(Figure): zmin : None, zmax : None """ - + if dataOut.flagNoData: return None - + if realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): print 'Skipping this plot function' return - + if channelList == None: channelIndexList = dataOut.channelIndexList else: @@ -110,53 +110,53 @@ class CorrelationPlot(Figure): if channel not in dataOut.channelList: raise ValueError, "Channel %d is not in dataOut.channelList" channelIndexList.append(dataOut.channelList.index(channel)) - + factor = dataOut.normFactor lenfactor = factor.shape[1] x = dataOut.getLagTRange(1) y = dataOut.getHeiRange() - + z = copy.copy(dataOut.data_corr[:,:,0,:]) for i in range(dataOut.data_corr.shape[0]): - z[i,:,:] = z[i,:,:]/factor[i,:] + z[i,:,:] = z[i,:,:]/factor[i,:] zdB = numpy.abs(z) - + avg = numpy.average(z, axis=1) # avg = numpy.nanmean(z, axis=1) # noise = dataOut.noise/factor - + #thisDatetime = dataOut.datatime thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) - title = wintitle + " Correlation" + title = wintitle + " Correlation" xlabel = "Lag T (s)" ylabel = "Range (Km)" - + if not self.isConfig: - - nplots = dataOut.data_corr.shape[0] - + + nplots = dataOut.data_corr.shape[0] + self.setup(id=id, nplots=nplots, wintitle=wintitle, showprofile=showprofile, show=show) - + if xmin == None: xmin = numpy.nanmin(x) if xmax == None: xmax = numpy.nanmax(x) if ymin == None: ymin = numpy.nanmin(y) if ymax == None: ymax = numpy.nanmax(y) if zmin == None: zmin = 0 if zmax == None: zmax = 1 - + self.FTP_WEI = ftp_wei self.EXP_CODE = exp_code self.SUB_EXP_CODE = sub_exp_code self.PLOT_POS = plot_pos - + self.isConfig = True - + self.setWinTitle(title) - + for i in range(self.nplots): str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S")) title = "Channel %d and %d: : %s" %(dataOut.pairsList[i][0],dataOut.pairsList[i][1] , str_datetime) @@ -165,7 
+165,7 @@ class CorrelationPlot(Figure): xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax, xlabel=xlabel, ylabel=ylabel, title=title, ticksize=9, cblabel='') - + # if self.__showprofile: # axes = self.axesList[i*self.__nsubplots +1] # axes.pline(avgdB[i], y, @@ -173,15 +173,15 @@ class CorrelationPlot(Figure): # xlabel='dB', ylabel='', title='', # ytick_visible=False, # grid='x') -# +# # noiseline = numpy.repeat(noisedB[i], len(y)) # axes.addpline(noiseline, y, idline=1, color="black", linestyle="dashed", lw=2) - + self.draw() - + self.save(figpath=figpath, figfile=figfile, save=save, ftp=ftp, wr_period=wr_period, - thisDatetime=thisDatetime) + thisDatetime=thisDatetime) diff --git a/schainpy/model/graphics/jroplot_data.py b/schainpy/model/graphics/jroplot_data.py index 010ec93..1e2e4db 100644 --- a/schainpy/model/graphics/jroplot_data.py +++ b/schainpy/model/graphics/jroplot_data.py @@ -16,8 +16,10 @@ from schainpy.model.proc.jroproc_base import Operation from schainpy.utils import log jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90] -blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15] -ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values))) +blu_values = matplotlib.pyplot.get_cmap( + "seismic_r", 20)(numpy.arange(20))[10:15] +ncmap = matplotlib.colors.LinearSegmentedColormap.from_list( + "jro", numpy.vstack((blu_values, jet_values))) matplotlib.pyplot.register_cmap(cmap=ncmap) CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'RdBu_r', 'seismic')] @@ -52,7 +54,7 @@ class PlotData(Operation, Process): self.kwargs['code'] = self.CODE self.mp = False self.data = None - self.isConfig = False + self.isConfig = False self.figures = [] self.axes = [] self.cb_axes = [] @@ -72,13 +74,13 @@ class PlotData(Operation, Process): self.zmin = kwargs.get('zmin', None) self.zmax = kwargs.get('zmax', None) self.zlimits = kwargs.get('zlimits', None) - self.xmin = kwargs.get('xmin', None) + self.xmin = kwargs.get('xmin', None) self.xmax = kwargs.get('xmax', None) self.xrange = kwargs.get('xrange', 24) self.ymin = kwargs.get('ymin', None) self.ymax = kwargs.get('ymax', None) self.xlabel = kwargs.get('xlabel', None) - self.__MAXNUMY = kwargs.get('decimation', 100) + self.__MAXNUMY = kwargs.get('decimation', 100) self.showSNR = kwargs.get('showSNR', False) self.oneFigure = kwargs.get('oneFigure', True) self.width = kwargs.get('width', None) @@ -115,18 +117,18 @@ class PlotData(Operation, Process): self.pf_axes = [] self.cmaps = [] - size = '15%' if self.ncols==1 else '30%' - pad = '4%' if self.ncols==1 else '8%' + size = '15%' if self.ncols == 1 else '30%' + pad = '4%' if self.ncols == 1 else '8%' if self.oneFigure: if self.height is None: - self.height = 1.4*self.nrows + 1 + self.height = 1.4 * self.nrows + 1 fig = plt.figure(figsize=(self.width, self.height), edgecolor='k', facecolor='w') self.figures.append(fig) - for n in range(self.nplots): - ax = fig.add_subplot(self.nrows, self.ncols, n+1) + for n in range(self.nplots): + ax = fig.add_subplot(self.nrows, self.ncols, n + 1) ax.tick_params(labelsize=8) ax.firsttime = True ax.index = 0 @@ -134,15 +136,15 @@ class PlotData(Operation, Process): self.axes.append(ax) if self.showprofile: cax = self.__add_axes(ax, size=size, pad=pad) - cax.tick_params(labelsize=8) + cax.tick_params(labelsize=8) self.pf_axes.append(cax) else: if self.height is None: self.height = 3 for n in range(self.nplots): fig = plt.figure(figsize=(self.width, 
self.height), - edgecolor='k', - facecolor='w') + edgecolor='k', + facecolor='w') ax = fig.add_subplot(1, 1, 1) ax.tick_params(labelsize=8) ax.firsttime = True @@ -152,12 +154,12 @@ class PlotData(Operation, Process): self.axes.append(ax) if self.showprofile: cax = self.__add_axes(ax, size=size, pad=pad) - cax.tick_params(labelsize=8) + cax.tick_params(labelsize=8) self.pf_axes.append(cax) - + for n in range(self.nrows): if self.colormaps is not None: - cmap = plt.get_cmap(self.colormaps[n]) + cmap = plt.get_cmap(self.colormaps[n]) else: cmap = plt.get_cmap(self.colormap) cmap.set_bad(self.bgcolor, 1.) @@ -269,7 +271,7 @@ class PlotData(Operation, Process): ''' divider = make_axes_locatable(ax) nax = divider.new_horizontal(size=size, pad=pad) - ax.figure.add_axes(nax) + ax.figure.add_axes(nax) return nax self.setup() @@ -278,7 +280,7 @@ class PlotData(Operation, Process): ''' This method should be implemented in the child class, the following attributes should be set: - + self.nrows: number of rows self.ncols: number of cols self.nplots: number of plots (channels or pairs) @@ -298,26 +300,26 @@ class PlotData(Operation, Process): deltas = x_buffer[1:] - x_buffer[0:-1] x_median = numpy.median(deltas) - index = numpy.where(deltas > 5*x_median) + index = numpy.where(deltas > 5 * x_median) if len(index[0]) != 0: z_buffer[::, index[0], ::] = self.__missing z_buffer = numpy.ma.masked_inside(z_buffer, - 0.99*self.__missing, - 1.01*self.__missing) + 0.99 * self.__missing, + 1.01 * self.__missing) return x_buffer, y_buffer, z_buffer def decimate(self): # dx = int(len(self.x)/self.__MAXNUMX) + 1 - dy = int(len(self.y)/self.__MAXNUMY) + 1 + dy = int(len(self.y) / self.__MAXNUMY) + 1 # x = self.x[::dx] x = self.x y = self.y[::dy] z = self.z[::, ::, ::dy] - + return x, y, z def format(self): @@ -337,7 +339,7 @@ class PlotData(Operation, Process): xmin = self.xmin if self.xmax is None: - xmax = xmin+self.xrange*60*60 + xmax = xmin + self.xrange * 60 * 60 else: if self.xaxis is 'time': dt = self.getDateTime(self.max_time) @@ -354,7 +356,9 @@ class PlotData(Operation, Process): i = 1 if numpy.where(ymax < Y)[0][0] < 0 else numpy.where(ymax < Y)[0][0] ystep = Y[i-1]/5 - for n, ax in enumerate(self.axes): + ystep = 200 if ymax >= 800 else 100 if ymax >= 400 else 50 if ymax >= 200 else 20 + + for n, ax in enumerate(self.axes): if ax.firsttime: ax.set_facecolor(self.bgcolor) ax.yaxis.set_major_locator(MultipleLocator(ystep)) @@ -363,14 +367,15 @@ class PlotData(Operation, Process): ax.xaxis.set_major_locator(LinearLocator(9)) if self.xlabel is not None: ax.set_xlabel(self.xlabel) - ax.set_ylabel(self.ylabel) + ax.set_ylabel(self.ylabel) ax.firsttime = False if self.showprofile: self.pf_axes[n].set_ylim(ymin, ymax) - self.pf_axes[n].set_xlim(self.zmin, self.zmax) + self.pf_axes[n].set_xlim(self.zmin, self.zmax) self.pf_axes[n].set_xlabel('dB') self.pf_axes[n].grid(b=True, axis='x') - [tick.set_visible(False) for tick in self.pf_axes[n].get_yticklabels()] + [tick.set_visible(False) + for tick in self.pf_axes[n].get_yticklabels()] if self.colorbar: ax.cbar = plt.colorbar(ax.plt, ax=ax, pad=0.02, aspect=10) ax.cbar.ax.tick_params(labelsize=8) @@ -379,7 +384,7 @@ class PlotData(Operation, Process): ax.cbar.set_label(self.cb_label, size=8) elif self.cb_labels: ax.cbar.set_label(self.cb_labels[n], size=8) - + ax.set_title('{} - {} {}'.format( self.titles[n], self.getDateTime(self.max_time).strftime('%H:%M:%S'), @@ -392,10 +397,10 @@ class PlotData(Operation, Process): ''' ''' log.success('Plotting', self.name) - + 
self.plot() self.format() - + for n, fig in enumerate(self.figures): if self.nrows == 0 or self.nplots == 0: log.warning('No data', self.name) @@ -405,7 +410,7 @@ class PlotData(Operation, Process): fig.canvas.manager.set_window_title('{} - {}'.format(self.title, self.getDateTime(self.max_time).strftime('%Y/%m/%d'))) # fig.canvas.draw() - + if self.save and self.data.ended: channels = range(self.nrows) if self.oneFigure: @@ -438,9 +443,10 @@ class PlotData(Operation, Process): receiver.setsockopt(zmq.CONFLATE, self.CONFLATE) if 'server' in self.kwargs['parent']: - receiver.connect('ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server'])) + receiver.connect( + 'ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server'])) else: - receiver.connect("ipc:///tmp/zmq.plots") + receiver.connect("ipc:///tmp/zmq.plots") while True: try: @@ -460,7 +466,7 @@ class PlotData(Operation, Process): if self.isConfig is False: self.__setup() self.isConfig = True - + self.__plot() except zmq.Again as e: @@ -474,23 +480,24 @@ class PlotData(Operation, Process): if self.data: self.__plot() + class PlotSpectraData(PlotData): ''' Plot for Spectra data ''' CODE = 'spc' - colormap = 'jro' + colormap = 'jro' def setup(self): self.nplots = len(self.data.channels) - self.ncols = int(numpy.sqrt(self.nplots)+ 0.9) - self.nrows = int((1.0*self.nplots/self.ncols) + 0.9) - self.width = 3.4*self.ncols - self.height = 3*self.nrows + self.ncols = int(numpy.sqrt(self.nplots) + 0.9) + self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9) + self.width = 3.4 * self.ncols + self.height = 3 * self.nrows self.cb_label = 'dB' - if self.showprofile: - self.width += 0.8*self.ncols + if self.showprofile: + self.width += 0.8 * self.ncols self.ylabel = 'Range [Km]' @@ -514,7 +521,7 @@ class PlotSpectraData(PlotData): y = self.data.heights self.y = y z = self.data['spc'] - + for n, ax in enumerate(self.axes): noise = self.data['noise'][n][-1] if self.CODE == 'spc_mean': @@ -525,15 +532,16 @@ class PlotSpectraData(PlotData): self.zmin = self.zmin if self.zmin else numpy.nanmin(z) self.zmax = self.zmax if self.zmax else numpy.nanmax(z) ax.plt = ax.pcolormesh(x, y, z[n].T, - vmin=self.zmin, - vmax=self.zmax, - cmap=plt.get_cmap(self.colormap) - ) + vmin=self.zmin, + vmax=self.zmax, + cmap=plt.get_cmap(self.colormap) + ) if self.showprofile: - ax.plt_profile= self.pf_axes[n].plot(self.data['rti'][n][-1], y)[0] + ax.plt_profile = self.pf_axes[n].plot( + self.data['rti'][n][-1], y)[0] ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y, - color="k", linestyle="dashed", lw=1)[0] + color="k", linestyle="dashed", lw=1)[0] if self.CODE == 'spc_mean': ax.plt_mean = ax.plot(mean, y, color='k')[0] else: @@ -554,17 +562,17 @@ class PlotCrossSpectraData(PlotData): zmin_coh = None zmax_coh = None zmin_phase = None - zmax_phase = None + zmax_phase = None def setup(self): self.ncols = 4 self.nrows = len(self.data.pairs) - self.nplots = self.nrows*4 - self.width = 3.4*self.ncols - self.height = 3*self.nrows + self.nplots = self.nrows * 4 + self.width = 3.4 * self.ncols + self.height = 3 * self.nrows self.ylabel = 'Range [Km]' - self.showprofile = False + self.showprofile = False def plot(self): @@ -588,24 +596,24 @@ class PlotCrossSpectraData(PlotData): for n in range(self.nrows): noise = self.data['noise'][n][-1] pair = self.data.pairs[n] - ax = self.axes[4*n] - ax3 = self.axes[4*n+3] + ax = self.axes[4 * n] + ax3 = self.axes[4 * n + 3] if ax.firsttime: self.xmax = self.xmax if self.xmax else numpy.nanmax(x) self.xmin = self.xmin if 
self.xmin else -self.xmax self.zmin = self.zmin if self.zmin else numpy.nanmin(spc) - self.zmax = self.zmax if self.zmax else numpy.nanmax(spc) + self.zmax = self.zmax if self.zmax else numpy.nanmax(spc) ax.plt = ax.pcolormesh(x, y, spc[pair[0]].T, vmin=self.zmin, vmax=self.zmax, cmap=plt.get_cmap(self.colormap) - ) + ) else: ax.plt.set_array(spc[pair[0]].T.ravel()) self.titles.append('CH {}: {:3.2f}dB'.format(n, noise)) - ax = self.axes[4*n+1] - if ax.firsttime: + ax = self.axes[4 * n + 1] + if ax.firsttime: ax.plt = ax.pcolormesh(x, y, spc[pair[1]].T, vmin=self.zmin, vmax=self.zmax, @@ -615,12 +623,12 @@ class PlotCrossSpectraData(PlotData): ax.plt.set_array(spc[pair[1]].T.ravel()) self.titles.append('CH {}: {:3.2f}dB'.format(n, noise)) - out = cspc[n]/numpy.sqrt(spc[pair[0]]*spc[pair[1]]) + out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]]) coh = numpy.abs(out) - phase = numpy.arctan2(out.imag, out.real)*180/numpy.pi - - ax = self.axes[4*n+2] - if ax.firsttime: + phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi + + ax = self.axes[4 * n + 2] + if ax.firsttime: ax.plt = ax.pcolormesh(x, y, coh.T, vmin=0, vmax=1, @@ -628,9 +636,10 @@ class PlotCrossSpectraData(PlotData): ) else: ax.plt.set_array(coh.T.ravel()) - self.titles.append('Coherence Ch{} * Ch{}'.format(pair[0], pair[1])) + self.titles.append( + 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1])) - ax = self.axes[4*n+3] + ax = self.axes[4 * n + 3] if ax.firsttime: ax.plt = ax.pcolormesh(x, y, phase.T, vmin=-180, @@ -640,7 +649,7 @@ class PlotCrossSpectraData(PlotData): else: ax.plt.set_array(phase.T.ravel()) self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1])) - + self.saveTime = self.max_time @@ -662,12 +671,13 @@ class PlotRTIData(PlotData): def setup(self): self.xaxis = 'time' - self.ncols = 1 + self.ncols = 1 self.nrows = len(self.data.channels) self.nplots = len(self.data.channels) self.ylabel = 'Range [Km]' self.cb_label = 'dB' - self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)] + self.titles = ['{} Channel {}'.format( + self.CODE.upper(), x) for x in range(self.nrows)] def plot(self): self.x = self.times @@ -676,17 +686,18 @@ class PlotRTIData(PlotData): self.z = numpy.ma.masked_invalid(self.z) for n, ax in enumerate(self.axes): - x, y, z = self.fill_gaps(*self.decimate()) + x, y, z = self.fill_gaps(*self.decimate()) self.zmin = self.zmin if self.zmin else numpy.min(self.z) self.zmax = self.zmax if self.zmax else numpy.max(self.z) - if ax.firsttime: + if ax.firsttime: ax.plt = ax.pcolormesh(x, y, z[n].T, - vmin=self.zmin, - vmax=self.zmax, - cmap=plt.get_cmap(self.colormap) - ) + vmin=self.zmin, + vmax=self.zmax, + cmap=plt.get_cmap(self.colormap) + ) if self.showprofile: - ax.plot_profile= self.pf_axes[n].plot(self.data['rti'][n][-1], self.y)[0] + ax.plot_profile = self.pf_axes[n].plot( + self.data['rti'][n][-1], self.y)[0] ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y, color="k", linestyle="dashed", lw=1)[0] else: @@ -695,12 +706,13 @@ class PlotRTIData(PlotData): vmin=self.zmin, vmax=self.zmax, cmap=plt.get_cmap(self.colormap) - ) + ) if self.showprofile: ax.plot_profile.set_data(self.data['rti'][n][-1], self.y) - ax.plot_noise.set_data(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y) + ax.plot_noise.set_data(numpy.repeat( + self.data['noise'][n][-1], len(self.y)), self.y) - self.saveTime = self.min_time + self.saveTime = self.min_time class PlotCOHData(PlotRTIData): @@ -715,13 +727,15 @@ class 
PlotCOHData(PlotRTIData): self.ncols = 1 self.nrows = len(self.data.pairs) self.nplots = len(self.data.pairs) - self.ylabel = 'Range [Km]' + self.ylabel = 'Range [Km]' if self.CODE == 'coh': self.cb_label = '' - self.titles = ['Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs] + self.titles = [ + 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs] else: self.cb_label = 'Degrees' - self.titles = ['Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs] + self.titles = [ + 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs] class PlotPHASEData(PlotCOHData): @@ -753,9 +767,9 @@ class PlotNoiseData(PlotData): x = self.times xmin = self.min_time - xmax = xmin+self.xrange*60*60 + xmax = xmin + self.xrange * 60 * 60 Y = self.data[self.CODE] - + if self.axes[0].firsttime: for ch in self.data.channels: y = Y[ch] @@ -765,7 +779,7 @@ class PlotNoiseData(PlotData): for ch in self.data.channels: y = Y[ch] self.axes[0].lines[ch].set_data(x, y) - + self.ymin = numpy.nanmin(Y) - 5 self.ymax = numpy.nanmax(Y) + 5 self.saveTime = self.min_time @@ -813,26 +827,27 @@ class PlotSkyMapData(PlotData): else: self.figure.clf() - self.ax = plt.subplot2grid((self.nrows, self.ncols), (0, 0), 1, 1, polar=True) + self.ax = plt.subplot2grid( + (self.nrows, self.ncols), (0, 0), 1, 1, polar=True) self.ax.firsttime = True - def plot(self): - arrayParameters = numpy.concatenate([self.data['param'][t] for t in self.times]) - error = arrayParameters[:,-1] + arrayParameters = numpy.concatenate( + [self.data['param'][t] for t in self.times]) + error = arrayParameters[:, -1] indValid = numpy.where(error == 0)[0] - finalMeteor = arrayParameters[indValid,:] - finalAzimuth = finalMeteor[:,3] - finalZenith = finalMeteor[:,4] + finalMeteor = arrayParameters[indValid, :] + finalAzimuth = finalMeteor[:, 3] + finalZenith = finalMeteor[:, 4] - x = finalAzimuth*numpy.pi/180 + x = finalAzimuth * numpy.pi / 180 y = finalZenith if self.ax.firsttime: self.ax.plot = self.ax.plot(x, y, 'bo', markersize=5)[0] - self.ax.set_ylim(0,90) - self.ax.set_yticks(numpy.arange(0,90,20)) + self.ax.set_ylim(0, 90) + self.ax.set_yticks(numpy.arange(0, 90, 20)) self.ax.set_xlabel(self.xlabel) self.ax.set_ylabel(self.ylabel) self.ax.yaxis.labelpad = 40 @@ -847,9 +862,9 @@ class PlotSkyMapData(PlotData): dt2, len(x)) self.ax.set_title(title, size=8) - self.saveTime = self.max_time + class PlotParamData(PlotRTIData): ''' Plot for data_param object @@ -866,7 +881,7 @@ class PlotParamData(PlotRTIData): if self.showSNR: self.nrows += 1 self.nplots += 1 - + self.ylabel = 'Height [Km]' self.titles = self.data.parameters \ if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)] @@ -874,10 +889,10 @@ class PlotParamData(PlotRTIData): self.titles.append('SNR') def plot(self): - self.data.normalize_heights() + self.data.normalize_heights() self.x = self.times self.y = self.data.heights - if self.showSNR: + if self.showSNR: self.z = numpy.concatenate( (self.data[self.CODE], self.data['snr']) ) @@ -900,7 +915,7 @@ class PlotParamData(PlotRTIData): vmin=self.zmin, vmax=self.zmax, cmap=self.cmaps[n] - ) + ) else: if self.zlimits is not None: self.zmin, self.zmax = self.zlimits[n] @@ -909,7 +924,7 @@ class PlotParamData(PlotRTIData): vmin=self.zmin, vmax=self.zmax, cmap=self.cmaps[n] - ) + ) self.saveTime = self.min_time diff --git a/schainpy/model/graphics/jroplot_heispectra.py b/schainpy/model/graphics/jroplot_heispectra.py index f8d4512..b0c9c5b 100644 --- 
a/schainpy/model/graphics/jroplot_heispectra.py +++ b/schainpy/model/graphics/jroplot_heispectra.py @@ -11,79 +11,80 @@ from figure import Figure, isRealtime from plotting_codes import * class SpectraHeisScope(Figure): - - + + isConfig = None __nsubplots = None - + WIDTHPROF = None HEIGHTPROF = None PREFIX = 'spc' - - def __init__(self): - + + def __init__(self, **kwargs): + + Figure.__init__(self, **kwargs) self.isConfig = False self.__nsubplots = 1 - + self.WIDTH = 230 self.HEIGHT = 250 self.WIDTHPROF = 120 self.HEIGHTPROF = 0 self.counter_imagwr = 0 - + self.PLOT_CODE = SPEC_CODE - + def getSubplots(self): - + ncol = int(numpy.sqrt(self.nplots)+0.9) nrow = int(self.nplots*1./ncol + 0.9) - + return nrow, ncol - + def setup(self, id, nplots, wintitle, show): - + showprofile = False self.__showprofile = showprofile self.nplots = nplots - + ncolspan = 1 colspan = 1 if showprofile: ncolspan = 3 colspan = 2 self.__nsubplots = 2 - + self.createFigure(id = id, wintitle = wintitle, widthplot = self.WIDTH + self.WIDTHPROF, heightplot = self.HEIGHT + self.HEIGHTPROF, show = show) - + nrow, ncol = self.getSubplots() - + counter = 0 for y in range(nrow): for x in range(ncol): - + if counter >= self.nplots: break - + self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1) - + if showprofile: self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1) - + counter += 1 - + def run(self, dataOut, id, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None, save=False, figpath='./', figfile=None, ftp=False, wr_period=1, show=True, server=None, folder=None, username=None, password=None, ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): - + """ - + Input: dataOut : id : @@ -94,12 +95,12 @@ class SpectraHeisScope(Figure): ymin : None, ymax : None, """ - + if dataOut.realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): print 'Skipping this plot function' return - + if channelList == None: channelIndexList = dataOut.channelIndexList else: @@ -108,9 +109,9 @@ class SpectraHeisScope(Figure): if channel not in dataOut.channelList: raise ValueError, "Channel %d is not in dataOut.channelList" channelIndexList.append(dataOut.channelList.index(channel)) - + # x = dataOut.heightList - c = 3E8 + c = 3E8 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0] #deberia cambiar para el caso de 1Mhz y 100KHz x = numpy.arange(-1*dataOut.nHeights/2.,dataOut.nHeights/2.)*(c/(2*deltaHeight*dataOut.nHeights*1000)) @@ -122,7 +123,7 @@ class SpectraHeisScope(Figure): data = dataOut.data_spc / factor datadB = 10.*numpy.log10(data) y = datadB - + #thisDatetime = dataOut.datatime thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) @@ -130,29 +131,29 @@ class SpectraHeisScope(Figure): #para 1Mhz descomentar la siguiente linea #xlabel = "Frequency x 10000" ylabel = "Intensity (dB)" - + if not self.isConfig: nplots = len(channelIndexList) - + self.setup(id=id, nplots=nplots, wintitle=wintitle, show=show) - + if xmin == None: xmin = numpy.nanmin(x) if xmax == None: xmax = numpy.nanmax(x) if ymin == None: ymin = numpy.nanmin(y) if ymax == None: ymax = numpy.nanmax(y) - + self.FTP_WEI = ftp_wei self.EXP_CODE = exp_code self.SUB_EXP_CODE = sub_exp_code self.PLOT_POS = plot_pos - + self.isConfig = True - + self.setWinTitle(title) - + for i in range(len(self.axesList)): ychannel = y[i,:] str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S")) @@ -161,10 +162,10 @@ 
class SpectraHeisScope(Figure): axes.pline(x, ychannel, xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, xlabel=xlabel, ylabel=ylabel, title=title, grid='both') - - + + self.draw() - + self.save(figpath=figpath, figfile=figfile, save=save, @@ -173,18 +174,18 @@ class SpectraHeisScope(Figure): thisDatetime=thisDatetime) class RTIfromSpectraHeis(Figure): - + isConfig = None __nsubplots = None PREFIX = 'rtinoise' - - def __init__(self): - + + def __init__(self, **kwargs): + Figure.__init__(self, **kwargs) self.timerange = 24*60*60 self.isConfig = False self.__nsubplots = 1 - + self.WIDTH = 820 self.HEIGHT = 200 self.WIDTHPROF = 120 @@ -193,43 +194,43 @@ class RTIfromSpectraHeis(Figure): self.xdata = None self.ydata = None self.figfile = None - + self.PLOT_CODE = RTI_CODE - + def getSubplots(self): - + ncol = 1 nrow = 1 - + return nrow, ncol - + def setup(self, id, nplots, wintitle, showprofile=True, show=True): - + self.__showprofile = showprofile self.nplots = nplots - + ncolspan = 7 colspan = 6 self.__nsubplots = 2 - + self.createFigure(id = id, wintitle = wintitle, widthplot = self.WIDTH+self.WIDTHPROF, heightplot = self.HEIGHT+self.HEIGHTPROF, show = show) - + nrow, ncol = self.getSubplots() - + self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1) - - + + def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True', xmin=None, xmax=None, ymin=None, ymax=None, timerange=None, save=False, figpath='./', figfile=None, ftp=False, wr_period=1, show=True, server=None, folder=None, username=None, password=None, ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): - + if channelList == None: channelIndexList = dataOut.channelIndexList channelList = dataOut.channelList @@ -239,86 +240,86 @@ class RTIfromSpectraHeis(Figure): if channel not in dataOut.channelList: raise ValueError, "Channel %d is not in dataOut.channelList" channelIndexList.append(dataOut.channelList.index(channel)) - + if timerange != None: self.timerange = timerange - + x = dataOut.getTimeRange() y = dataOut.getHeiRange() - + factor = dataOut.normFactor data = dataOut.data_spc / factor data = numpy.average(data,axis=1) datadB = 10*numpy.log10(data) - + # factor = dataOut.normFactor # noise = dataOut.getNoise()/factor # noisedB = 10*numpy.log10(noise) - + #thisDatetime = dataOut.datatime thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) title = wintitle + " RTI: %s" %(thisDatetime.strftime("%d-%b-%Y")) xlabel = "Local Time" ylabel = "Intensity (dB)" - + if not self.isConfig: - + nplots = 1 - + self.setup(id=id, nplots=nplots, wintitle=wintitle, showprofile=showprofile, show=show) - + self.tmin, self.tmax = self.getTimeLim(x, xmin, xmax) - + if ymin == None: ymin = numpy.nanmin(datadB) if ymax == None: ymax = numpy.nanmax(datadB) - + self.name = thisDatetime.strftime("%Y%m%d_%H%M%S") self.isConfig = True self.figfile = figfile self.xdata = numpy.array([]) self.ydata = numpy.array([]) - + self.FTP_WEI = ftp_wei self.EXP_CODE = exp_code self.SUB_EXP_CODE = sub_exp_code self.PLOT_POS = plot_pos - + self.setWinTitle(title) - - + + # title = "RTI %s" %(thisDatetime.strftime("%d-%b-%Y")) title = "RTI - %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) - + legendlabels = ["channel %d"%idchannel for idchannel in channelList] axes = self.axesList[0] - + self.xdata = numpy.hstack((self.xdata, x[0:1])) - + if len(self.ydata)==0: self.ydata = datadB[channelIndexList].reshape(-1,1) else: self.ydata = numpy.hstack((self.ydata, datadB[channelIndexList].reshape(-1,1))) - - + + axes.pmultilineyaxis(x=self.xdata, 
y=self.ydata, xmin=self.tmin, xmax=self.tmax, ymin=ymin, ymax=ymax, xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='.', markersize=8, linestyle="solid", grid='both', XAxisAsTime=True ) - + self.draw() - + update_figfile = False - + if dataOut.ltctime >= self.tmax: self.counter_imagwr = wr_period self.isConfig = False update_figfile = True - + self.save(figpath=figpath, figfile=figfile, save=save, diff --git a/schainpy/model/graphics/jroplot_spectra.py b/schainpy/model/graphics/jroplot_spectra.py index eafac9f..99882c0 100644 --- a/schainpy/model/graphics/jroplot_spectra.py +++ b/schainpy/model/graphics/jroplot_spectra.py @@ -25,8 +25,8 @@ class SpectraPlot(Figure): self.isConfig = False self.__nsubplots = 1 - self.WIDTH = 250 - self.HEIGHT = 250 + self.WIDTH = 300 + self.HEIGHT = 300 self.WIDTHPROF = 120 self.HEIGHTPROF = 0 self.counter_imagwr = 0 diff --git a/schainpy/model/graphics/jroplot_voltage.py b/schainpy/model/graphics/jroplot_voltage.py index 927b819..ed4dfc5 100644 --- a/schainpy/model/graphics/jroplot_voltage.py +++ b/schainpy/model/graphics/jroplot_voltage.py @@ -113,7 +113,7 @@ class Scope(Figure): def run(self, dataOut, id, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None, save=False, figpath='./', figfile=None, show=True, wr_period=1, - ftp=False, server=None, folder=None, username=None, password=None, type='power'): + ftp=False, server=None, folder=None, username=None, password=None, type='power', **kwargs): """ diff --git a/schainpy/model/graphics/mpldriver.py b/schainpy/model/graphics/mpldriver.py index 0b3b227..c1e31ac 100644 --- a/schainpy/model/graphics/mpldriver.py +++ b/schainpy/model/graphics/mpldriver.py @@ -4,32 +4,36 @@ import sys import matplotlib if 'linux' in sys.platform: - matplotlib.use("GTK3Agg") + matplotlib.use("TkAgg") if 'darwin' in sys.platform: matplotlib.use('TKAgg') -#Qt4Agg', 'GTK', 'GTKAgg', 'ps', 'agg', 'cairo', 'MacOSX', 'GTKCairo', 'WXAgg', 'template', 'TkAgg', 'GTK3Cairo', 'GTK3Agg', 'svg', 'WebAgg', 'CocoaAgg', 'emf', 'gdk', 'WX' +# Qt4Agg', 'GTK', 'GTKAgg', 'ps', 'agg', 'cairo', 'MacOSX', 'GTKCairo', 'WXAgg', 'template', 'TkAgg', 'GTK3Cairo', 'GTK3Agg', 'svg', 'WebAgg', 'CocoaAgg', 'emf', 'gdk', 'WX' import matplotlib.pyplot from mpl_toolkits.axes_grid1 import make_axes_locatable from matplotlib.ticker import FuncFormatter, LinearLocator ########################################### -#Actualizacion de las funciones del driver +# Actualizacion de las funciones del driver ########################################### # create jro colormap jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90] -blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15] -ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values))) +blu_values = matplotlib.pyplot.get_cmap( + "seismic_r", 20)(numpy.arange(20))[10:15] +ncmap = matplotlib.colors.LinearSegmentedColormap.from_list( + "jro", numpy.vstack((blu_values, jet_values))) matplotlib.pyplot.register_cmap(cmap=ncmap) -def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80): + +def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi=80): matplotlib.pyplot.ioff() - fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=(1.0*width/dpi, 1.0*height/dpi)) + fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=( + 1.0 * width / dpi, 1.0 * height / dpi)) fig.canvas.manager.set_window_title(wintitle) # 
fig.canvas.manager.resize(width, height) matplotlib.pyplot.ion() @@ -39,10 +43,11 @@ def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80 return fig + def closeFigure(show=False, fig=None): -# matplotlib.pyplot.ioff() -# matplotlib.pyplot.pause(0) + # matplotlib.pyplot.ioff() + # matplotlib.pyplot.pause(0) if show: matplotlib.pyplot.show() @@ -60,47 +65,52 @@ def closeFigure(show=False, fig=None): return + def saveFigure(fig, filename): -# matplotlib.pyplot.ioff() + # matplotlib.pyplot.ioff() fig.savefig(filename, dpi=matplotlib.pyplot.gcf().dpi) # matplotlib.pyplot.ion() + def clearFigure(fig): fig.clf() + def setWinTitle(fig, title): fig.canvas.manager.set_window_title(title) + def setTitle(fig, title): fig.suptitle(title) + def createAxes(fig, nrow, ncol, xpos, ypos, colspan, rowspan, polar=False): matplotlib.pyplot.ioff() matplotlib.pyplot.figure(fig.number) axes = matplotlib.pyplot.subplot2grid((nrow, ncol), - (xpos, ypos), - colspan=colspan, - rowspan=rowspan, - polar=polar) - - axes.grid(True) + (xpos, ypos), + colspan=colspan, + rowspan=rowspan, + polar=polar) matplotlib.pyplot.ion() return axes + def setAxesText(ax, text): ax.annotate(text, - xy = (.1, .99), - xycoords = 'figure fraction', - horizontalalignment = 'left', - verticalalignment = 'top', - fontsize = 10) + xy=(.1, .99), + xycoords='figure fraction', + horizontalalignment='left', + verticalalignment='top', + fontsize=10) + def printLabels(ax, xlabel, ylabel, title): @@ -108,11 +118,11 @@ def printLabels(ax, xlabel, ylabel, title): ax.set_ylabel(ylabel, size=11) ax.set_title(title, size=8) + def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', ticksize=9, xtick_visible=True, ytick_visible=True, nxticks=4, nyticks=10, - grid=None,color='blue'): - + grid=None, color='blue'): """ Input: @@ -121,18 +131,19 @@ def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='' matplotlib.pyplot.ioff() - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) + ax.set_xlim([xmin, xmax]) + ax.set_ylim([ymin, ymax]) printLabels(ax, xlabel, ylabel, title) ###################################################### - if (xmax-xmin)<=1: - xtickspos = numpy.linspace(xmin,xmax,nxticks) - xtickspos = numpy.array([float("%.1f"%i) for i in xtickspos]) + if (xmax - xmin) <= 1: + xtickspos = numpy.linspace(xmin, xmax, nxticks) + xtickspos = numpy.array([float("%.1f" % i) for i in xtickspos]) ax.set_xticks(xtickspos) else: - xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) + xtickspos = numpy.arange(nxticks) * \ + int((xmax - xmin) / (nxticks)) + int(xmin) # xtickspos = numpy.arange(nxticks)*float(xmax-xmin)/float(nxticks) + int(xmin) ax.set_xticks(xtickspos) @@ -170,26 +181,29 @@ def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='' return iplot + def set_linedata(ax, x, y, idline): - ax.lines[idline].set_data(x,y) + ax.lines[idline].set_data(x, y) + def pline(iplot, x, y, xlabel='', ylabel='', title=''): - ax = iplot.get_axes() + ax = iplot.axes printLabels(ax, xlabel, ylabel, title) set_linedata(ax, x, y, idline=0) + def addpline(ax, x, y, color, linestyle, lw): - ax.plot(x,y,color=color,linestyle=linestyle,lw=lw) + ax.plot(x, y, color=color, linestyle=linestyle, lw=lw) def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, - xlabel='', ylabel='', title='', ticksize = 9, - colormap='jet',cblabel='', cbsize="5%", + xlabel='', ylabel='', title='', ticksize=9, + colormap='jet', cblabel='', cbsize="5%", 
XAxisAsTime=False): matplotlib.pyplot.ioff() @@ -199,16 +213,16 @@ def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, fig = ax.get_figure() fig.add_axes(ax_cb) - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) + ax.set_xlim([xmin, xmax]) + ax.set_ylim([ymin, ymax]) printLabels(ax, xlabel, ylabel, title) z = numpy.ma.masked_invalid(z) - cmap=matplotlib.pyplot.get_cmap(colormap) - cmap.set_bad('white',1.) - imesh = ax.pcolormesh(x,y,z.T, vmin=zmin, vmax=zmax, cmap=cmap) - cb = matplotlib.pyplot.colorbar(imesh, cax=ax_cb) + cmap = matplotlib.pyplot.get_cmap(colormap) + cmap.set_bad('black', 1.) + imesh = ax.pcolormesh(x, y, z.T, vmin=zmin, vmax=zmax, cmap=cmap) + cb = matplotlib.pyplot.colorbar(imesh, cax=ax_cb) cb.set_label(cblabel) # for tl in ax_cb.get_yticklabels(): @@ -237,36 +251,30 @@ def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, if XAxisAsTime: - func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) + def func(x, pos): return ('%s') % ( + datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) ax.xaxis.set_major_formatter(FuncFormatter(func)) ax.xaxis.set_major_locator(LinearLocator(7)) - ax.grid(True) matplotlib.pyplot.ion() return imesh -def pcolor(imesh, z, xlabel='', ylabel='', title=''): - z = numpy.ma.masked_invalid(z) - - cmap=matplotlib.pyplot.get_cmap('jet') - cmap.set_bad('white',1.) +def pcolor(imesh, z, xlabel='', ylabel='', title=''): z = z.T - ax = imesh.get_axes() + ax = imesh.axes printLabels(ax, xlabel, ylabel, title) imesh.set_array(z.ravel()) - ax.grid(True) def addpcolor(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'): printLabels(ax, xlabel, ylabel, title) - z = numpy.ma.masked_invalid(z) - cmap=matplotlib.pyplot.get_cmap(colormap) - cmap.set_bad('white',1.) - ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=matplotlib.pyplot.get_cmap(colormap)) - ax.grid(True) + + ax.pcolormesh(x, y, z.T, vmin=zmin, vmax=zmax, + cmap=matplotlib.pyplot.get_cmap(colormap)) + def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'): @@ -275,19 +283,17 @@ def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', col ax.collections.remove(ax.collections[0]) z = numpy.ma.masked_invalid(z) - - cmap=matplotlib.pyplot.get_cmap(colormap) - cmap.set_bad('white',1.) - ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=cmap) - ax.grid(True) + cmap = matplotlib.pyplot.get_cmap(colormap) + cmap.set_bad('black', 1.) 
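For reference, a minimal standalone sketch (not part of the patch) of the masking pattern the pcolor helpers above follow: invalid samples are hidden with numpy.ma.masked_invalid, painted with the colormap's "bad" color, and later refreshes reuse the existing mesh via set_array() instead of redrawing it. The array shapes and values below are invented for the example.

import numpy
import matplotlib.pyplot as plt

z = numpy.random.rand(10, 20)
z[2, 5] = numpy.nan                        # an invalid sample
z = numpy.ma.masked_invalid(z)             # keep NaNs out of the color scaling

cmap = plt.get_cmap('jet')
cmap.set_bad('black', 1.)                  # masked cells drawn in black, as set above

fig, ax = plt.subplots()
mesh = ax.pcolormesh(numpy.arange(21), numpy.arange(11), z,
                     vmin=0, vmax=1, cmap=cmap)

# later updates refresh the same QuadMesh, mirroring imesh.set_array(z.ravel())
z_new = numpy.ma.masked_invalid(numpy.random.rand(10, 20))
mesh.set_array(z_new.ravel())
fig.canvas.draw()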
+ ax.pcolormesh(x, y, z.T, vmin=zmin, vmax=zmax, cmap=cmap) -def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, - ticksize=9, xtick_visible=True, ytick_visible=True, - nxticks=4, nyticks=10, - grid=None): +def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, + ticksize=9, xtick_visible=True, ytick_visible=True, + nxticks=4, nyticks=10, + grid=None): """ Input: @@ -299,11 +305,12 @@ def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit lines = ax.plot(x.T, y) leg = ax.legend(lines, legendlabels, loc='upper right') leg.get_frame().set_alpha(0.5) - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) + ax.set_xlim([xmin, xmax]) + ax.set_ylim([ymin, ymax]) printLabels(ax, xlabel, ylabel, title) - xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) + xtickspos = numpy.arange(nxticks) * \ + int((xmax - xmin) / (nxticks)) + int(xmin) ax.set_xticks(xtickspos) for tick in ax.get_xticklabels(): @@ -340,19 +347,19 @@ def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit def pmultiline(iplot, x, y, xlabel='', ylabel='', title=''): - ax = iplot.get_axes() + ax = iplot.axes printLabels(ax, xlabel, ylabel, title) for i in range(len(ax.lines)): line = ax.lines[i] - line.set_data(x[i,:],y) + line.set_data(x[i, :], y) -def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, - ticksize=9, xtick_visible=True, ytick_visible=True, - nxticks=4, nyticks=10, marker='.', markersize=10, linestyle="None", - grid=None, XAxisAsTime=False): +def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, + ticksize=9, xtick_visible=True, ytick_visible=True, + nxticks=4, nyticks=10, marker='.', markersize=10, linestyle="None", + grid=None, XAxisAsTime=False): """ Input: @@ -369,10 +376,11 @@ def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='' leg = ax.legend(lines, legendlabels, loc='upper right', bbox_to_anchor=(1.16, 1), borderaxespad=0) - for label in leg.get_texts(): label.set_fontsize(9) + for label in leg.get_texts(): + label.set_fontsize(9) - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) + ax.set_xlim([xmin, xmax]) + ax.set_ylim([ymin, ymax]) printLabels(ax, xlabel, ylabel, title) # xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) @@ -407,7 +415,8 @@ def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='' if XAxisAsTime: - func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) + def func(x, pos): return ('%s') % ( + datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) ax.xaxis.set_major_formatter(FuncFormatter(func)) ax.xaxis.set_major_locator(LinearLocator(7)) @@ -415,30 +424,33 @@ def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='' return iplot + def pmultilineyaxis(iplot, x, y, xlabel='', ylabel='', title=''): - ax = iplot.get_axes() + ax = iplot.axes + printLabels(ax, xlabel, ylabel, title) for i in range(len(ax.lines)): line = ax.lines[i] - line.set_data(x,y[i,:]) + line.set_data(x, y[i, :]) + def createPolar(ax, x, y, - xlabel='', ylabel='', title='', ticksize = 9, - colormap='jet',cblabel='', cbsize="5%", - XAxisAsTime=False): + xlabel='', ylabel='', title='', ticksize=9, + colormap='jet', cblabel='', cbsize="5%", + XAxisAsTime=False): matplotlib.pyplot.ioff() - 
ax.plot(x,y,'bo', markersize=5) + ax.plot(x, y, 'bo', markersize=5) # ax.set_rmax(90) - ax.set_ylim(0,90) - ax.set_yticks(numpy.arange(0,90,20)) + ax.set_ylim(0, 90) + ax.set_yticks(numpy.arange(0, 90, 20)) # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center' ,size='11') # ax.text(0, 50, ylabel, rotation='vertical', va ='center', ha = 'left' ,size='11') # ax.text(100, 100, 'example', ha='left', va='center', rotation='vertical') - ax.yaxis.labelpad = 230 + ax.yaxis.labelpad = 40 printLabels(ax, xlabel, ylabel, title) iplot = ax.lines[-1] @@ -457,18 +469,19 @@ def createPolar(ax, x, y, matplotlib.pyplot.ion() - return iplot + def polar(iplot, x, y, xlabel='', ylabel='', title=''): - ax = iplot.get_axes() + ax = iplot.axes # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center',size='11') printLabels(ax, xlabel, ylabel, title) set_linedata(ax, x, y, idline=0) + def draw(fig): if type(fig) == 'int': @@ -476,6 +489,7 @@ def draw(fig): fig.canvas.draw() + def pause(interval=0.000001): matplotlib.pyplot.pause(interval) diff --git a/schainpy/model/graphics/mpldriver2.py b/schainpy/model/graphics/mpldriver2.py deleted file mode 100644 index d6cbfd1..0000000 --- a/schainpy/model/graphics/mpldriver2.py +++ /dev/null @@ -1,469 +0,0 @@ -import numpy -import datetime -import sys -import matplotlib - -if 'linux' in sys.platform: - matplotlib.use("TKAgg") - -if 'darwin' in sys.platform: - matplotlib.use('TKAgg') -#Qt4Agg', 'GTK', 'GTKAgg', 'ps', 'agg', 'cairo', 'MacOSX', 'GTKCairo', 'WXAgg', 'template', 'TkAgg', 'GTK3Cairo', 'GTK3Agg', 'svg', 'WebAgg', 'CocoaAgg', 'emf', 'gdk', 'WX' -import matplotlib.pyplot - -from mpl_toolkits.axes_grid1 import make_axes_locatable -from matplotlib.ticker import FuncFormatter, LinearLocator - -########################################### -#Actualizacion de las funciones del driver -########################################### - -jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90] -blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15] -ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values))) -matplotlib.pyplot.register_cmap(cmap=ncmap) - -def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80): - - matplotlib.pyplot.ioff() - - fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=(1.0*width/dpi, 1.0*height/dpi)) - fig.canvas.manager.set_window_title(wintitle) -# fig.canvas.manager.resize(width, height) - matplotlib.pyplot.ion() - - - if show: - matplotlib.pyplot.show() - - return fig - -def closeFigure(show=False, fig=None): - -# matplotlib.pyplot.ioff() -# matplotlib.pyplot.pause(0) - - if show: - matplotlib.pyplot.show() - - if fig != None: - matplotlib.pyplot.close(fig) -# matplotlib.pyplot.pause(0) -# matplotlib.pyplot.ion() - - return - - matplotlib.pyplot.close("all") -# matplotlib.pyplot.pause(0) -# matplotlib.pyplot.ion() - - return - -def saveFigure(fig, filename): - -# matplotlib.pyplot.ioff() - fig.savefig(filename, dpi=matplotlib.pyplot.gcf().dpi) -# matplotlib.pyplot.ion() - -def clearFigure(fig): - - fig.clf() - -def setWinTitle(fig, title): - - fig.canvas.manager.set_window_title(title) - -def setTitle(fig, title): - - fig.suptitle(title) - -def createAxes(fig, nrow, ncol, xpos, ypos, colspan, rowspan, polar=False): - - matplotlib.pyplot.ioff() - matplotlib.pyplot.figure(fig.number) - axes = matplotlib.pyplot.subplot2grid((nrow, ncol), - (xpos, ypos), - colspan=colspan, - 
rowspan=rowspan, - polar=polar) - - axes.grid(True) - matplotlib.pyplot.ion() - return axes - -def setAxesText(ax, text): - - ax.annotate(text, - xy = (.1, .99), - xycoords = 'figure fraction', - horizontalalignment = 'left', - verticalalignment = 'top', - fontsize = 10) - -def printLabels(ax, xlabel, ylabel, title): - - ax.set_xlabel(xlabel, size=11) - ax.set_ylabel(ylabel, size=11) - ax.set_title(title, size=8) - -def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', - ticksize=9, xtick_visible=True, ytick_visible=True, - nxticks=4, nyticks=10, - grid=None,color='blue'): - - """ - - Input: - grid : None, 'both', 'x', 'y' - """ - - matplotlib.pyplot.ioff() - - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) - - printLabels(ax, xlabel, ylabel, title) - - ###################################################### - if (xmax-xmin)<=1: - xtickspos = numpy.linspace(xmin,xmax,nxticks) - xtickspos = numpy.array([float("%.1f"%i) for i in xtickspos]) - ax.set_xticks(xtickspos) - else: - xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) -# xtickspos = numpy.arange(nxticks)*float(xmax-xmin)/float(nxticks) + int(xmin) - ax.set_xticks(xtickspos) - - for tick in ax.get_xticklabels(): - tick.set_visible(xtick_visible) - - for tick in ax.xaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - ###################################################### - for tick in ax.get_yticklabels(): - tick.set_visible(ytick_visible) - - for tick in ax.yaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - ax.plot(x, y, color=color) - iplot = ax.lines[-1] - - ###################################################### - if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if grid != None: - ax.grid(b=True, which='major', axis=grid) - - matplotlib.pyplot.tight_layout() - - matplotlib.pyplot.ion() - - return iplot - -def set_linedata(ax, x, y, idline): - - ax.lines[idline].set_data(x,y) - -def pline(iplot, x, y, xlabel='', ylabel='', title=''): - - ax = iplot.get_axes() - - printLabels(ax, xlabel, ylabel, title) - - set_linedata(ax, x, y, idline=0) - -def addpline(ax, x, y, color, linestyle, lw): - - ax.plot(x,y,color=color,linestyle=linestyle,lw=lw) - - -def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, - xlabel='', ylabel='', title='', ticksize = 9, - colormap='jet',cblabel='', cbsize="5%", - XAxisAsTime=False): - - matplotlib.pyplot.ioff() - - divider = make_axes_locatable(ax) - ax_cb = divider.new_horizontal(size=cbsize, pad=0.05) - fig = ax.get_figure() - fig.add_axes(ax_cb) - - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) - - printLabels(ax, xlabel, ylabel, title) - - z = numpy.ma.masked_invalid(z) - cmap=matplotlib.pyplot.get_cmap(colormap) - cmap.set_bad('white',1.) - imesh = ax.pcolormesh(x,y,z.T, vmin=zmin, vmax=zmax, cmap=cmap) - cb = matplotlib.pyplot.colorbar(imesh, cax=ax_cb) - cb.set_label(cblabel) - -# for tl in ax_cb.get_yticklabels(): -# tl.set_visible(True) - - for tick in ax.yaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - for tick in ax.xaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - for tick in cb.ax.get_yticklabels(): - tick.set_fontsize(ticksize) - - ax_cb.yaxis.tick_right() - - if '0.' 
in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" - return imesh - - if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" - return imesh - - matplotlib.pyplot.tight_layout() - - if XAxisAsTime: - - func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) - ax.xaxis.set_major_formatter(FuncFormatter(func)) - ax.xaxis.set_major_locator(LinearLocator(7)) - ax.grid(True) - matplotlib.pyplot.ion() - return imesh - -def pcolor(imesh, z, xlabel='', ylabel='', title=''): - - z = z.T - ax = imesh.get_axes() - printLabels(ax, xlabel, ylabel, title) - imesh.set_array(z.ravel()) - ax.grid(True) - -def addpcolor(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'): - - printLabels(ax, xlabel, ylabel, title) - ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=matplotlib.pyplot.get_cmap(colormap)) - ax.grid(True) - -def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'): - - printLabels(ax, xlabel, ylabel, title) - - ax.collections.remove(ax.collections[0]) - - z = numpy.ma.masked_invalid(z) - - cmap=matplotlib.pyplot.get_cmap(colormap) - cmap.set_bad('white',1.) - - ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=cmap) - ax.grid(True) - -def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, - ticksize=9, xtick_visible=True, ytick_visible=True, - nxticks=4, nyticks=10, - grid=None): - - """ - - Input: - grid : None, 'both', 'x', 'y' - """ - - matplotlib.pyplot.ioff() - - lines = ax.plot(x.T, y) - leg = ax.legend(lines, legendlabels, loc='upper right') - leg.get_frame().set_alpha(0.5) - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) - printLabels(ax, xlabel, ylabel, title) - - xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) - ax.set_xticks(xtickspos) - - for tick in ax.get_xticklabels(): - tick.set_visible(xtick_visible) - - for tick in ax.xaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - for tick in ax.get_yticklabels(): - tick.set_visible(ytick_visible) - - for tick in ax.yaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - iplot = ax.lines[-1] - - if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if grid != None: - ax.grid(b=True, which='major', axis=grid) - - matplotlib.pyplot.tight_layout() - - matplotlib.pyplot.ion() - - return iplot - - -def pmultiline(iplot, x, y, xlabel='', ylabel='', title=''): - - ax = iplot.get_axes() - - printLabels(ax, xlabel, ylabel, title) - - for i in range(len(ax.lines)): - line = ax.lines[i] - line.set_data(x[i,:],y) - -def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None, - ticksize=9, xtick_visible=True, ytick_visible=True, - nxticks=4, nyticks=10, marker='.', markersize=10, linestyle="None", - grid=None, XAxisAsTime=False): - - """ - - Input: - grid : None, 'both', 'x', 'y' - """ - - matplotlib.pyplot.ioff() - -# lines = ax.plot(x, y.T, marker=marker,markersize=markersize,linestyle=linestyle) - lines = ax.plot(x, y.T) -# leg = ax.legend(lines, legendlabels, loc=2, bbox_to_anchor=(1.01, 1.00), numpoints=1, handlelength=1.5, \ -# handletextpad=0.5, borderpad=0.5, labelspacing=0.5, borderaxespad=0.) 
- - leg = ax.legend(lines, legendlabels, - loc='upper right', bbox_to_anchor=(1.16, 1), borderaxespad=0) - - for label in leg.get_texts(): label.set_fontsize(9) - - ax.set_xlim([xmin,xmax]) - ax.set_ylim([ymin,ymax]) - printLabels(ax, xlabel, ylabel, title) - -# xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin) -# ax.set_xticks(xtickspos) - - for tick in ax.get_xticklabels(): - tick.set_visible(xtick_visible) - - for tick in ax.xaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - for tick in ax.get_yticklabels(): - tick.set_visible(ytick_visible) - - for tick in ax.yaxis.get_major_ticks(): - tick.label.set_fontsize(ticksize) - - iplot = ax.lines[-1] - - if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if grid != None: - ax.grid(b=True, which='major', axis=grid) - - matplotlib.pyplot.tight_layout() - - if XAxisAsTime: - - func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S")) - ax.xaxis.set_major_formatter(FuncFormatter(func)) - ax.xaxis.set_major_locator(LinearLocator(7)) - - matplotlib.pyplot.ion() - - return iplot - -def pmultilineyaxis(iplot, x, y, xlabel='', ylabel='', title=''): - - ax = iplot.get_axes() - - printLabels(ax, xlabel, ylabel, title) - - for i in range(len(ax.lines)): - line = ax.lines[i] - line.set_data(x,y[i,:]) - -def createPolar(ax, x, y, - xlabel='', ylabel='', title='', ticksize = 9, - colormap='jet',cblabel='', cbsize="5%", - XAxisAsTime=False): - - matplotlib.pyplot.ioff() - - ax.plot(x,y,'bo', markersize=5) -# ax.set_rmax(90) - ax.set_ylim(0,90) - ax.set_yticks(numpy.arange(0,90,20)) -# ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center' ,size='11') -# ax.text(0, 50, ylabel, rotation='vertical', va ='center', ha = 'left' ,size='11') -# ax.text(100, 100, 'example', ha='left', va='center', rotation='vertical') - ax.yaxis.labelpad = 230 - printLabels(ax, xlabel, ylabel, title) - iplot = ax.lines[-1] - - if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - - if '1.0.' 
in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" - return iplot - -# if grid != None: -# ax.grid(b=True, which='major', axis=grid) - - matplotlib.pyplot.tight_layout() - - matplotlib.pyplot.ion() - - - return iplot - -def polar(iplot, x, y, xlabel='', ylabel='', title=''): - - ax = iplot.get_axes() - -# ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center',size='11') - printLabels(ax, xlabel, ylabel, title) - - set_linedata(ax, x, y, idline=0) - -def draw(fig): - - if type(fig) == 'int': - raise ValueError, "Error drawing: Fig parameter should be a matplotlib figure object figure" - - fig.canvas.draw() - -def pause(interval=0.000001): - - matplotlib.pyplot.pause(interval) diff --git a/schainpy/model/io/MIRAtest.py b/schainpy/model/io/MIRAtest.py index c48bc8b..ea8e94f 100644 --- a/schainpy/model/io/MIRAtest.py +++ b/schainpy/model/io/MIRAtest.py @@ -1,4 +1,5 @@ -import os, sys +import os +import sys import glob import fnmatch import datetime @@ -6,11 +7,9 @@ import time import re import h5py import numpy -import matplotlib.pyplot as plt -import pylab as plb from scipy.optimize import curve_fit -from scipy import asarray as ar,exp +from scipy import asarray as ar, exp from scipy import stats from duplicity.path import Path @@ -31,113 +30,130 @@ from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation from numpy import imag, shape, NaN -startFp = open('/home/erick/Documents/MIRA35C/20160117/20160117_0000.zspc',"rb") - - -FILE_HEADER = numpy.dtype([ #HEADER 1024bytes - ('Hname',numpy.str_,32), #Original file name - ('Htime',numpy.str_,32), #Date and time when the file was created - ('Hoper',numpy.str_,64), #Name of operator who created the file - ('Hplace',numpy.str_,128), #Place where the measurements was carried out - ('Hdescr',numpy.str_,256), #Description of measurements - ('Hdummy',numpy.str_,512), #Reserved space - #Main chunk - ('Msign','=5 - ('SPARrawGate2','=5 + ('SPARrawGate1', ' dateFile) or (endDate < dateFile): continue - + self.fileList.append(thisFile) self.dateFileList.append(dateFile) @@ -192,14 +193,15 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): filename = os.path.join(self.path, self.fileList[file_id]) dirname, name = os.path.split(filename) - self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya + # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya + self.siteFile = name.split('.')[0] if self.filename is not None: self.fp.close() self.filename = filename self.fp = open(self.filename, 'rb') self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1) self.nrecords = self.header_file['nrec'][0] - self.sizeOfFile = os.path.getsize(self.filename) + self.sizeOfFile = os.path.getsize(self.filename) self.counter_records = 0 self.flagIsNewFile = 0 self.fileIndex += 1 @@ -238,7 +240,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): pointer = self.fp.tell() header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1) - self.nchannels = header_rec['nchan'][0]/2 + self.nchannels = header_rec['nchan'][0] / 2 self.kchan = header_rec['nrxs'][0] self.nmodes = header_rec['nmodes'][0] self.nranges = header_rec['nranges'][0] @@ -249,7 +251,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.flagDiscontinuousBlock = 0 for mode in range(self.nmodes): - self.readHeader() + self.readHeader() data = self.readData() self.height[mode] = (data[0] - self.correction) / 1000. 
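As an aside on the record reading above: the BLTR reader pulls fixed-layout headers and data straight into numpy structured arrays with numpy.fromfile. A small sketch of that technique follows; the field names, sizes and the file name here are invented for illustration and are not the real BLTR layout.

import numpy

# Hypothetical 16-byte record layout, for illustration only
REC = numpy.dtype([('nchan', '<i4'), ('nranges', '<i4'), ('time', '<f8')])

with open('record.bin', 'rb') as fp:                      # hypothetical file
    header = numpy.fromfile(fp, REC, 1)
    nranges = int(header['nranges'][0])
    data = numpy.fromfile(fp, numpy.dtype([('range', '<f4'),
                                           ('snr', '<f4')]), nranges)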
self.buffer[mode] = data[1] @@ -263,7 +265,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): ''' RecordHeader of BLTR rawdata file ''' - + header_structure = numpy.dtype( REC_HEADER_STRUCTURE.descr + [ ('antenna_coord', 'f4', (2, self.nchannels)), @@ -277,7 +279,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.lon = self.header_rec['lon'][0] self.delta = self.header_rec['delta_r'][0] self.correction = self.header_rec['dmode_rngcorr'][0] - self.imode = self.header_rec['dmode_index'][0] + self.imode = self.header_rec['dmode_index'][0] self.antenna = self.header_rec['antenna_coord'] self.rx_gains = self.header_rec['rx_gains'] self.time = self.header_rec['time'][0] @@ -308,22 +310,23 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): data = numpy.fromfile(self.fp, data_structure, self.nranges) height = data['range'] - winds = numpy.array((data['zonal'], data['meridional'], data['vertical'])) + winds = numpy.array( + (data['zonal'], data['meridional'], data['vertical'])) snr = data['rx_snr'].T - winds[numpy.where(winds == -9999.)] = numpy.nan + winds[numpy.where(winds == -9999.)] = numpy.nan winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan snr[numpy.where(snr == -9999.)] = numpy.nan snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan - snr = numpy.power(10, snr / 10) - + snr = numpy.power(10, snr / 10) + return height, winds, snr def set_output(self): ''' Storing data from databuffer to dataOut object ''' - + self.dataOut.data_SNR = self.snr self.dataOut.height = self.height self.dataOut.data = self.buffer @@ -333,7 +336,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.dataOut.paramInterval = 157 self.dataOut.timezone = self.timezone self.dataOut.site = self.siteFile - self.dataOut.nrecords = self.nrecords/self.nmodes + self.dataOut.nrecords = self.nrecords / self.nmodes self.dataOut.sizeOfFile = self.sizeOfFile self.dataOut.lat = self.lat self.dataOut.lon = self.lon @@ -357,7 +360,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): log.success('No file left to process', 'BLTRParamReader') return 0 - if not self.readNextBlock(): + if not self.readNextBlock(): self.dataOut.flagNoData = True return 0 diff --git a/schainpy/model/io/jroIO_base.py b/schainpy/model/io/jroIO_base.py index ae5d5a1..ebb4ccd 100644 --- a/schainpy/model/io/jroIO_base.py +++ b/schainpy/model/io/jroIO_base.py @@ -10,7 +10,8 @@ import time import numpy import fnmatch import inspect -import time, datetime +import time +import datetime import traceback import zmq @@ -24,6 +25,7 @@ from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, ge LOCALTIME = True + def isNumber(cad): """ Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero. @@ -38,11 +40,12 @@ def isNumber(cad): False : no es un string numerico """ try: - float( cad ) + float(cad) return True except: return False + def isFileInEpoch(filename, startUTSeconds, endUTSeconds): """ Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado. 
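A small standalone sketch of the sentinel and unit handling in readData() above: -9999. marks missing samples and SNR arrives in dB, so it is converted to linear power with 10**(dB/10). The arrays below are made up for the example.

import numpy

winds = numpy.array([[3.2, -9999., 1.1]])
snr_db = numpy.array([[12.0, -9999., 6.0]])

winds[winds == -9999.] = numpy.nan          # drop the sentinel values
snr_db[snr_db == -9999.] = numpy.nan
snr_linear = numpy.power(10, snr_db / 10)   # dB -> linear, as in readData()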
@@ -67,16 +70,16 @@ def isFileInEpoch(filename, startUTSeconds, endUTSeconds): basicHeaderObj = BasicHeader(LOCALTIME) try: - fp = open(filename,'rb') + fp = open(filename, 'rb') except IOError: - print "The file %s can't be opened" %(filename) + print "The file %s can't be opened" % (filename) return 0 sts = basicHeaderObj.read(fp) fp.close() if not(sts): - print "Skipping the file %s because it has not a valid header" %(filename) + print "Skipping the file %s because it has not a valid header" % (filename) return 0 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): @@ -84,19 +87,18 @@ def isFileInEpoch(filename, startUTSeconds, endUTSeconds): return 1 -def isTimeInRange(thisTime, startTime, endTime): +def isTimeInRange(thisTime, startTime, endTime): if endTime >= startTime: if (thisTime < startTime) or (thisTime > endTime): return 0 - return 1 else: if (thisTime < startTime) and (thisTime > endTime): return 0 - return 1 + def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): """ Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado. @@ -122,11 +124,10 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): """ - try: - fp = open(filename,'rb') + fp = open(filename, 'rb') except IOError: - print "The file %s can't be opened" %(filename) + print "The file %s can't be opened" % (filename) return None firstBasicHeaderObj = BasicHeader(LOCALTIME) @@ -139,7 +140,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): sts = firstBasicHeaderObj.read(fp) if not(sts): - print "[Reading] Skipping the file %s because it has not a valid header" %(filename) + print "[Reading] Skipping the file %s because it has not a valid header" % (filename) return None if not systemHeaderObj.read(fp): @@ -153,10 +154,10 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): filesize = os.path.getsize(filename) - offset = processingHeaderObj.blockSize + 24 #header size + offset = processingHeaderObj.blockSize + 24 # header size if filesize <= offset: - print "[Reading] %s: This file has not enough data" %filename + print "[Reading] %s: This file has not enough data" % filename return None fp.seek(-offset, 2) @@ -172,7 +173,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): thisDate = thisDatetime.date() thisTime_first_block = thisDatetime.time() - #General case + # General case # o>>>>>>>>>>>>>><<<<<<<<<<<<<>>>>>>>>>> #-----------o----------------------------o----------- @@ -201,6 +201,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): return thisDatetime + def isFolderInDateRange(folder, startDate=None, endDate=None): """ Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado. @@ -227,7 +228,7 @@ def isFolderInDateRange(folder, startDate=None, endDate=None): basename = os.path.basename(folder) if not isRadarFolder(basename): - print "The folder %s has not the rigth format" %folder + print "The folder %s has not the rigth format" % folder return 0 if startDate and endDate: @@ -241,6 +242,7 @@ def isFolderInDateRange(folder, startDate=None, endDate=None): return 1 + def isFileInDateRange(filename, startDate=None, endDate=None): """ Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado. 
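A usage note on the isTimeInRange() refactor above: when endTime is earlier than startTime the range is treated as wrapping past midnight, so only times strictly between endTime and startTime are rejected. A brief sketch, assuming the schainpy package is importable and using arbitrary times:

import datetime
from schainpy.model.io.jroIO_base import isTimeInRange

start = datetime.time(22, 0, 0)
end = datetime.time(2, 0, 0)

isTimeInRange(datetime.time(23, 30, 0), start, end)   # -> 1, inside the wrapped range
isTimeInRange(datetime.time(12, 0, 0), start, end)    # -> 0, between end and start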
@@ -269,7 +271,7 @@ def isFileInDateRange(filename, startDate=None, endDate=None): basename = os.path.basename(filename) if not isRadarFile(basename): - print "The filename %s has not the rigth format" %filename + print "The filename %s has not the rigth format" % filename return 0 if startDate and endDate: @@ -283,6 +285,7 @@ def isFileInDateRange(filename, startDate=None, endDate=None): return 1 + def getFileFromSet(path, ext, set): validFilelist = [] fileList = os.listdir(path) @@ -293,7 +296,7 @@ def getFileFromSet(path, ext, set): for thisFile in fileList: try: year = int(thisFile[1:5]) - doy = int(thisFile[5:8]) + doy = int(thisFile[5:8]) except: continue @@ -302,21 +305,23 @@ def getFileFromSet(path, ext, set): validFilelist.append(thisFile) - myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set)) + myfile = fnmatch.filter( + validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set)) - if len(myfile)!= 0: + if len(myfile) != 0: return myfile[0] else: - filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower()) - print 'the filename %s does not exist'%filename + filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower()) + print 'the filename %s does not exist' % filename print '...going to the last file: ' if validFilelist: - validFilelist = sorted( validFilelist, key=str.lower ) + validFilelist = sorted(validFilelist, key=str.lower) return validFilelist[-1] return None + def getlastFileFromPath(path, ext): """ Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext" @@ -354,11 +359,12 @@ def getlastFileFromPath(path, ext): validFilelist.append(thisFile) if validFilelist: - validFilelist = sorted( validFilelist, key=str.lower ) + validFilelist = sorted(validFilelist, key=str.lower) return validFilelist[-1] return None + def checkForRealPath(path, foldercounter, year, doy, set, ext): """ Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path, @@ -386,28 +392,32 @@ def checkForRealPath(path, foldercounter, year, doy, set, ext): find_flag = False filename = None - prefixDirList = [None,'d','D'] - if ext.lower() == ".r": #voltage - prefixFileList = ['d','D'] - elif ext.lower() == ".pdata": #spectra - prefixFileList = ['p','P'] + prefixDirList = [None, 'd', 'D'] + if ext.lower() == ".r": # voltage + prefixFileList = ['d', 'D'] + elif ext.lower() == ".pdata": # spectra + prefixFileList = ['p', 'P'] else: return None, filename - #barrido por las combinaciones posibles + # barrido por las combinaciones posibles for prefixDir in prefixDirList: thispath = path if prefixDir != None: - #formo el nombre del directorio xYYYYDDD (x=d o x=D) + # formo el nombre del directorio xYYYYDDD (x=d o x=D) if foldercounter == 0: - thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy )) + thispath = os.path.join(path, "%s%04d%03d" % + (prefixDir, year, doy)) else: - thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter)) - for prefixFile in prefixFileList: #barrido por las dos combinaciones posibles de "D" - filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext - fullfilename = os.path.join( thispath, filename ) #formo el path completo - - if os.path.exists( fullfilename ): #verifico que exista + thispath = os.path.join(path, "%s%04d%03d_%02d" % ( + prefixDir, year, doy, foldercounter)) + for prefixFile in prefixFileList: # barrido por las dos combinaciones posibles de "D" + # formo el nombre del file xYYYYDDDSSS.ext + 
filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext) + fullfilename = os.path.join( + thispath, filename) # formo el path completo + + if os.path.exists(fullfilename): # verifico que exista find_flag = True break if find_flag: @@ -418,6 +428,7 @@ def checkForRealPath(path, foldercounter, year, doy, set, ext): return fullfilename, filename + def isRadarFolder(folder): try: year = int(folder[1:5]) @@ -427,15 +438,17 @@ def isRadarFolder(folder): return 1 + def isRadarFile(file): - try: - year = int(file[1:5]) - doy = int(file[5:8]) - set = int(file[8:11]) - except: - return 0 + try: + year = int(file[1:5]) + doy = int(file[5:8]) + set = int(file[8:11]) + except: + return 0 + + return 1 - return 1 def getDateFromRadarFile(file): try: @@ -445,9 +458,10 @@ def getDateFromRadarFile(file): except: return None - thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1) + thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1) return thisDate + def getDateFromRadarFolder(folder): try: year = int(folder[1:5]) @@ -455,9 +469,10 @@ def getDateFromRadarFolder(folder): except: return None - thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1) + thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1) return thisDate + class JRODataIO: c = 3E8 @@ -540,6 +555,7 @@ class JRODataIO: def getAllowedArgs(self): return inspect.getargspec(self.run).args + class JRODataReader(JRODataIO): online = 0 @@ -548,11 +564,11 @@ class JRODataReader(JRODataIO): nReadBlocks = 0 - delay = 10 #number of seconds waiting a new file + delay = 10 # number of seconds waiting a new file - nTries = 3 #quantity tries + nTries = 3 # quantity tries - nFiles = 3 #number of files for searching + nFiles = 3 # number of files for searching path = None @@ -572,14 +588,13 @@ class JRODataReader(JRODataIO): txIndex = None - #Added-------------------- + # Added-------------------- selBlocksize = None selBlocktime = None def __init__(self): - """ This class is used to find data files @@ -590,7 +605,6 @@ class JRODataReader(JRODataIO): """ pass - def createObjByDefault(self): """ @@ -605,8 +619,8 @@ class JRODataReader(JRODataIO): path, startDate=None, endDate=None, - startTime=datetime.time(0,0,0), - endTime=datetime.time(23,59,59), + startTime=datetime.time(0, 0, 0), + endTime=datetime.time(23, 59, 59), set=None, expLabel='', ext='.r', @@ -619,22 +633,23 @@ class JRODataReader(JRODataIO): pathList = [] - dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True) + dateList, pathList = self.findDatafiles( + path, startDate, endDate, expLabel, ext, walk, include_path=True) if dateList == []: return [], [] if len(dateList) > 1: - print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList)) + print "[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList)) else: - print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0]) + print "[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0]) filenameList = [] datetimeList = [] for thisPath in pathList: - fileList = glob.glob1(thisPath, "*%s" %ext) + fileList = glob.glob1(thisPath, "*%s" % ext) fileList.sort() skippedFileList = [] @@ -644,19 +659,21 @@ class JRODataReader(JRODataIO): if skip == 0: skippedFileList = [] else: - skippedFileList = fileList[cursor*skip: cursor*skip + skip] + skippedFileList = fileList[cursor * + skip: cursor * 
skip + skip] else: skippedFileList = fileList for file in skippedFileList: - filename = os.path.join(thisPath,file) + filename = os.path.join(thisPath, file) if not isFileInDateRange(filename, startDate, endDate): continue - thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime) + thisDatetime = isFileInTimeRange( + filename, startDate, endDate, startTime, endTime) if not(thisDatetime): continue @@ -665,10 +682,10 @@ class JRODataReader(JRODataIO): datetimeList.append(thisDatetime) if not(filenameList): - print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path) + print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path) return [], [] - print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime) + print "[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime) print # for i in range(len(filenameList)): @@ -679,8 +696,7 @@ class JRODataReader(JRODataIO): return pathList, filenameList - def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None): - + def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True, set=None): """ Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y devuelve el archivo encontrado ademas de otros datos. @@ -712,9 +728,9 @@ class JRODataReader(JRODataIO): fullpath = path foldercounter = 0 else: - #Filtra solo los directorios + # Filtra solo los directorios for thisPath in os.listdir(path): - if not os.path.isdir(os.path.join(path,thisPath)): + if not os.path.isdir(os.path.join(path, thisPath)): continue if not isRadarFolder(thisPath): continue @@ -724,14 +740,14 @@ class JRODataReader(JRODataIO): if not(dirList): return None, None, None, None, None, None - dirList = sorted( dirList, key=str.lower ) + dirList = sorted(dirList, key=str.lower) doypath = dirList[-1] - foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0 + foldercounter = int(doypath.split('_')[1]) if len( + doypath.split('_')) > 1 else 0 fullpath = os.path.join(path, doypath, expLabel) - - print "[Reading] %s folder was found: " %(fullpath ) + print "[Reading] %s folder was found: " % (fullpath) if set == None: filename = getlastFileFromPath(fullpath, ext) @@ -741,14 +757,14 @@ class JRODataReader(JRODataIO): if not(filename): return None, None, None, None, None, None - print "[Reading] %s file was found" %(filename) + print "[Reading] %s file was found" % (filename) if not(self.__verifyFile(os.path.join(fullpath, filename))): return None, None, None, None, None, None - year = int( filename[1:5] ) - doy = int( filename[5:8] ) - set = int( filename[8:11] ) + year = int(filename[1:5]) + doy = int(filename[5:8]) + set = int(filename[8:11]) return fullpath, foldercounter, filename, year, doy, set @@ -769,7 +785,7 @@ class JRODataReader(JRODataIO): continue fileSize = os.path.getsize(filename) - fp = open(filename,'rb') + fp = open(filename, 'rb') break self.flagIsNewFile = 1 @@ -813,29 +829,32 @@ class JRODataReader(JRODataIO): self.set = 0 self.foldercounter += 1 - #busca el 1er file disponible - fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext ) + # busca el 1er file disponible + fullfilename, filename = checkForRealPath( + self.path, self.foldercounter, self.year, self.doy, self.set, self.ext) if fullfilename: if 
self.__verifyFile(fullfilename, False): fileOk_flag = True - #si no encuentra un file entonces espera y vuelve a buscar + # si no encuentra un file entonces espera y vuelve a buscar if not(fileOk_flag): - for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles + # busco en los siguientes self.nFiles+1 files posibles + for nFiles in range(self.nFiles + 1): - if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces + if firstTime_flag: # si es la 1era vez entonces hace el for self.nTries veces tries = self.nTries else: - tries = 1 #si no es la 1era vez entonces solo lo hace una vez + tries = 1 # si no es la 1era vez entonces solo lo hace una vez - for nTries in range( tries ): + for nTries in range(tries): if firstTime_flag: - print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 ) - sleep( self.delay ) + print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1) + sleep(self.delay) else: print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext) - fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext ) + fullfilename, filename = checkForRealPath( + self.path, self.foldercounter, self.year, self.doy, self.set, self.ext) if fullfilename: if self.__verifyFile(fullfilename): fileOk_flag = True @@ -849,16 +868,18 @@ class JRODataReader(JRODataIO): print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename self.set += 1 - if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta + # si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta + if nFiles == (self.nFiles - 1): self.set = 0 self.doy += 1 self.foldercounter = 0 if fileOk_flag: - self.fileSize = os.path.getsize( fullfilename ) + self.fileSize = os.path.getsize(fullfilename) self.filename = fullfilename self.flagIsNewFile = 1 - if self.fp != None: self.fp.close() + if self.fp != None: + self.fp.close() self.fp = open(fullfilename, 'rb') self.flagNoMoreFiles = 0 # print '[Reading] Setting the file: %s' % fullfilename @@ -908,48 +929,47 @@ class JRODataReader(JRODataIO): neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize - for nTries in range( self.nTries ): + for nTries in range(self.nTries): self.fp.close() - self.fp = open( self.filename, 'rb' ) - self.fp.seek( currentPointer ) + self.fp = open(self.filename, 'rb') + self.fp.seek(currentPointer) - self.fileSize = os.path.getsize( self.filename ) + self.fileSize = os.path.getsize(self.filename) currentSize = self.fileSize - currentPointer - if ( currentSize >= neededSize ): + if (currentSize >= neededSize): self.basicHeaderObj.read(self.fp) return 1 if self.fileSize == self.fileSizeByHeader: -# self.flagEoF = True + # self.flagEoF = True return 0 - print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) - sleep( self.delay ) - + print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." 
% (self.delay, nTries + 1) + sleep(self.delay) return 0 - def waitDataBlock(self,pointer_location): + def waitDataBlock(self, pointer_location): currentPointer = pointer_location - neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize + neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize - for nTries in range( self.nTries ): + for nTries in range(self.nTries): self.fp.close() - self.fp = open( self.filename, 'rb' ) - self.fp.seek( currentPointer ) + self.fp = open(self.filename, 'rb') + self.fp.seek(currentPointer) - self.fileSize = os.path.getsize( self.filename ) + self.fileSize = os.path.getsize(self.filename) currentSize = self.fileSize - currentPointer - if ( currentSize >= neededSize ): + if (currentSize >= neededSize): return 1 - print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) - sleep( self.delay ) + print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1) + sleep(self.delay) return 0 @@ -961,7 +981,7 @@ class JRODataReader(JRODataIO): csize = self.fileSize - self.fp.tell() blocksize = self.processingHeaderObj.blockSize - #salta el primer bloque de datos + # salta el primer bloque de datos if csize > self.processingHeaderObj.blockSize: self.fp.seek(self.fp.tell() + blocksize) else: @@ -971,7 +991,7 @@ class JRODataReader(JRODataIO): neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize while True: - if self.fp.tell()= neededSize): @@ -1018,11 +1038,11 @@ class JRODataReader(JRODataIO): if self.__waitNewBlock(): self.lastUTTime = self.basicHeaderObj.utc return 1 - #if self.server is None: + # if self.server is None: if not(self.setNextFile()): return 0 - deltaTime = self.basicHeaderObj.utc - self.lastUTTime # + deltaTime = self.basicHeaderObj.utc - self.lastUTTime self.lastUTTime = self.basicHeaderObj.utc self.flagDiscontinuousBlock = 0 @@ -1034,29 +1054,27 @@ class JRODataReader(JRODataIO): def readNextBlock(self): - #Skip block out of startTime and endTime - while True: - if not(self.__setNewBlock()): + # Skip block out of startTime and endTime + while True: + if not(self.__setNewBlock()): return 0 - + if not(self.readBlock()): return 0 self.getBasicHeader() - - if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime): - - print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks, - self.processingHeaderObj.dataBlocksPerFile, - self.dataOut.datatime.ctime()) + if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)): + print "[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks, + self.processingHeaderObj.dataBlocksPerFile, + self.dataOut.datatime.ctime()) continue break if self.verbose: - print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks, - self.processingHeaderObj.dataBlocksPerFile, - self.dataOut.datatime.ctime()) + print "[Reading] Block No. 
%d/%d -> %s" % (self.nReadBlocks, + self.processingHeaderObj.dataBlocksPerFile, + self.dataOut.datatime.ctime()) return 1 def __readFirstHeader(self): @@ -1068,27 +1086,30 @@ class JRODataReader(JRODataIO): self.firstHeaderSize = self.basicHeaderObj.size - datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) + datatype = int(numpy.log2((self.processingHeaderObj.processFlags & + PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR)) if datatype == 0: - datatype_str = numpy.dtype([('real',' 0: - filesList = sorted( filesList, key=str.lower ) + filesList = os.listdir(fullpath) + if len(filesList) > 0: + filesList = sorted(filesList, key=str.lower) filen = filesList[-1] # el filename debera tener el siguiente formato # 0 1234 567 89A BCDE (hex) # x YYYY DDD SSS .ext - if isNumber( filen[8:11] ): - setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file + if isNumber(filen[8:11]): + # inicializo mi contador de seteo al seteo del ultimo file + setFile = int(filen[8:11]) else: setFile = -1 else: - setFile = -1 #inicializo mi contador de seteo + setFile = -1 # inicializo mi contador de seteo setFile += 1 - #If this is a new day it resets some values + # If this is a new day it resets some values if self.dataOut.datatime.date() > self.fileDate: setFile = 0 self.nTotalBlocks = 0 - filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext ) + filen = '%s%4.4d%3.3d%3.3d%s' % ( + self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext) - filename = os.path.join( path, subfolder, filen ) + filename = os.path.join(path, subfolder, filen) - fp = open( filename,'wb' ) + fp = open(filename, 'wb') self.blockIndex = 0 - #guardando atributos + # guardando atributos self.filename = filename self.subfolder = subfolder self.fp = fp @@ -1738,7 +1760,7 @@ class JRODataWriter(JRODataIO): self.setFirstHeader() - print '[Writing] Opening file: %s'%self.filename + print '[Writing] Opening file: %s' % self.filename self.__writeFirstHeader() @@ -1783,7 +1805,7 @@ class JRODataWriter(JRODataIO): self.dataOut = dataOut self.fileDate = self.dataOut.datatime.date() - #By default + # By default self.dtype = self.dataOut.dtype if datatype is not None: @@ -1801,7 +1823,8 @@ class JRODataWriter(JRODataIO): if not(self.isConfig): - self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs) + self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, + set=set, ext=ext, datatype=datatype, **kwargs) self.isConfig = True self.putData() diff --git a/schainpy/model/io/jroIO_bltr.py b/schainpy/model/io/jroIO_bltr.py index fe46699..4049dd2 100644 --- a/schainpy/model/io/jroIO_bltr.py +++ b/schainpy/model/io/jroIO_bltr.py @@ -1,4 +1,5 @@ -import os, sys +import os +import sys import glob import fnmatch import datetime @@ -6,7 +7,6 @@ import time import re import h5py import numpy -import matplotlib.pyplot as plt import pylab as plb from scipy.optimize import curve_fit @@ -33,73 +33,71 @@ from jroIO_base import JRODataReader class Header(object): - + def __init__(self): raise NotImplementedError - - + def read(self): - + raise NotImplementedError - + def write(self): - + raise NotImplementedError - + def printInfo(self): - - message = "#"*50 + "\n" + + message = "#" * 50 + "\n" message += self.__class__.__name__.upper() + "\n" - message += "#"*50 + "\n" - + message += "#" * 50 + "\n" + keyList = 
self.__dict__.keys() keyList.sort() - + for key in keyList: - message += "%s = %s" %(key, self.__dict__[key]) + "\n" - + message += "%s = %s" % (key, self.__dict__[key]) + "\n" + if "size" not in keyList: attr = getattr(self, "size") - - if attr: - message += "%s = %s" %("size", attr) + "\n" - - #print message + if attr: + message += "%s = %s" % ("size", attr) + "\n" + # print message +FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes + ('FileMgcNumber', ' vertical) + ('BeamAngleZen', ' vertical) - ('AntennaCoord0',' endFp: - sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp) + sys.stderr.write( + "Warning %s: Size value read from System Header is lower than it has to be\n" % fp) return 0 - + if OffRHeader < endFp: - sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp) + sys.stderr.write( + "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp) return 0 - + return 1 - + class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader): - + path = None startDate = None endDate = None @@ -435,28 +465,25 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa endTime = None walk = None isConfig = False - - - fileList= None - - #metadata - TimeZone= None - Interval= None - heightList= None - - #data - data= None - utctime= None - - - + + fileList = None + + # metadata + TimeZone = None + Interval = None + heightList = None + + # data + data = None + utctime = None + def __init__(self, **kwargs): - - #Eliminar de la base la herencia + + # Eliminar de la base la herencia ProcessingUnit.__init__(self, **kwargs) - + #self.isConfig = False - + #self.pts2read_SelfSpectra = 0 #self.pts2read_CrossSpectra = 0 #self.pts2read_DCchannels = 0 @@ -464,60 +491,59 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa self.utc = None self.ext = ".fdt" self.optchar = "P" - self.fpFile=None + self.fpFile = None self.fp = None - self.BlockCounter=0 + self.BlockCounter = 0 self.dtype = None self.fileSizeByHeader = None self.filenameList = [] self.fileSelector = 0 - self.Off2StartNxtRec=0 - self.RecCounter=0 + self.Off2StartNxtRec = 0 + self.RecCounter = 0 self.flagNoMoreFiles = 0 - self.data_spc=None - self.data_cspc=None - self.data_output=None + self.data_spc = None + self.data_cspc = None + self.data_output = None self.path = None - self.OffsetStartHeader=0 - self.Off2StartData=0 + self.OffsetStartHeader = 0 + self.Off2StartData = 0 self.ipp = 0 - self.nFDTdataRecors=0 + self.nFDTdataRecors = 0 self.blocksize = 0 self.dataOut = Spectra() - self.profileIndex = 1 #Always - self.dataOut.flagNoData=False + self.profileIndex = 1 # Always + self.dataOut.flagNoData = False self.dataOut.nRdPairs = 0 self.dataOut.pairsList = [] - self.dataOut.data_spc=None - self.dataOut.noise=[] - self.dataOut.velocityX=[] - self.dataOut.velocityY=[] - self.dataOut.velocityV=[] - - + self.dataOut.data_spc = None + self.dataOut.noise = [] + self.dataOut.velocityX = [] + self.dataOut.velocityY = [] + self.dataOut.velocityV = [] def Files2Read(self, fp): ''' Function that indicates the number of .fdt files that exist in the folder to be read. It also creates an organized list with the names of the files to read. 
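        Framework plumbing aside, this is roughly equivalent to:

            FileList = sorted(f for f in os.listdir(fp) if '.fdt' in f)

        after which self.filenameList holds the .fdt names sorted by name
        and nFiles is simply len(FileList).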
''' - #self.__checkPath() - - ListaData=os.listdir(fp) #Gets the list of files within the fp address - ListaData=sorted(ListaData) #Sort the list of files from least to largest by names - nFiles=0 #File Counter - FileList=[] #A list is created that will contain the .fdt files - for IndexFile in ListaData : - if '.fdt' in IndexFile: + # self.__checkPath() + + # Gets the list of files within the fp address + ListaData = os.listdir(fp) + # Sort the list of files from least to largest by names + ListaData = sorted(ListaData) + nFiles = 0 # File Counter + FileList = [] # A list is created that will contain the .fdt files + for IndexFile in ListaData: + if '.fdt' in IndexFile: FileList.append(IndexFile) - nFiles+=1 - - #print 'Files2Read' - #print 'Existen '+str(nFiles)+' archivos .fdt' - - self.filenameList=FileList #List of files from least to largest by names - - + nFiles += 1 + + # print 'Files2Read' + # print 'Existen '+str(nFiles)+' archivos .fdt' + + self.filenameList = FileList # List of files from least to largest by names + def run(self, **kwargs): ''' This method will be the one that will initiate the data entry, will be called constantly. @@ -527,341 +553,350 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa if not self.isConfig: self.setup(**kwargs) self.isConfig = True - + self.getData() - #print 'running' - - + # print 'running' + def setup(self, path=None, - startDate=None, - endDate=None, - startTime=None, - endTime=None, - walk=True, - timezone='utc', - code = None, - online=False, - ReadMode=None, - **kwargs): - + startDate=None, + endDate=None, + startTime=None, + endTime=None, + walk=True, + timezone='utc', + code=None, + online=False, + ReadMode=None, + **kwargs): + self.isConfig = True - - self.path=path - self.startDate=startDate - self.endDate=endDate - self.startTime=startTime - self.endTime=endTime - self.walk=walk - self.ReadMode=int(ReadMode) - + + self.path = path + self.startDate = startDate + self.endDate = endDate + self.startTime = startTime + self.endTime = endTime + self.walk = walk + self.ReadMode = int(ReadMode) + pass - - + def getData(self): ''' Before starting this function, you should check that there is still an unread file, If there are still blocks to read or if the data block is empty. - + You should call the file "read". - + ''' - + if self.flagNoMoreFiles: self.dataOut.flagNoData = True print 'NoData se vuelve true' return 0 - self.fp=self.path + self.fp = self.path self.Files2Read(self.fp) self.readFile(self.fp) self.dataOut.data_spc = self.data_spc - self.dataOut.data_cspc =self.data_cspc - self.dataOut.data_output=self.data_output - + self.dataOut.data_cspc = self.data_cspc + self.dataOut.data_output = self.data_output + print 'self.dataOut.data_output', shape(self.dataOut.data_output) - - #self.removeDC() - return self.dataOut.data_spc - - - def readFile(self,fp): + + # self.removeDC() + return self.dataOut.data_spc + + def readFile(self, fp): ''' You must indicate if you are reading in Online or Offline mode and load the The parameters for this file reading mode. - + Then you must do 2 actions: - + 1. Get the BLTR FileHeader. 2. Start reading the first block. 
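        In outline, the body below does that for the currently selected
        file (self.filenameList[self.fileSelector]):

            fheader = FileHeaderBLTR()
            fheader.FHread(self.fpFile)               # 1. BLTR file header
            self.nFDTdataRecors = fheader.nFDTdataRecors
            self.readBlock()                          # 2. first data block

        and once self.fileSelector runs past self.filenameList it sets
        self.flagNoMoreFiles and self.dataOut.flagNoData instead.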
''' - - #The address of the folder is generated the name of the .fdt file that will be read - print "File: ",self.fileSelector+1 - + + # The address of the folder is generated the name of the .fdt file that will be read + print "File: ", self.fileSelector + 1 + if self.fileSelector < len(self.filenameList): - - self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector]) - #print self.fpFile + + self.fpFile = str(fp) + '/' + \ + str(self.filenameList[self.fileSelector]) + # print self.fpFile fheader = FileHeaderBLTR() - fheader.FHread(self.fpFile) #Bltr FileHeader Reading - self.nFDTdataRecors=fheader.nFDTdataRecors - - self.readBlock() #Block reading + fheader.FHread(self.fpFile) # Bltr FileHeader Reading + self.nFDTdataRecors = fheader.nFDTdataRecors + + self.readBlock() # Block reading else: print 'readFile FlagNoData becomes true' - self.flagNoMoreFiles=True + self.flagNoMoreFiles = True self.dataOut.flagNoData = True - return 0 - + return 0 + def getVelRange(self, extrapoints=0): - Lambda= SPEED_OF_LIGHT/50000000 - PRF = self.dataOut.PRF#1./(self.dataOut.ippSeconds * self.dataOut.nCohInt) - Vmax=-Lambda/(4.*(1./PRF)*self.dataOut.nCohInt*2.) - deltafreq = PRF / (self.nProfiles) - deltavel = (Vmax*2) / (self.nProfiles) - freqrange = deltafreq*(numpy.arange(self.nProfiles)-self.nProfiles/2.) - deltafreq/2 - velrange = deltavel*(numpy.arange(self.nProfiles)-self.nProfiles/2.) - return velrange - - def readBlock(self): + Lambda = SPEED_OF_LIGHT / 50000000 + # 1./(self.dataOut.ippSeconds * self.dataOut.nCohInt) + PRF = self.dataOut.PRF + Vmax = -Lambda / (4. * (1. / PRF) * self.dataOut.nCohInt * 2.) + deltafreq = PRF / (self.nProfiles) + deltavel = (Vmax * 2) / (self.nProfiles) + freqrange = deltafreq * \ + (numpy.arange(self.nProfiles) - self.nProfiles / 2.) - deltafreq / 2 + velrange = deltavel * \ + (numpy.arange(self.nProfiles) - self.nProfiles / 2.) + return velrange + + def readBlock(self): ''' It should be checked if the block has data, if it is not passed to the next file. - + Then the following is done: - + 1. Read the RecordHeader 2. Fill the buffer with the current block number. - + ''' - - if self.BlockCounter < self.nFDTdataRecors-2: + + if self.BlockCounter < self.nFDTdataRecors - 2: print self.nFDTdataRecors, 'CONDICION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' 
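# A standalone sketch of the Doppler axis that getVelRange() above derives from
# the record header.  The PRF, nCohInt and nProfiles values are illustrative
# assumptions (not numbers read from any .fdt file), and SPEED_OF_LIGHT is
# approximated locally rather than imported from the schainpy constants.

import numpy

SPEED_OF_LIGHT = 3e8                        # m/s, close enough for the sketch
PRF, nCohInt, nProfiles = 1000.0, 2, 64     # hypothetical acquisition settings
Lambda = SPEED_OF_LIGHT / 50000000          # ~6 m wavelength for a 50 MHz radar
Vmax = -Lambda / (4. * (1. / PRF) * nCohInt * 2.)   # same expression as getVelRange
deltavel = (Vmax * 2) / nProfiles                   # velocity step per FFT bin
velrange = deltavel * (numpy.arange(nProfiles) - nProfiles / 2.)
print 'Vmax = %0.1f m/s, velrange spans %0.1f ... %0.1f m/s' % (
    Vmax, velrange[0], velrange[-1])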
- if self.ReadMode==1: - rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter+1) - elif self.ReadMode==0: + if self.ReadMode == 1: + rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter + 1) + elif self.ReadMode == 0: rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter) - - rheader.RHread(self.fpFile) #Bltr FileHeader Reading - - self.OffsetStartHeader=rheader.OffsetStartHeader - self.RecCounter=rheader.RecCounter - self.Off2StartNxtRec=rheader.Off2StartNxtRec - self.Off2StartData=rheader.Off2StartData - self.nProfiles=rheader.nProfiles - self.nChannels=rheader.nChannels - self.nHeights=rheader.nHeights - self.frequency=rheader.TransmitFrec - self.DualModeIndex=rheader.DualModeIndex - - self.pairsList =[(0,1),(0,2),(1,2)] + + rheader.RHread(self.fpFile) # Bltr FileHeader Reading + + self.OffsetStartHeader = rheader.OffsetStartHeader + self.RecCounter = rheader.RecCounter + self.Off2StartNxtRec = rheader.Off2StartNxtRec + self.Off2StartData = rheader.Off2StartData + self.nProfiles = rheader.nProfiles + self.nChannels = rheader.nChannels + self.nHeights = rheader.nHeights + self.frequency = rheader.TransmitFrec + self.DualModeIndex = rheader.DualModeIndex + + self.pairsList = [(0, 1), (0, 2), (1, 2)] self.dataOut.pairsList = self.pairsList - - self.nRdPairs=len(self.dataOut.pairsList) + + self.nRdPairs = len(self.dataOut.pairsList) self.dataOut.nRdPairs = self.nRdPairs - - self.__firstHeigth=rheader.StartRangeSamp - self.__deltaHeigth=rheader.SampResolution - self.dataOut.heightList= self.__firstHeigth + numpy.array(range(self.nHeights))*self.__deltaHeigth + + self.__firstHeigth = rheader.StartRangeSamp + self.__deltaHeigth = rheader.SampResolution + self.dataOut.heightList = self.__firstHeigth + \ + numpy.array(range(self.nHeights)) * self.__deltaHeigth self.dataOut.channelList = range(self.nChannels) - self.dataOut.nProfiles=rheader.nProfiles - self.dataOut.nIncohInt=rheader.nIncohInt - self.dataOut.nCohInt=rheader.nCohInt - self.dataOut.ippSeconds= 1/float(rheader.PRFhz) - self.dataOut.PRF=rheader.PRFhz - self.dataOut.nFFTPoints=rheader.nProfiles - self.dataOut.utctime=rheader.nUtime - self.dataOut.timeZone=0 - self.dataOut.normFactor= self.dataOut.nProfiles*self.dataOut.nIncohInt*self.dataOut.nCohInt - self.dataOut.outputInterval= self.dataOut.ippSeconds * self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles - - self.data_output=numpy.ones([3,rheader.nHeights])*numpy.NaN + self.dataOut.nProfiles = rheader.nProfiles + self.dataOut.nIncohInt = rheader.nIncohInt + self.dataOut.nCohInt = rheader.nCohInt + self.dataOut.ippSeconds = 1 / float(rheader.PRFhz) + self.dataOut.PRF = rheader.PRFhz + self.dataOut.nFFTPoints = rheader.nProfiles + self.dataOut.utctime = rheader.nUtime + self.dataOut.timeZone = 0 + self.dataOut.normFactor = self.dataOut.nProfiles * \ + self.dataOut.nIncohInt * self.dataOut.nCohInt + self.dataOut.outputInterval = self.dataOut.ippSeconds * \ + self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles + + self.data_output = numpy.ones([3, rheader.nHeights]) * numpy.NaN print 'self.data_output', shape(self.data_output) - self.dataOut.velocityX=[] - self.dataOut.velocityY=[] - self.dataOut.velocityV=[] - + self.dataOut.velocityX = [] + self.dataOut.velocityY = [] + self.dataOut.velocityV = [] + '''Block Reading, the Block Data is received and Reshape is used to give it shape. 
- ''' - - #Procedure to take the pointer to where the date block starts - startDATA = open(self.fpFile,"rb") - OffDATA= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec+self.Off2StartData + ''' + + # Procedure to take the pointer to where the date block starts + startDATA = open(self.fpFile, "rb") + OffDATA = self.OffsetStartHeader + self.RecCounter * \ + self.Off2StartNxtRec + self.Off2StartData startDATA.seek(OffDATA, os.SEEK_SET) - + def moving_average(x, N=2): - return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):] - - def gaus(xSamples,a,x0,sigma): - return a*exp(-(xSamples-x0)**2/(2*sigma**2)) - - def Find(x,value): + return numpy.convolve(x, numpy.ones((N,)) / N)[(N - 1):] + + def gaus(xSamples, a, x0, sigma): + return a * exp(-(xSamples - x0)**2 / (2 * sigma**2)) + + def Find(x, value): for index in range(len(x)): - if x[index]==value: - return index - + if x[index] == value: + return index + def pol2cart(rho, phi): x = rho * numpy.cos(phi) y = rho * numpy.sin(phi) return(x, y) - - - - - if self.DualModeIndex==self.ReadMode: - - self.data_fft = numpy.fromfile( startDATA, [('complex',' 0.0001) : -# -# try: +# +# try: # popt,pcov = curve_fit(gaus,xSamples,yMean,p0=[1,meanGauss,sigma]) -# +# # if numpy.amax(popt)>numpy.amax(yMean)*0.3: # FitGauss=gaus(xSamples,*popt) -# -# else: +# +# else: # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) # print 'Verificador: Dentro', Height # except RuntimeError: -# +# # try: # for j in range(len(ySamples[1])): # yMean2=numpy.append(yMean2,numpy.average([ySamples[1,j],ySamples[2,j]])) @@ -869,7 +904,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # FitGauss=gaus(xSamples,*popt) # print 'Verificador: Exepcion1', Height # except RuntimeError: -# +# # try: # popt,pcov = curve_fit(gaus,xSamples,ySamples[1],p0=[1,meanGauss,sigma]) # FitGauss=gaus(xSamples,*popt) @@ -880,12 +915,12 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # else: # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) # #print 'Verificador: Fuera', Height -# -# -# +# +# +# # Maximun=numpy.amax(yMean) # eMinus1=Maximun*numpy.exp(-1) -# +# # HWpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-eMinus1))) # HalfWidth= xFrec[HWpos] # GCpos=Find(FitGauss, numpy.amax(FitGauss)) @@ -894,39 +929,39 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # #Vpos=Find(FactNorm, min(FactNorm, key=lambda value:abs(value- numpy.mean(FactNorm) ))) # #print 'GCpos',GCpos, numpy.amax(FitGauss), 'HWpos',HWpos # '''****** Getting Fij ******''' -# +# # GaussCenter=xFrec[GCpos] # if (GaussCenter<0 and HalfWidth>0) or (GaussCenter>0 and HalfWidth<0): # Fij=abs(GaussCenter)+abs(HalfWidth)+0.0000001 # else: # Fij=abs(GaussCenter-HalfWidth)+0.0000001 -# +# # '''****** Getting Frecuency range of significant data ******''' -# +# # Rangpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.10))) -# +# # if Rangpos5 and len(FrecRange) 0.: # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, Vzon) #Vmag # #print 'Vmag',Vmag # else: # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, NaN) -# +# # if abs(Vx)<100 and abs(Vx) > 0.: # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, Vmer) #Vang -# #print 'Vang',Vang +# #print 'Vang',Vang # else: # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, NaN) -# +# # if abs(GaussCenter)<2: # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, xFrec[Vpos]) -# +# # else: # 
self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, NaN) -# -# +# +# # # print '********************************************' # # print 'HalfWidth ', HalfWidth # # print 'Maximun ', Maximun @@ -1033,25 +1068,25 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # print 'PhaseSlope ',PhaseSlope[2] # # print '********************************************' # #print 'data_output',shape(self.dataOut.velocityX), shape(self.dataOut.velocityY) -# +# # #print 'self.dataOut.velocityX', len(self.dataOut.velocityX) # #print 'self.dataOut.velocityY', len(self.dataOut.velocityY) # #print 'self.dataOut.velocityV', self.dataOut.velocityV -# +# # self.data_output[0]=numpy.array(self.dataOut.velocityX) # self.data_output[1]=numpy.array(self.dataOut.velocityY) # self.data_output[2]=numpy.array(self.dataOut.velocityV) -# +# # prin= self.data_output[0][~numpy.isnan(self.data_output[0])] # print ' ' -# print 'VmagAverage',numpy.mean(prin) +# print 'VmagAverage',numpy.mean(prin) # print ' ' # # plt.figure(5) # # plt.subplot(211) # # plt.plot(self.dataOut.velocityX,'yo:') # # plt.subplot(212) # # plt.plot(self.dataOut.velocityY,'yo:') -# +# # # plt.figure(1) # # # plt.subplot(121) # # # plt.plot(xFrec,ySamples[0],'k',label='Ch0') @@ -1060,7 +1095,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # # plt.plot(xFrec,FitGauss,'yo:',label='fit') # # # plt.legend() # # plt.title('DATOS A ALTURA DE 2850 METROS') -# # +# # # # plt.xlabel('Frecuencia (KHz)') # # plt.ylabel('Magnitud') # # # plt.subplot(122) @@ -1070,7 +1105,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # plt.plot(xFrec,FactNorm) # # plt.axis([-4, 4, 0, 0.15]) # # # plt.xlabel('SelfSpectra KHz') -# # +# # # # plt.figure(10) # # # plt.subplot(121) # # plt.plot(xFrec,ySamples[0],'b',label='Ch0') @@ -1079,7 +1114,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # # plt.plot(xFrec,FitGauss,'yo:',label='fit') # # plt.legend() # # plt.title('SELFSPECTRA EN CANALES') -# # +# # # # plt.xlabel('Frecuencia (KHz)') # # plt.ylabel('Magnitud') # # # plt.subplot(122) @@ -1089,19 +1124,19 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # # plt.plot(xFrec,FactNorm) # # # plt.axis([-4, 4, 0, 0.15]) # # # plt.xlabel('SelfSpectra KHz') -# # +# # # # plt.figure(9) -# # -# # +# # +# # # # plt.title('DATOS SUAVIZADOS') # # plt.xlabel('Frecuencia (KHz)') # # plt.ylabel('Magnitud') # # plt.plot(xFrec,SmoothSPC,'g') -# # +# # # # #plt.plot(xFrec,FactNorm) # # plt.axis([-4, 4, 0, 0.15]) # # # plt.xlabel('SelfSpectra KHz') -# # # +# # # # # plt.figure(2) # # # #plt.subplot(121) # # plt.plot(xFrec,yMean,'r',label='Mean SelfSpectra') @@ -1115,7 +1150,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # plt.xlabel('Frecuencia (KHz)') # # plt.ylabel('Magnitud') # # plt.legend() -# # # +# # # # # # plt.figure(3) # # # plt.subplot(311) # # # #plt.plot(xFrec,phase[0]) @@ -1125,30 +1160,23 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa # # # plt.subplot(313) # # # plt.plot(xFrec,phase[2],'g') # # # #plt.plot(xFrec,phase[2]) -# # # +# # # # # # plt.figure(4) -# # # +# # # # # # plt.plot(xSamples,coherence[0],'b') # # # plt.plot(xSamples,coherence[1],'r') # # # plt.plot(xSamples,coherence[2],'g') # # plt.show() -# # # +# # # # # # plt.clf() # # # plt.cla() -# # # plt.close() -# -# print ' ' - - - - self.BlockCounter+=2 - +# 
# # plt.close() +# +# print ' ' + + self.BlockCounter += 2 + else: - self.fileSelector+=1 - self.BlockCounter=0 + self.fileSelector += 1 + self.BlockCounter = 0 print "Next File" - - - - - diff --git a/schainpy/model/io/jroIO_digitalRF.py b/schainpy/model/io/jroIO_digitalRF.py new file mode 100644 index 0000000..45233b8 --- /dev/null +++ b/schainpy/model/io/jroIO_digitalRF.py @@ -0,0 +1,751 @@ + +''' +Created on Jul 3, 2014 + +@author: roj-idl71 +''' +# SUBCHANNELS EN VEZ DE CHANNELS +# BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO +# ACTUALIZACION DE VERSION +# HEADERS +# MODULO DE ESCRITURA +# METADATA + +import os +import datetime +import numpy +import timeit +from profilehooks import coverage, profile +from fractions import Fraction + +try: + from gevent import sleep +except: + from time import sleep + +from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader +from schainpy.model.data.jrodata import Voltage +from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation +from time import time + +import cPickle +try: + import digital_rf +except: + print 'You should install "digital_rf" module if you want to read Digital RF data' + +class DigitalRFReader(ProcessingUnit): + ''' + classdocs + ''' + + def __init__(self, **kwargs): + ''' + Constructor + ''' + + ProcessingUnit.__init__(self, **kwargs) + + self.dataOut = Voltage() + self.__printInfo = True + self.__flagDiscontinuousBlock = False + self.__bufferIndex = 9999999 + self.__ippKm = None + self.__codeType = 0 + self.__nCode = None + self.__nBaud = None + self.__code = None + self.dtype = None + + def close(self): + print 'Average of writing to digital rf format is ', self.oldAverage * 1000 + return + + def __getCurrentSecond(self): + + return self.__thisUnixSample/self.__sample_rate + + thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.") + + def __setFileHeader(self): + ''' + In this method will be initialized every parameter of dataOut object (header, no data) + ''' + ippSeconds = 1.0*self.__nSamples/self.__sample_rate + + nProfiles = 1.0/ippSeconds # Number of profiles in one second + + try: + self.dataOut.radarControllerHeaderObj = RadarControllerHeader(self.__radarControllerHeader) + except: + self.dataOut.radarControllerHeaderObj = RadarControllerHeader( + txA=0, + txB=0, + nWindows=1, + nHeights=self.__nSamples, + firstHeight=self.__firstHeigth, + deltaHeight=self.__deltaHeigth, + codeType=self.__codeType, + nCode=self.__nCode, nBaud=self.__nBaud, + code = self.__code) + + try: + self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader) + except: + self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples, + nProfiles=nProfiles, + nChannels=len(self.__channelList), + adcResolution=14) + self.dataOut.type = "Voltage" + + self.dataOut.data = None + + self.dataOut.dtype = self.dtype + + # self.dataOut.nChannels = 0 + + # self.dataOut.nHeights = 0 + + self.dataOut.nProfiles = int(nProfiles) + + self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth + + self.dataOut.channelList = range(self.__num_subchannels) + + self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights() + + # self.dataOut.channelIndexList = None + + self.dataOut.flagNoData = True + + self.dataOut.flagDataAsBlock = False + # Set to TRUE if the data is discontinuous + self.dataOut.flagDiscontinuousBlock = False + + self.dataOut.utctime = None + + self.dataOut.timeZone = 
self.__timezone/60 # timezone like jroheader, difference in minutes between UTC and localtime + + self.dataOut.dstFlag = 0 + + self.dataOut.errorCount = 0 + + try: + self.dataOut.nCohInt = self.fixed_metadata_dict.get('nCohInt', 1) + + self.dataOut.flagDecodeData = self.fixed_metadata_dict['flagDecodeData'] # asumo que la data esta decodificada + + self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData'] # asumo que la data esta sin flip + + self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT'] + + self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime'] + except: + pass + + + self.dataOut.ippSeconds = ippSeconds + + # Time interval between profiles + # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt + + self.dataOut.frequency = self.__frequency + + self.dataOut.realtime = self.__online + + def findDatafiles(self, path, startDate=None, endDate=None): + + if not os.path.isdir(path): + return [] + + try: + digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True) + except: + digitalReadObj = digital_rf.DigitalRFReader(path) + + channelNameList = digitalReadObj.get_channels() + + if not channelNameList: + return [] + + metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0]) + + sample_rate = metadata_dict['sample_rate'][0] + + this_metadata_file = digitalReadObj.get_metadata(channelNameList[0]) + + try: + timezone = this_metadata_file['timezone'].value + except: + timezone = 0 + + startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone + + startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond) + endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond) + + if not startDate: + startDate = startDatetime.date() + + if not endDate: + endDate = endDatatime.date() + + dateList = [] + + thisDatetime = startDatetime + + while(thisDatetime<=endDatatime): + + thisDate = thisDatetime.date() + + if thisDate < startDate: + continue + + if thisDate > endDate: + break + + dateList.append(thisDate) + thisDatetime += datetime.timedelta(1) + + return dateList + + def setup(self, path = None, + startDate = None, + endDate = None, + startTime = datetime.time(0,0,0), + endTime = datetime.time(23,59,59), + channelList = None, + nSamples = None, + online = False, + delay = 60, + buffer_size = 1024, + ippKm=None, + **kwargs): + ''' + In this method we should set all initial parameters. + + Inputs: + path + startDate + endDate + startTime + endTime + set + expLabel + ext + online + delay + ''' + self.i = 0 + if not os.path.isdir(path): + raise ValueError, "[Reading] Directory %s does not exist" %path + + try: + self.digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True) + except: + self.digitalReadObj = digital_rf.DigitalRFReader(path) + + channelNameList = self.digitalReadObj.get_channels() + + if not channelNameList: + raise ValueError, "[Reading] Directory %s does not have any files" %path + + if not channelList: + channelList = range(len(channelNameList)) + + + ########## Reading metadata ###################### + + top_properties = self.digitalReadObj.get_properties(channelNameList[channelList[0]]) + + + self.__num_subchannels = top_properties['num_subchannels'] + self.__sample_rate = 1.0 * top_properties['sample_rate_numerator'] / top_properties['sample_rate_denominator'] + # self.__samples_per_file = top_properties['samples_per_file'][0] + self.__deltaHeigth = 1e6*0.15/self.__sample_rate ## why 0.15? 
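# On the "why 0.15?" question above: 0.15 km per microsecond is c/2 (with
# c ~ 3e8 m/s), so 1e6*0.15/sample_rate appears to be the two-way range-gate
# spacing c/(2*fs) expressed in km, consistent with the km-based height lists
# used elsewhere in this module.  A quick numeric check with an assumed sample
# rate (not a value read from any Digital RF metadata):

sample_rate = 1.0e6                             # 1 MHz sampling, hypothetical
deltaHeight_km = 1e6 * 0.15 / sample_rate       # expression used above -> 0.15 km
assert abs(deltaHeight_km - 3e8 / (2.0 * sample_rate) / 1e3) < 1e-12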
+ + this_metadata_file = self.digitalReadObj.get_digital_metadata(channelNameList[channelList[0]]) + metadata_bounds = this_metadata_file.get_bounds() + self.fixed_metadata_dict = this_metadata_file.read(metadata_bounds[0])[metadata_bounds[0]] ## GET FIRST HEADER + + try: + self.__processingHeader = self.fixed_metadata_dict['processingHeader'] + self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader'] + self.__systemHeader = self.fixed_metadata_dict['systemHeader'] + self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype']) + except: + pass + + + self.__frequency = None + + self.__frequency = self.fixed_metadata_dict.get('frequency', 1) + + self.__timezone = self.fixed_metadata_dict.get('timezone', 300) + + + try: + nSamples = self.fixed_metadata_dict['nSamples'] + except: + nSamples = None + + self.__firstHeigth = 0 + + try: + codeType = self.__radarControllerHeader['codeType'] + except: + codeType = 0 + + nCode = 1 + nBaud = 1 + code = numpy.ones((nCode, nBaud), dtype=numpy.int) + + try: + if codeType: + nCode = self.__radarControllerHeader['nCode'] + nBaud = self.__radarControllerHeader['nBaud'] + code = self.__radarControllerHeader['code'] + except: + pass + + + if not ippKm: + try: + # seconds to km + ippKm = self.__radarControllerHeader['ipp'] + except: + ippKm = None + #################################################### + self.__ippKm = ippKm + startUTCSecond = None + endUTCSecond = None + + if startDate: + startDatetime = datetime.datetime.combine(startDate, startTime) + startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone + + if endDate: + endDatetime = datetime.datetime.combine(endDate, endTime) + endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone + + start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]]) + + if not startUTCSecond: + startUTCSecond = start_index/self.__sample_rate + + if start_index > startUTCSecond*self.__sample_rate: + startUTCSecond = start_index/self.__sample_rate + + if not endUTCSecond: + endUTCSecond = end_index/self.__sample_rate + + if end_index < endUTCSecond*self.__sample_rate: + endUTCSecond = end_index/self.__sample_rate + if not nSamples: + if not ippKm: + raise ValueError, "[Reading] nSamples or ippKm should be defined" + nSamples = int(ippKm / (1e6*0.15/self.__sample_rate)) + channelBoundList = [] + channelNameListFiltered = [] + + for thisIndexChannel in channelList: + thisChannelName = channelNameList[thisIndexChannel] + start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName) + channelBoundList.append((start_index, end_index)) + channelNameListFiltered.append(thisChannelName) + + self.profileIndex = 0 + self.i= 0 + self.__delay = delay + + self.__codeType = codeType + self.__nCode = nCode + self.__nBaud = nBaud + self.__code = code + + self.__datapath = path + self.__online = online + self.__channelList = channelList + self.__channelNameList = channelNameListFiltered + self.__channelBoundList = channelBoundList + self.__nSamples = nSamples + self.__samples_to_read = long(nSamples) # FIJO: AHORA 40 + self.__nChannels = len(self.__channelList) + + self.__startUTCSecond = startUTCSecond + self.__endUTCSecond = endUTCSecond + + self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate # Time interval + + if online: + # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read) + startUTCSecond = numpy.floor(endUTCSecond) + + self.__thisUnixSample = 
long(startUTCSecond*self.__sample_rate) - self.__samples_to_read ## por que en el otro metodo lo primero q se hace es sumar samplestoread + + self.__data_buffer = numpy.zeros((self.__num_subchannels, self.__samples_to_read), dtype = numpy.complex) + + self.__setFileHeader() + self.isConfig = True + + print "[Reading] Digital RF Data was found from %s to %s " %( + datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), + datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) + ) + + print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), + datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone) + ) + self.oldAverage = None + self.count = 0 + self.executionTime = 0 + def __reload(self): + # print + # print "%s not in range [%s, %s]" %( + # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), + # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) + # ) + print "[Reading] reloading metadata ..." + + try: + self.digitalReadObj.reload(complete_update=True) + except: + self.digitalReadObj.reload() + + start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]]) + + if start_index > self.__startUTCSecond*self.__sample_rate: + self.__startUTCSecond = 1.0*start_index/self.__sample_rate + + if end_index > self.__endUTCSecond*self.__sample_rate: + self.__endUTCSecond = 1.0*end_index/self.__sample_rate + print + print "[Reading] New timerange found [%s, %s] " %( + datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), + datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) + ) + + return True + + return False + + def timeit(self, toExecute): + t0 = time() + toExecute() + self.executionTime = time() - t0 + if self.oldAverage is None: self.oldAverage = self.executionTime + self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0) + self.count = self.count + 1.0 + return + + def __readNextBlock(self, seconds=30, volt_scale = 1): + ''' + ''' + + # Set the next data + self.__flagDiscontinuousBlock = False + self.__thisUnixSample += self.__samples_to_read + + if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate: + print "[Reading] There are no more data into selected time-range" + if self.__online: + self.__reload() + else: + return False + + if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate: + return False + self.__thisUnixSample -= self.__samples_to_read + + indexChannel = 0 + + dataOk = False + for thisChannelName in self.__channelNameList: ##TODO VARIOS CHANNELS? 
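# The per-read timing accumulated in the loop below follows the same
# incremental-mean update as timeit() above, avg_n = (x_n + (n-1)*avg_{n-1})/n,
# i.e. a running arithmetic mean of the read times.  A tiny standalone check
# with made-up timings (the numbers are illustrative only):

times = [0.10, 0.30, 0.20]
oldAverage, count = None, 0.0
for t in times:
    if oldAverage is None:
        oldAverage = t
    oldAverage = (t + count * oldAverage) / (count + 1.0)
    count += 1.0
print oldAverage, sum(times) / len(times)       # both ~0.2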
+ for indexSubchannel in range(self.__num_subchannels): + try: + t0 = time() + result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample, + self.__samples_to_read, + thisChannelName, sub_channel=indexSubchannel) + self.executionTime = time() - t0 + if self.oldAverage is None: self.oldAverage = self.executionTime + self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0) + self.count = self.count + 1.0 + + except IOError, e: + #read next profile + self.__flagDiscontinuousBlock = True + print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e + break + + if result.shape[0] != self.__samples_to_read: + self.__flagDiscontinuousBlock = True + print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + result.shape[0], + self.__samples_to_read) + break + + self.__data_buffer[indexSubchannel,:] = result*volt_scale + + indexChannel += 1 + + dataOk = True + + self.__utctime = self.__thisUnixSample/self.__sample_rate + + if not dataOk: + return False + + print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + self.__samples_to_read, + self.__timeInterval) + + self.__bufferIndex = 0 + + return True + + def __isBufferEmpty(self): + return self.__bufferIndex > self.__samples_to_read - self.__nSamples #40960 - 40 + + def getData(self, seconds=30, nTries=5): + + ''' + This method gets the data from files and put the data into the dataOut object + + In addition, increase el the buffer counter in one. + + Return: + data : retorna un perfil de voltages (alturas * canales) copiados desde el + buffer. Si no hay mas archivos a leer retorna None. + + Affected: + self.dataOut + self.profileIndex + self.flagDiscontinuousBlock + self.flagIsNewBlock + ''' + + err_counter = 0 + self.dataOut.flagNoData = True + + if self.__isBufferEmpty(): + self.__flagDiscontinuousBlock = False + + while True: + if self.__readNextBlock(): + break + if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate: + return False + + if self.__flagDiscontinuousBlock: + print '[Reading] discontinuous block found ... 
continue with the next block' + continue + + if not self.__online: + return False + + err_counter += 1 + if err_counter > nTries: + return False + + print '[Reading] waiting %d seconds to read a new block' %seconds + sleep(seconds) + + self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples] + self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate + self.dataOut.flagNoData = False + self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock + self.dataOut.profileIndex = self.profileIndex + + self.__bufferIndex += self.__nSamples + self.profileIndex += 1 + + if self.profileIndex == self.dataOut.nProfiles: + self.profileIndex = 0 + + return True + + def printInfo(self): + ''' + ''' + if self.__printInfo == False: + return + + # self.systemHeaderObj.printInfo() + # self.radarControllerHeaderObj.printInfo() + + self.__printInfo = False + + def printNumberOfBlock(self): + ''' + ''' + return + # print self.profileIndex + + + def run(self, **kwargs): + ''' + This method will be called many times so here you should put all your code + ''' + + if not self.isConfig: + self.setup(**kwargs) + #self.i = self.i+1 + self.getData(seconds=self.__delay) + + return + +class DigitalRFWriter(Operation): + ''' + classdocs + ''' + + def __init__(self, **kwargs): + ''' + Constructor + ''' + Operation.__init__(self, **kwargs) + self.metadata_dict = {} + self.dataOut = None + self.dtype = None + + def setHeader(self): + + self.metadata_dict['frequency'] = self.dataOut.frequency + self.metadata_dict['timezone'] = self.dataOut.timeZone + self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype) + self.metadata_dict['nProfiles'] = self.dataOut.nProfiles + self.metadata_dict['heightList'] = self.dataOut.heightList + self.metadata_dict['channelList'] = self.dataOut.channelList + self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData + self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData + self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT + self.metadata_dict['flagDataAsBlock'] = self.dataOut.flagDataAsBlock + self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime + self.metadata_dict['nCohInt'] = self.dataOut.nCohInt + + return + + def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'): + ''' + In this method we should set all initial parameters. 
+ Input: + dataOut: Input data will also be outputa data + ''' + self.setHeader() + self.__ippSeconds = dataOut.ippSeconds + self.__deltaH = dataOut.getDeltaH() + self.__sample_rate = 1e6*0.15/self.__deltaH + self.__dtype = dataOut.dtype + if len(dataOut.dtype) == 2: + self.__dtype = dataOut.dtype[0] + self.__nSamples = dataOut.systemHeaderObj.nSamples + self.__nProfiles = dataOut.nProfiles + self.__blocks_per_file = dataOut.processingHeaderObj.dataBlocksPerFile + + self.arr_data = arr_data = numpy.ones((self.__nSamples, len(self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)]) + + file_cadence_millisecs = long(1.0 * self.__blocks_per_file * self.__nProfiles * self.__nSamples / self.__sample_rate) * 1000 + sub_cadence_secs = file_cadence_millisecs / 500 + + sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator() + sample_rate_numerator = long(sample_rate_fraction.numerator) + sample_rate_denominator = long(sample_rate_fraction.denominator) + start_global_index = dataOut.utctime * self.__sample_rate + + uuid = 'prueba' + compression_level = 1 + checksum = False + is_complex = True + num_subchannels = len(dataOut.channelList) + is_continuous = True + marching_periods = False + + self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence, + fileCadence, start_global_index, + sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum, + is_complex, num_subchannels, is_continuous, marching_periods) + + metadata_dir = os.path.join(path, 'metadata') + os.system('mkdir %s' % (metadata_dir)) + + self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, ##236, file_cadence_millisecs / 1000 + sample_rate_numerator, sample_rate_denominator, + metadataFile) + + + self.isConfig = True + self.currentSample = 0 + self.oldAverage = 0 + self.count = 0 + return + + def writeMetadata(self): + print '[Writing] - Writing metadata' + start_idx = self.__sample_rate * self.dataOut.utctime + + self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict() + self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict() + self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict() + self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict) + return + + + def timeit(self, toExecute): + t0 = time() + toExecute() + self.executionTime = time() - t0 + if self.oldAverage is None: self.oldAverage = self.executionTime + self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0) + self.count = self.count + 1.0 + return + + + def writeData(self): + for i in range(self.dataOut.systemHeaderObj.nSamples): + for channel in self.dataOut.channelList: + self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real + self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag + + def f(): return self.digitalWriteObj.rf_write(self.arr_data) + self.timeit(f) + + return + + def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=100, dirCadence=25, metadataCadence=1, **kwargs): + ''' + This method will be called many times so here you should put all your code + Inputs: + dataOut: object with the data + ''' + # print dataOut.__dict__ + self.dataOut = dataOut + if not self.isConfig: + self.setup(dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, **kwargs) + self.writeMetadata() + + self.writeData() + + ## self.currentSample += 1 + ## if self.dataOut.flagDataAsBlock or 
self.currentSample == 1: + ## self.writeMetadata() + ## if self.currentSample == self.__nProfiles: self.currentSample = 0 + + def close(self): + print '[Writing] - Closing files ' + print 'Average of writing to digital rf format is ', self.oldAverage * 1000 + try: + self.digitalWriteObj.close() + except: + pass + + # raise +if __name__ == '__main__': + + readObj = DigitalRFReader() + + while True: + readObj.run(path='/home/jchavez/jicamarca/mocked_data/') + # readObj.printInfo() + # readObj.printNumberOfBlock() diff --git a/schainpy/model/io/jroIO_mira35c.py b/schainpy/model/io/jroIO_mira35c.py index 632bc93..684b106 100644 --- a/schainpy/model/io/jroIO_mira35c.py +++ b/schainpy/model/io/jroIO_mira35c.py @@ -1,4 +1,5 @@ -import os, sys +import os +import sys import glob import fnmatch import datetime @@ -6,11 +7,9 @@ import time import re import h5py import numpy -import matplotlib.pyplot as plt -import pylab as plb from scipy.optimize import curve_fit -from scipy import asarray as ar,exp +from scipy import asarray as ar, exp from scipy import stats from numpy.ma.core import getdata @@ -30,152 +29,168 @@ from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation from numpy import imag, shape, NaN, empty - class Header(object): - + def __init__(self): raise NotImplementedError - - + def read(self): - + raise NotImplementedError - + def write(self): - + raise NotImplementedError - + def printInfo(self): - - message = "#"*50 + "\n" + + message = "#" * 50 + "\n" message += self.__class__.__name__.upper() + "\n" - message += "#"*50 + "\n" - + message += "#" * 50 + "\n" + keyList = self.__dict__.keys() keyList.sort() - + for key in keyList: - message += "%s = %s" %(key, self.__dict__[key]) + "\n" - + message += "%s = %s" % (key, self.__dict__[key]) + "\n" + if "size" not in keyList: attr = getattr(self, "size") - + if attr: - message += "%s = %s" %("size", attr) + "\n" - - #print message - - -FILE_HEADER = numpy.dtype([ #HEADER 1024bytes - ('Hname','a32'), #Original file name - ('Htime',numpy.str_,32), #Date and time when the file was created - ('Hoper',numpy.str_,64), #Name of operator who created the file - ('Hplace',numpy.str_,128), #Place where the measurements was carried out - ('Hdescr',numpy.str_,256), #Description of measurements - ('Hdummy',numpy.str_,512), #Reserved space - #Main chunk 8bytes - ('Msign',numpy.str_,4), #Main chunk signature FZKF or NUIG - ('MsizeData','=5 - ('SPARrawGate2','=5 + ('SPARrawGate1', ' 1180: - self.fp.seek(self.PointerReader , os.SEEK_SET) + self.fp.seek(self.PointerReader, os.SEEK_SET) self.FirstPoint = self.PointerReader - - else : + + else: self.FirstPoint = 1180 - - - + self.srviHeader = SRVIHeader() - - self.srviHeader.SRVIread(self.fp) #Se obtiene la cabecera del SRVI - - self.blocksize = self.srviHeader.SizeOfDataBlock1 # Se obtiene el tamao del bloque - + + self.srviHeader.SRVIread(self.fp) # Se obtiene la cabecera del SRVI + + self.blocksize = self.srviHeader.SizeOfDataBlock1 # Se obtiene el tamao del bloque + if self.blocksize == 148: print 'blocksize == 148 bug' - jump = numpy.fromfile(self.fp,[('jump',numpy.str_,140)] ,1) - - self.srviHeader.SRVIread(self.fp) #Se obtiene la cabecera del SRVI - + jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1) + + # Se obtiene la cabecera del SRVI + self.srviHeader.SRVIread(self.fp) + if not self.srviHeader.SizeOfSRVI1: - self.fileSelector+=1 - self.nextfileflag==True + self.fileSelector += 1 + self.nextfileflag == True self.FileHeaderFlag == True - + self.recordheader = RecordHeader() 
self.recordheader.RHread(self.fp) self.RadarConst = self.recordheader.RadarConst dwell = self.recordheader.time_t npw1 = self.recordheader.npw1 npw2 = self.recordheader.npw2 - - + self.dataOut.channelList = range(1) - self.dataOut.nIncohInt = self.Num_inCoh + self.dataOut.nIncohInt = self.Num_inCoh self.dataOut.nProfiles = self.Num_Bins self.dataOut.nCohInt = 1 self.dataOut.windowOfFilter = 1 self.dataOut.utctime = dwell - self.dataOut.timeZone=0 - + self.dataOut.timeZone = 0 + self.dataOut.outputInterval = self.dataOut.getTimeInterval() - self.dataOut.heightList = self.SPARrawGate1*self.__deltaHeigth + numpy.array(range(self.Num_Hei))*self.__deltaHeigth - - - - self.HSDVsign = numpy.fromfile( self.fp, [('HSDV',numpy.str_,4)],1) - self.SizeHSDV = numpy.fromfile( self.fp, [('SizeHSDV',' 0. , self.data_spc, 0) - - self.dataOut.COFA = numpy.array([self.COFA_Co , self.COFA_Cx]) - + + self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0) + + self.dataOut.COFA = numpy.array([self.COFA_Co, self.COFA_Cx]) + print ' ' - print 'SPC',numpy.shape(self.dataOut.data_spc) - #print 'SPC',self.dataOut.data_spc - + print 'SPC', numpy.shape(self.dataOut.data_spc) + # print 'SPC',self.dataOut.data_spc + noinor1 = 713031680 noinor2 = 30 - - npw1 = 1#0**(npw1/10) * noinor1 * noinor2 - npw2 = 1#0**(npw2/10) * noinor1 * noinor2 + + npw1 = 1 # 0**(npw1/10) * noinor1 * noinor2 + npw2 = 1 # 0**(npw2/10) * noinor1 * noinor2 self.dataOut.NPW = numpy.array([npw1, npw2]) - + print ' ' - - self.data_spc = numpy.transpose(self.data_spc, (2,1,0)) - self.data_spc = numpy.fft.fftshift(self.data_spc, axes = 1) - + + self.data_spc = numpy.transpose(self.data_spc, (2, 1, 0)) + self.data_spc = numpy.fft.fftshift(self.data_spc, axes=1) + self.data_spc = numpy.fliplr(self.data_spc) - - self.data_spc = numpy.where(self.data_spc > 0. , self.data_spc, 0) - self.dataOut_spc= numpy.ones([1, self.Num_Bins , self.Num_Hei]) - self.dataOut_spc[0,:,:] = self.data_spc[0,:,:] - #print 'SHAPE', self.dataOut_spc.shape - #For nyquist correction: - #fix = 20 # ~3m/s + + self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0) + self.dataOut_spc = numpy.ones([1, self.Num_Bins, self.Num_Hei]) + self.dataOut_spc[0, :, :] = self.data_spc[0, :, :] + # print 'SHAPE', self.dataOut_spc.shape + # For nyquist correction: + # fix = 20 # ~3m/s #shift = self.Num_Bins/2 + fix #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]]) - - - + '''Block Reading, the Block Data is received and Reshape is used to give it shape. ''' - + self.PointerReader = self.fp.tell() - - - - - - - \ No newline at end of file diff --git a/schainpy/model/io/jroIO_spectra.py b/schainpy/model/io/jroIO_spectra.py index 9daf643..d4f55df 100644 --- a/schainpy/model/io/jroIO_spectra.py +++ b/schainpy/model/io/jroIO_spectra.py @@ -11,7 +11,6 @@ from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, from schainpy.model.data.jrodata import Spectra class SpectraReader(JRODataReader, ProcessingUnit): - """ Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones) @@ -21,7 +20,6 @@ class SpectraReader(JRODataReader, ProcessingUnit): paresCanalesDiferentes * alturas * perfiles (Cross Spectra) canales * alturas (DC Channels) - Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader, RadarControllerHeader y Spectra. 
Los tres primeros se usan para almacenar informacion de la cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de @@ -76,7 +74,6 @@ class SpectraReader(JRODataReader, ProcessingUnit): Inicializador de la clase SpectraReader para la lectura de datos de espectros. Inputs: - dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para almacenar un perfil de datos cada vez que se haga un requerimiento (getData). El perfil sera obtenido a partir del buffer de datos, @@ -84,107 +81,104 @@ class SpectraReader(JRODataReader, ProcessingUnit): bloque de datos. Si este parametro no es pasado se creara uno internamente. - - Affected: - + Affected: self.dataOut Return : None """ - #Eliminar de la base la herencia ProcessingUnit.__init__(self, **kwargs) - + # self.isConfig = False - + self.pts2read_SelfSpectra = 0 - + self.pts2read_CrossSpectra = 0 - + self.pts2read_DCchannels = 0 - + self.datablock = None - + self.utc = None - + self.ext = ".pdata" - + self.optchar = "P" - + self.basicHeaderObj = BasicHeader(LOCALTIME) - + self.systemHeaderObj = SystemHeader() - + self.radarControllerHeaderObj = RadarControllerHeader() - + self.processingHeaderObj = ProcessingHeader() - + self.online = 0 - + self.fp = None - + self.idFile = None - + self.dtype = None - + self.fileSizeByHeader = None - + self.filenameList = [] - + self.filename = None - + self.fileSize = None - + self.firstHeaderSize = 0 - + self.basicHeaderSize = 24 - + self.pathList = [] self.lastUTTime = 0 - + self.maxTimeStep = 30 - + self.flagNoMoreFiles = 0 - + self.set = 0 - + self.path = None self.delay = 60 #seconds - + self.nTries = 3 #quantity tries - + self.nFiles = 3 #number of files for searching - + self.nReadBlocks = 0 - + self.flagIsNewFile = 1 - + self.__isFirstTimeOnline = 1 - + # self.ippSeconds = 0 - - self.flagDiscontinuousBlock = 0 - + + self.flagDiscontinuousBlock = 0 + self.flagIsNewBlock = 0 - + self.nTotalBlocks = 0 - + self.blocksize = 0 - + self.dataOut = self.createObjByDefault() - + self.profileIndex = 1 #Always def createObjByDefault(self): - + dataObj = Spectra() - + return dataObj - + def __hasNotDataInBuffer(self): return 1 @@ -192,7 +186,7 @@ class SpectraReader(JRODataReader, ProcessingUnit): def getBlockDimension(self): """ Obtiene la cantidad de puntos a leer por cada bloque de datos - + Affected: self.nRdChannels self.nRdPairs @@ -212,39 +206,37 @@ class SpectraReader(JRODataReader, ProcessingUnit): for i in range(0, self.processingHeaderObj.totalSpectra*2, 2): if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]: - self.nRdChannels = self.nRdChannels + 1 #par de canales iguales - + self.nRdChannels = self.nRdChannels + 1 #par de canales iguales else: self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1])) pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock - + self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read) self.blocksize = self.pts2read_SelfSpectra - + if self.processingHeaderObj.flag_cspc: self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read) self.blocksize += self.pts2read_CrossSpectra - + if self.processingHeaderObj.flag_dc: self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights) self.blocksize += self.pts2read_DCchannels - + # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + 
self.pts2read_DCchannels - + def readBlock(self): """ Lee el bloque de datos desde la posicion actual del puntero del archivo (self.fp) y actualiza todos los parametros relacionados al bloque de datos (metadata + data). La data leida es almacenada en el buffer y el contador del buffer es seteado a 0 - + Return: None - + Variables afectadas: - self.flagIsNewFile self.flagIsNewBlock @@ -253,10 +245,9 @@ class SpectraReader(JRODataReader, ProcessingUnit): self.data_cspc self.data_dc - Exceptions: + Exceptions: Si un bloque leido no es un bloque valido """ - blockOk_flag = False fpointer = self.fp.tell() @@ -266,32 +257,30 @@ class SpectraReader(JRODataReader, ProcessingUnit): if self.processingHeaderObj.flag_cspc: cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra ) cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D - + if self.processingHeaderObj.flag_dc: dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) ) dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D - - + + if not(self.processingHeaderObj.shif_fft): #desplaza a la derecha en el eje 2 determinadas posiciones shift = int(self.processingHeaderObj.profilesPerBlock/2) spc = numpy.roll( spc, shift , axis=2 ) - + if self.processingHeaderObj.flag_cspc: #desplaza a la derecha en el eje 2 determinadas posiciones cspc = numpy.roll( cspc, shift, axis=2 ) - + #Dimensions : nChannels, nProfiles, nSamples spc = numpy.transpose( spc, (0,2,1) ) self.data_spc = spc - - if self.processingHeaderObj.flag_cspc: + if self.processingHeaderObj.flag_cspc: cspc = numpy.transpose( cspc, (0,2,1) ) self.data_cspc = cspc['real'] + cspc['imag']*1j else: self.data_cspc = None - if self.processingHeaderObj.flag_dc: self.data_dc = dc['real'] + dc['imag']*1j @@ -305,60 +294,60 @@ class SpectraReader(JRODataReader, ProcessingUnit): self.nReadBlocks += 1 return 1 - + def getFirstHeader(self): - + self.getBasicHeader() - + self.dataOut.systemHeaderObj = self.systemHeaderObj.copy() - + self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() - + # self.dataOut.ippSeconds = self.ippSeconds - + # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock self.dataOut.dtype = self.dtype - + # self.dataOut.nPairs = self.nPairs - + self.dataOut.pairsList = self.rdPairList - + self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock - + self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock - + self.dataOut.nCohInt = self.processingHeaderObj.nCohInt - + self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt - + xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight - self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) - + self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) + self.dataOut.channelList = range(self.systemHeaderObj.nChannels) - + self.dataOut.flagShiftFFT = True #Data is always shifted - + self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada - - self.dataOut.flagDeflipData = 
self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip - + + self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip + def getData(self): """ First method to execute before "RUN" is called. - + Copia el buffer de lectura a la clase "Spectra", con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock" - + Return: 0 : Si no hay mas archivos disponibles 1 : Si hizo una buena copia del buffer - + Affected: self.dataOut - + self.flagDiscontinuousBlock self.flagIsNewBlock """ @@ -367,70 +356,68 @@ class SpectraReader(JRODataReader, ProcessingUnit): self.dataOut.flagNoData = True print 'Process finished' return 0 - + self.flagDiscontinuousBlock = 0 self.flagIsNewBlock = 0 - - if self.__hasNotDataInBuffer(): + + if self.__hasNotDataInBuffer(): if not( self.readNextBlock() ): self.dataOut.flagNoData = True return 0 - #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) if self.data_spc is None: self.dataOut.flagNoData = True return 0 - + self.getBasicHeader() - + self.getFirstHeader() self.dataOut.data_spc = self.data_spc - + self.dataOut.data_cspc = self.data_cspc - + self.dataOut.data_dc = self.data_dc - + self.dataOut.flagNoData = False - + self.dataOut.realtime = self.online - + return self.dataOut.data_spc class SpectraWriter(JRODataWriter, Operation): - - """ + + """ Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura - de los datos siempre se realiza por bloques. + de los datos siempre se realiza por bloques. """ - + ext = ".pdata" - + optchar = "P" - + shape_spc_Buffer = None - + shape_cspc_Buffer = None - + shape_dc_Buffer = None - + data_spc = None - + data_cspc = None - + data_dc = None - + # dataOut = None - - def __init__(self): - """ + + def __init__(self, **kwargs): + """ Inicializador de la clase SpectraWriter para la escritura de datos de espectros. 
- - Affected: + Affected: self.dataOut self.basicHeaderObj self.systemHeaderObj @@ -439,51 +426,49 @@ class SpectraWriter(JRODataWriter, Operation): Return: None """ - - Operation.__init__(self) - + + Operation.__init__(self, **kwargs) + self.isConfig = False - + self.nTotalBlocks = 0 - + self.data_spc = None - + self.data_cspc = None - self.data_dc = None self.fp = None self.flagIsNewFile = 1 - - self.nTotalBlocks = 0 - + + self.nTotalBlocks = 0 + self.flagIsNewBlock = 0 self.setFile = None - + self.dtype = None - + self.path = None - + self.noMoreFiles = 0 - + self.filename = None - + self.basicHeaderObj = BasicHeader(LOCALTIME) - + self.systemHeaderObj = SystemHeader() - + self.radarControllerHeaderObj = RadarControllerHeader() - + self.processingHeaderObj = ProcessingHeader() - + def hasAllDataInBuffer(self): return 1 - def setBlockDimension(self): """ @@ -503,15 +488,14 @@ class SpectraWriter(JRODataWriter, Operation): self.shape_cspc_Buffer = (self.dataOut.nPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) - + self.shape_dc_Buffer = (self.dataOut.nChannels, self.processingHeaderObj.nHeights) - + def writeBlock(self): """ Escribe el buffer en el file designado - Affected: self.data_spc @@ -520,11 +504,11 @@ class SpectraWriter(JRODataWriter, Operation): self.flagIsNewFile self.flagIsNewBlock self.nTotalBlocks - self.nWriteBlocks - + self.nWriteBlocks + Return: None """ - + spc = numpy.transpose( self.data_spc, (0,2,1) ) if not( self.processingHeaderObj.shif_fft ): spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones @@ -541,7 +525,6 @@ class SpectraWriter(JRODataWriter, Operation): data['imag'] = cspc.imag data = data.reshape((-1)) data.tofile(self.fp) - if self.data_dc is not None: data = numpy.zeros( self.shape_dc_Buffer, self.dtype ) @@ -552,147 +535,145 @@ class SpectraWriter(JRODataWriter, Operation): data.tofile(self.fp) # self.data_spc.fill(0) -# +# # if self.data_dc is not None: # self.data_dc.fill(0) -# +# # if self.data_cspc is not None: # self.data_cspc.fill(0) - self.flagIsNewFile = 0 self.flagIsNewBlock = 1 self.nTotalBlocks += 1 self.nWriteBlocks += 1 self.blockIndex += 1 - + # print "[Writing] Block = %d04" %self.blockIndex - + def putData(self): """ - Setea un bloque de datos y luego los escribe en un file - + Setea un bloque de datos y luego los escribe en un file Affected: self.data_spc self.data_cspc self.data_dc - Return: - 0 : Si no hay data o no hay mas files que puedan escribirse + Return: + 0 : Si no hay data o no hay mas files que puedan escribirse 1 : Si se escribio la data de un bloque en un file """ - + if self.dataOut.flagNoData: return 0 - + self.flagIsNewBlock = 0 - + if self.dataOut.flagDiscontinuousBlock: self.data_spc.fill(0) - self.data_cspc.fill(0) - self.data_dc.fill(0) + if self.dataOut.data_cspc is not None: + self.data_cspc.fill(0) + if self.dataOut.data_dc is not None: + self.data_dc.fill(0) self.setNextFile() - + if self.flagIsNewFile == 0: self.setBasicHeader() - + self.data_spc = self.dataOut.data_spc.copy() - + if self.dataOut.data_cspc is not None: self.data_cspc = self.dataOut.data_cspc.copy() - + if self.dataOut.data_dc is not None: self.data_dc = self.dataOut.data_dc.copy() - + # #self.processingHeaderObj.dataBlocksPerFile) if self.hasAllDataInBuffer(): # self.setFirstHeader() self.writeNextBlock() - + return 1 - def __getBlockSize(self): ''' Este metodos determina el cantidad de bytes para un bloque de datos de tipo 
Spectra ''' - + dtype_width = self.getDtypeWidth() - + pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints - + pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write) blocksize = (pts2write_SelfSpectra*dtype_width) - + if self.dataOut.data_cspc is not None: pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write) blocksize += (pts2write_CrossSpectra*dtype_width*2) - + if self.dataOut.data_dc is not None: pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights) blocksize += (pts2write_DCchannels*dtype_width*2) - + # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO return blocksize - + def setFirstHeader(self): - + """ Obtiene una copia del First Header - + Affected: self.systemHeaderObj self.radarControllerHeaderObj self.dtype - Return: + Return: None """ - + self.systemHeaderObj = self.dataOut.systemHeaderObj.copy() self.systemHeaderObj.nChannels = self.dataOut.nChannels self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy() - + self.processingHeaderObj.dtype = 1 # Spectra self.processingHeaderObj.blockSize = self.__getBlockSize() self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval - self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt + self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT - if self.processingHeaderObj.totalSpectra > 0: channelList = [] for channel in range(self.dataOut.nChannels): channelList.append(channel) channelList.append(channel) - + pairsList = [] if self.dataOut.nPairs > 0: for pair in self.dataOut.pairsList: pairsList.append(pair[0]) pairsList.append(pair[1]) - + spectraComb = channelList + pairsList spectraComb = numpy.array(spectraComb, dtype="u1") self.processingHeaderObj.spectraComb = spectraComb - + if self.dataOut.code is not None: self.processingHeaderObj.code = self.dataOut.code self.processingHeaderObj.nCode = self.dataOut.nCode self.processingHeaderObj.nBaud = self.dataOut.nBaud - + if self.processingHeaderObj.nWindows != 0: self.processingHeaderObj.firstHeight = self.dataOut.heightList[0] self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] self.processingHeaderObj.nHeights = self.dataOut.nHeights self.processingHeaderObj.samplesWin = self.dataOut.nHeights - + self.processingHeaderObj.processFlags = self.getProcessFlags() - + self.setBasicHeader() diff --git a/schainpy/model/io/jroIO_voltage.py b/schainpy/model/io/jroIO_voltage.py index a34c870..29aa208 100644 --- a/schainpy/model/io/jroIO_voltage.py +++ b/schainpy/model/io/jroIO_voltage.py @@ -15,6 +15,7 @@ import tempfile from StringIO import StringIO # from _sha import blocksize + class VoltageReader(JRODataReader, ProcessingUnit): """ Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). 
La lectura @@ -134,13 +135,13 @@ class VoltageReader(JRODataReader, ProcessingUnit): self.path = None - self.profileIndex = 2**32-1 + self.profileIndex = 2**32 - 1 - self.delay = 3 #seconds + self.delay = 3 # seconds - self.nTries = 3 #quantity tries + self.nTries = 3 # quantity tries - self.nFiles = 3 #number of files for searching + self.nFiles = 3 # number of files for searching self.nReadBlocks = 0 @@ -172,51 +173,49 @@ class VoltageReader(JRODataReader, ProcessingUnit): def __hasNotDataInBuffer(self): - if self.profileIndex >= self.processingHeaderObj.profilesPerBlock*self.nTxs: + if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs: return 1 return 0 - def getBlockDimension(self): """ Obtiene la cantidad de puntos a leer por cada bloque de datos - + Affected: self.blocksize Return: None """ - pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels + pts2read = self.processingHeaderObj.profilesPerBlock * \ + self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels self.blocksize = pts2read - - def readBlock(self): """ readBlock lee el bloque de datos desde la posicion actual del puntero del archivo (self.fp) y actualiza todos los parametros relacionados al bloque de datos (metadata + data). La data leida es almacenada en el buffer y el contador del buffer es seteado a 0 - + Inputs: None - + Return: None - + Affected: self.profileIndex self.datablock self.flagIsNewFile self.flagIsNewBlock self.nTotalBlocks - + Exceptions: Si un bloque leido no es un bloque valido """ - + # if self.server is not None: # self.zBlock = self.receiver.recv() # self.zHeader = self.zBlock[:24] @@ -227,22 +226,24 @@ class VoltageReader(JRODataReader, ProcessingUnit): # self.systemHeaderObj.nChannels # else: current_pointer_location = self.fp.tell() - junk = numpy.fromfile( self.fp, self.dtype, self.blocksize ) + junk = numpy.fromfile(self.fp, self.dtype, self.blocksize) try: - junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) ) + junk = junk.reshape((self.processingHeaderObj.profilesPerBlock, + self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels)) except: - #print "The read block (%3d) has not enough data" %self.nReadBlocks + # print "The read block (%3d) has not enough data" %self.nReadBlocks if self.waitDataBlock(pointer_location=current_pointer_location): - junk = numpy.fromfile( self.fp, self.dtype, self.blocksize ) - junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) ) + junk = numpy.fromfile(self.fp, self.dtype, self.blocksize) + junk = junk.reshape((self.processingHeaderObj.profilesPerBlock, + self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels)) # return 0 - #Dimensions : nChannels, nProfiles, nSamples + # Dimensions : nChannels, nProfiles, nSamples - junk = numpy.transpose(junk, (2,0,1)) - self.datablock = junk['real'] + junk['imag']*1j + junk = numpy.transpose(junk, (2, 0, 1)) + self.datablock = junk['real'] + junk['imag'] * 1j self.profileIndex = 0 @@ -263,9 +264,8 @@ class VoltageReader(JRODataReader, ProcessingUnit): self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() if self.nTxs > 1: - self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs - - #Time interval and code are propierties of dataOut. 
Its value depends of radarControllerHeaderObj. + self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs + # Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj. # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt # @@ -281,15 +281,18 @@ class VoltageReader(JRODataReader, ProcessingUnit): self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock - self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight + self.dataOut.heightList = numpy.arange( + self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight self.dataOut.channelList = range(self.systemHeaderObj.nChannels) self.dataOut.nCohInt = self.processingHeaderObj.nCohInt - self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada + # asumo q la data no esta decodificada + self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode - self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data no esta sin flip + # asumo q la data no esta sin flip + self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft @@ -301,51 +304,57 @@ class VoltageReader(JRODataReader, ProcessingUnit): if self.nTxs == 1: return - if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0: - raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock) + if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0: + raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % ( + 1. 
/ self.nTxs, self.processingHeaderObj.profilesPerBlock) if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0: - raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" %(self.nTxs, self.processingHeaderObj.nHeights) + raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" % ( + self.nTxs, self.processingHeaderObj.nHeights) - self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs)) + self.datablock = self.datablock.reshape( + (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs)) - self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs - self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight - self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs + self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock * self.nTxs + self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \ + self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight + self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs return def readFirstHeaderFromServer(self): - + self.getFirstHeader() self.firstHeaderSize = self.basicHeaderObj.size - datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) + datatype = int(numpy.log2((self.processingHeaderObj.processFlags & + PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR)) if datatype == 0: - datatype_str = numpy.dtype([('real',' 0.47m HDD 1000'' +200samples -> 6 HDD 100ms file 100s folder +200samples -> 0.48ms HDD 100ms file 1000s folder +200samples -> 116ms HDD 5000ms file 100s folder +200samples -> 182 HDD 10000ms file 100s folder +200samples -> 143 HDD 10000ms file 100s folder SSD + +Escritura +200samples -> 0.78m HDD 100ms file 1000s folder +200samples -> 0.066m HDD 100ms file 1000s folder +200samples -> 0.30 HDD 100ms file 100s folder +200samples -> 0.23 HDD 5000ms file 100s folder +200samples -> 0.176 HDD 10000ms file 100s folder + diff --git a/schainpy/model/proc/jroproc_amisr.py b/schainpy/model/proc/jroproc_amisr.py index f7b235c..13e571d 100644 --- a/schainpy/model/proc/jroproc_amisr.py +++ b/schainpy/model/proc/jroproc_amisr.py @@ -6,8 +6,8 @@ from jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jroamisr import AMISR class AMISRProc(ProcessingUnit): - def __init__(self): - ProcessingUnit.__init__(self) + def __init__(self, **kwargs): + ProcessingUnit.__init__(self, **kwargs) self.objectDict = {} self.dataOut = AMISR() @@ -17,7 +17,8 @@ class AMISRProc(ProcessingUnit): class PrintInfo(Operation): - def __init__(self): + def __init__(self, **kwargs): + Operation.__init__(self, **kwargs) self.__isPrinted = False def run(self, dataOut): @@ -42,8 +43,8 @@ class BeamSelector(Operation): profileIndex = None nProfiles = None - def __init__(self): - + def __init__(self, **kwargs): + Operation.__init__(self, **kwargs) self.profileIndex = 0 self.__isConfig = False @@ -98,7 +99,8 @@ class BeamSelector(Operation): class ProfileToChannels(Operation): - def __init__(self): + def __init__(self, **kwargs): + Operation.__init__(self, **kwargs) self.__isConfig = 
False self.__counter_chan = 0 self.buffer = None diff --git a/schainpy/model/proc/jroproc_parameters.py b/schainpy/model/proc/jroproc_parameters.py index 8fe1cf2..5e52bfa 100644 --- a/schainpy/model/proc/jroproc_parameters.py +++ b/schainpy/model/proc/jroproc_parameters.py @@ -17,7 +17,6 @@ from functools import partial import time #from sklearn.cluster import KMeans -import matplotlib.pyplot as plt from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters from jroproc_base import ProcessingUnit, Operation @@ -1766,8 +1765,8 @@ class WindProfiler(Operation): n = None - def __init__(self): - Operation.__init__(self) + def __init__(self, **kwargs): + Operation.__init__(self, **kwargs) def __calculateCosDir(self, elev, azim): zen = (90 - elev)*numpy.pi/180 @@ -2473,8 +2472,8 @@ class WindProfiler(Operation): class EWDriftsEstimation(Operation): - def __init__(self): - Operation.__init__(self) + def __init__(self, **kwargs): + Operation.__init__(self, **kwargs) def __correctValues(self, heiRang, phi, velRadial, SNR): listPhi = phi.tolist() diff --git a/schainpy/model/proc/jroproc_spectra.py b/schainpy/model/proc/jroproc_spectra.py index 5df7042..3832045 100644 --- a/schainpy/model/proc/jroproc_spectra.py +++ b/schainpy/model/proc/jroproc_spectra.py @@ -142,7 +142,7 @@ class SpectraProc(ProcessingUnit): if self.dataIn.flagDataAsBlock: #data dimension: [nChannels, nProfiles, nSamples] nVoltProfiles = self.dataIn.data.shape[1] -# nVoltProfiles = self.dataIn.nProfiles + # nVoltProfiles = self.dataIn.nProfiles if nVoltProfiles == nProfiles: self.buffer = self.dataIn.data.copy() @@ -197,7 +197,6 @@ class SpectraProc(ProcessingUnit): self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex] self.dataOut.pairsList = pairs - self.dataOut.pairsIndexList = pairsIndex return @@ -877,7 +876,6 @@ class IncohInt(Operation): return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc def run(self, dataOut, n=None, timeInterval=None, overlapping=False): - if n==1: return @@ -901,4 +899,3 @@ class IncohInt(Operation): dataOut.nIncohInt *= self.n dataOut.utctime = avgdatatime dataOut.flagNoData = False - diff --git a/schainpy/scripts/schain.xml b/schainpy/scripts/schain.xml index 04af99d..80c9912 100644 --- a/schainpy/scripts/schain.xml +++ b/schainpy/scripts/schain.xml @@ -1 +1 @@ - \ No newline at end of file + diff --git a/schainpy/scripts/testDigitalRF.py b/schainpy/scripts/testDigitalRF.py new file mode 100644 index 0000000..4d584df --- /dev/null +++ b/schainpy/scripts/testDigitalRF.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python +''' +Created on Jul 7, 2014 + +@author: roj-idl71 +''' +import os, sys + +from schainpy.controller import Project + +def main(): + + desc = "Testing USRP data reader" + filename = "schain.xml" + figpath = "./" + remotefolder = "/home/wmaster/graficos" + + #this controller object save all user configuration and then execute each module + #with their parameters. 
+ controllerObj = Project() + + controllerObj.setup(id = '191', name='test01', description=desc) + + #Creating a reader object with its parameters + #schainpy.model.io.jroIO_usrp.USRPReader.setup() + readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRF', + path='/home/nanosat/data/', + startDate='2000/07/03', + endDate='2017/07/03', + startTime='00:00:00', + endTime='23:59:59', + online=0) + + # procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage', + # inputId=readUnitConfObj.getId()) + +# opObj10 = procUnitConfObj0.addOperation(name='selectHeights') +# opObj10.addParameter(name='minHei', value='0', format='float') +# opObj10.addParameter(name='maxHei', value='8', format='float') + +# opObj10 = procUnitConfObj0.addOperation(name='setH0') +# opObj10.addParameter(name='h0', value='5.4', format='float') + +# opObj10 = procUnitConfObj0.addOperation(name='Decoder', optype='external') +# opObj10.addParameter(name='code', value='1,-1', format='intlist') +# opObj10.addParameter(name='nCode', value='2', format='float') +# opObj10.addParameter(name='nBaud', value='1', format='float') + + # opObj10 = procUnitConfObj0.addOperation(name='CohInt', optype='external') + # opObj10.addParameter(name='n', value='128', format='float') + + # opObj11 = procUnitConfObj0.addOperation(name='Scope', optype='external') + # opObj11.addParameter(name='id', value='121', format='int') + # opObj11.addParameter(name='wintitle', value='Scope', format='str') + + # procUnitConfObj1 = controllerObj.addProcUnit(datatype='Spectra', + # inputId=procUnitConfObj0.getId()) + + # #Creating a processing object with its parameters + # #schainpy.model.proc.jroproc_spectra.SpectraProc.run() + # #If you need to add more parameters can use the "addParameter method" + # procUnitConfObj1.addParameter(name='nFFTPoints', value='8', format='int') + # procUnitConfObj1.addParameter(name='pairsList', value='(0,1)', format='pairslist') + +# opObj10 = procUnitConfObj1.addOperation(name='IncohInt', optype='external') +# opObj10.addParameter(name='n', value='2', format='float') +# + #Using internal methods + #schainpy.model.proc.jroproc_spectra.SpectraProc.selectChannels() +# opObj10 = procUnitConfObj1.addOperation(name='selectChannels') +# opObj10.addParameter(name='channelList', value='0,1', format='intlist') + + #Using internal methods + #schainpy.model.proc.jroproc_spectra.SpectraProc.selectHeights() +# opObj10 = procUnitConfObj1.addOperation(name='selectHeights') +# opObj10.addParameter(name='minHei', value='90', format='float') +# opObj10.addParameter(name='maxHei', value='180', format='float') + + #Using external methods (new modules) +# #schainpy.model.proc.jroproc_spectra.IncohInt.setup() +# opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other') +# opObj12.addParameter(name='n', value='1', format='int') + + #Using external methods (new modules) + #schainpy.model.graphics.jroplot_spectra.SpectraPlot.setup() + # opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='external') + # opObj11.addParameter(name='id', value='11', format='int') + # opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str') +# opObj11.addParameter(name='zmin', value='0', format='int') +# opObj11.addParameter(name='zmax', value='90', format='int') +# opObj11.addParameter(name='save', value='1', format='int') +# opObj11.addParameter(name='xmin', value='-20', format='float') +# opObj11.addParameter(name='xmax', value='20', format='float') + + #Using external methods (new modules) + 
#schainpy.model.graphics.jroplot_spectra.RTIPlot.setup() +# opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other') +# opObj11.addParameter(name='id', value='30', format='int') +# opObj11.addParameter(name='wintitle', value='RTI', format='str') +# # opObj11.addParameter(name='zmin', value='0', format='int') +# # opObj11.addParameter(name='zmax', value='90', format='int') +# opObj11.addParameter(name='showprofile', value='1', format='int') +# opObj11.addParameter(name='timerange', value=str(2*60*60), format='int') +# opObj11.addParameter(name='xmin', value='19.5', format='float') +# opObj11.addParameter(name='xmax', value='20', format='float') + + # opObj11 = procUnitConfObj1.addOperation(name='CrossSpectraPlot', optype='other') + # opObj11.addParameter(name='id', value='3', format='int') + # opObj11.addParameter(name='wintitle', value='CrossSpectraPlot', format='str') +# opObj11.addParameter(name='zmin', value='30', format='int') +# opObj11.addParameter(name='zmax', value='120', format='int') +# opObj11.addParameter(name='pairsList', value='(0,1)', format='pairslist') + + controllerObj.start() + +if __name__ == '__main__': + main() diff --git a/schainpy/scripts/testDigitalRFWriter.py b/schainpy/scripts/testDigitalRFWriter.py new file mode 100644 index 0000000..c1addc5 --- /dev/null +++ b/schainpy/scripts/testDigitalRFWriter.py @@ -0,0 +1,98 @@ +import os, sys + +from schainpy.controller import Project + +if __name__ == '__main__': + + desc = "Segundo Test" + filename = "schain.xml" + + controllerObj = Project() + + controllerObj.setup(id = '191', name='test01', description=desc) + + readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader', + path='/home/nanosat/data/John', + startDate='2010/10/28', + endDate='2017/10/28', + startTime='00:00:00', + endTime='23:59:59', + online=0, + walk=0) + + opObj00 = readUnitConfObj.addOperation(name='printNumberOfBlock') + + procUnitConfObj0 = controllerObj.addProcUnit(datatype='VoltageProc', + inputId=readUnitConfObj.getId()) + + # opObj11 = procUnitConfObj0.addOperation(name='Scope', optype='external') + # opObj11.addParameter(name='id', value='121', format='int') + # opObj11.addParameter(name='wintitle', value='Scope', format='str') + + opObj10 = procUnitConfObj0.addOperation(name='DigitalRFWriter', optype='other') + opObj10.addParameter(name='path', value='/home/nanosat/data/digitalrf', format='str') + # opObj10.addParameter(name='minHei', value='0', format='float') + # opObj10.addParameter(name='maxHei', value='8', format='float') + + # opObj10 = procUnitConfObj0.addOperation(name='filterByHeights') + # opObj10.addParameter(name='window', value='2', format='float') + + # opObj10 = procUnitConfObj0.addOperation(name='Decoder', optype='external') + # opObj10.addParameter(name='code', value='1,-1', format='intlist') + # opObj10.addParameter(name='nCode', value='2', format='float') + # opObj10.addParameter(name='nBaud', value='1', format='float') + + + # opObj10 = procUnitConfObj0.addOperation(name='CohInt', optype='external') + # opObj10.addParameter(name='n', value='1296', format='float') + + # procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraProc', + # inputId=procUnitConfObj0.getId()) + + #Creating a processing object with its parameters + #schainpy.model.proc.jroproc_spectra.SpectraProc.run() + #If you need to add more parameters can use the "addParameter method" + # procUnitConfObj1.addParameter(name='nFFTPoints', value='128', format='int') + + # opObj10 = procUnitConfObj1.addOperation(name='IncohInt', 
optype='external') + # opObj10.addParameter(name='n', value='2', format='float') + + #Using internal methods + #schainpy.model.proc.jroproc_spectra.SpectraProc.selectChannels() +# opObj10 = procUnitConfObj1.addOperation(name='selectChannels') +# opObj10.addParameter(name='channelList', value='0,1', format='intlist') + + #Using internal methods + #schainpy.model.proc.jroproc_spectra.SpectraProc.selectHeights() +# opObj10 = procUnitConfObj1.addOperation(name='selectHeights') +# opObj10.addParameter(name='minHei', value='90', format='float') +# opObj10.addParameter(name='maxHei', value='180', format='float') + + #Using external methods (new modules) +# #schainpy.model.proc.jroproc_spectra.IncohInt.setup() +# opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other') +# opObj12.addParameter(name='n', value='1', format='int') + + #Using external methods (new modules) + #schainpy.model.graphics.jroplot_spectra.SpectraPlot.setup() +# opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='external') +# opObj11.addParameter(name='id', value='11', format='int') +# opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str') +# opObj11.addParameter(name='zmin', value='-60', format='int') +# opObj11.addParameter(name='zmax', value='10', format='int') +# opObj11.addParameter(name='save', value='1', format='int') + +# #Using external methods (new modules) +# #schainpy.model.graphics.jroplot_spectra.RTIPlot.setup() +# opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other') +# opObj11.addParameter(name='id', value='30', format='int') +# opObj11.addParameter(name='wintitle', value='RTI', format='str') +# opObj11.addParameter(name='zmin', value='-60', format='int') +# opObj11.addParameter(name='zmax', value='-10', format='int') +# opObj11.addParameter(name='showprofile', value='1', format='int') +# # opObj11.addParameter(name='timerange', value=str(5*60*60*60), format='int') +# opObj11.addParameter(name='xmin', value='14', format='float') +# opObj11.addParameter(name='xmax', value='23.9', format='float') +# opObj11.addParameter(name='save', value='1', format='int') + + controllerObj.start() diff --git a/schainpy/utils/log.py b/schainpy/utils/log.py index bbb532b..0a5b43d 100644 --- a/schainpy/utils/log.py +++ b/schainpy/utils/log.py @@ -17,6 +17,7 @@ SCHAINPY - LOG import click + def warning(message, tag='Warning'): click.echo(click.style('[{}] {}'.format(tag, message), fg='yellow')) pass @@ -39,6 +40,6 @@ def log(message, tag='Info'): def makelogger(tag, bg='reset', fg='reset'): def func(message): - click.echo(click.style('[{}] {}'.format(tag.upper(), message), - bg=bg, fg=fg)) + click.echo(click.style('[{}] {}'.format( + tag.upper(), message), bg=bg, fg=fg)) return func diff --git a/schainpy/utils/trash b/schainpy/utils/trash new file mode 100644 index 0000000..384299d --- /dev/null +++ b/schainpy/utils/trash @@ -0,0 +1 @@ +You should install "digital_rf_hdf5" module if you want to read USRP data diff --git a/setup.py b/setup.py index be416d7..2384c56 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,7 @@ from setuptools import setup, Extension from setuptools.command.build_ext import build_ext as _build_ext from schainpy import __version__ + class build_ext(_build_ext): def finalize_options(self): _build_ext.finalize_options(self) @@ -23,7 +24,7 @@ setup(name="schainpy", author="Miguel Urco", author_email="miguel.urco@jro.igp.gob.pe", url="http://jro.igp.gob.pe", - packages = {'schainpy', + packages={'schainpy', 'schainpy.model', 'schainpy.model.data', 
'schainpy.model.graphics', @@ -39,31 +40,30 @@ setup(name="schainpy", ext_package='schainpy', py_modules=[''], package_data={'': ['schain.conf.template'], - 'schainpy.gui.figures': ['*.png','*.jpg'], + 'schainpy.gui.figures': ['*.png', '*.jpg'], }, include_package_data=False, - scripts =['schainpy/gui/schainGUI'], + scripts=['schainpy/gui/schainGUI'], ext_modules=[ - Extension("cSchain", ["schainpy/model/proc/extensions.c"] - )], + Extension("cSchain", ["schainpy/model/proc/extensions.c"] + )], entry_points={ - 'console_scripts': [ - 'schain = schaincli.cli:main', - ], + 'console_scripts': [ + 'schain = schaincli.cli:main', + ], }, - cmdclass={'build_ext':build_ext}, + cmdclass={'build_ext': build_ext}, setup_requires=["numpy >= 1.11.2"], install_requires=[ - "scipy >= 0.14.0", - "h5py >= 2.2.1", - "matplotlib >= 1.4.2", - "pyfits >= 3.4", - "paramiko >= 2.1.2", - "paho-mqtt >= 1.2", - "zmq", - "fuzzywuzzy", - "click", - "colorama", - "python-Levenshtein" - ], - ) + "scipy >= 0.14.0", + "h5py >= 2.2.1", + "matplotlib >= 1.4.2", + "pyfits >= 3.4", + "paramiko >= 2.1.2", + "paho-mqtt >= 1.2", + "zmq", + "fuzzywuzzy", + "click", + "python-Levenshtein" + ], + )
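
A change repeated throughout this patch is that Operation and ProcessingUnit subclasses (AMISRProc, PrintInfo, BeamSelector, ProfileToChannels, WindProfiler, EWDriftsEstimation, SpectraWriter, ...) now accept **kwargs and forward them to the base-class initializer instead of calling it with no arguments or skipping it. The sketch below shows that forwarding pattern in isolation. It is a minimal illustration, not schainpy's actual API: the Operation base class here is a stand-in for schainpy.model.proc.jroproc_base.Operation, and ExampleOperation and its attributes are hypothetical names invented for the example.

    # Minimal sketch of the **kwargs-forwarding constructor pattern applied in this
    # patch. The base class below is a simplified stand-in, NOT the real schainpy
    # Operation; only the forwarding pattern itself is taken from the patch.


    class Operation(object):
        """Stand-in for schainpy.model.proc.jroproc_base.Operation."""

        def __init__(self, **kwargs):
            # The real base class consumes these keyword arguments during setup;
            # this stand-in just stores them so the example is self-contained.
            self.kwargs = kwargs


    class ExampleOperation(Operation):
        """Hypothetical subclass written the way the patch rewrites operations."""

        def __init__(self, **kwargs):
            # Before the patch, several subclasses did not pass their keyword
            # arguments up to the base initializer, so anything supplied by the
            # controller was silently dropped. Forwarding **kwargs preserves it.
            Operation.__init__(self, **kwargs)
            self.__isConfig = False


    if __name__ == '__main__':
        op = ExampleOperation(id='121', wintitle='Scope')
        print(op.kwargs)  # e.g. {'id': '121', 'wintitle': 'Scope'}

The design point, as far as the patch shows, is uniformity: every operation constructor has the same (self, **kwargs) signature and delegates to the base class, so the controller can instantiate any of them with arbitrary configuration keywords without each subclass having to enumerate them.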