From 1f521b07c958f7b97c138eff1637cbcf2bcbd155 2018-07-03 19:56:23 From: George Yong Date: 2018-07-03 19:56:23 Subject: [PATCH] Python 2to3, Spectra (all operations) working --- diff --git a/schainpy/__init__.py b/schainpy/__init__.py index 4654380..d51d41f 100644 --- a/schainpy/__init__.py +++ b/schainpy/__init__.py @@ -1,7 +1,7 @@ ''' -Created on Feb 7, 2012 +Created on Jul 3, 2018 @author $Author$ @version $Id$ ''' -__version__ = '2.3' +__version__ = '3.0' diff --git a/schainpy/admin.py b/schainpy/admin.py index ef2ed57..48bfe7e 100644 --- a/schainpy/admin.py +++ b/schainpy/admin.py @@ -11,8 +11,8 @@ import sys import time import traceback import smtplib -import ConfigParser -import StringIO +import configparser +import io from threading import Thread from multiprocessing import Process from email.mime.text import MIMEText @@ -65,7 +65,7 @@ class Alarm(Process): @staticmethod def send_email(**kwargs): notifier = SchainNotify() - print kwargs + print(kwargs) notifier.notify(**kwargs) @staticmethod @@ -144,10 +144,10 @@ class SchainConfigure(): return # create Parser using standard module ConfigParser - self.__parser = ConfigParser.ConfigParser() + self.__parser = configparser.ConfigParser() # read conf file into a StringIO with "[madrigal]\n" section heading prepended - strConfFile = StringIO.StringIO("[schain]\n" + self.__confFile.read()) + strConfFile = io.StringIO("[schain]\n" + self.__confFile.read()) # parse StringIO configuration file self.__parser.readfp(strConfFile) @@ -355,7 +355,7 @@ class SchainNotify: if not self.__emailToAddress: return 0 - print "***** Sending alert to %s *****" %self.__emailToAddress + print("***** Sending alert to %s *****" %self.__emailToAddress) # set up message sent=self.sendEmail(email_from=self.__emailFromAddress, @@ -500,4 +500,4 @@ if __name__ == '__main__': test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify') - print 'Hopefully message sent - check.' 
+ print('Hopefully message sent - check.') \ No newline at end of file diff --git a/schainpy/controller.py b/schainpy/controller.py index ce97a95..93bd843 100644 --- a/schainpy/controller.py +++ b/schainpy/controller.py @@ -67,7 +67,7 @@ def MPProject(project, n=cpu_count()): for process in processes: process.terminate() process.join() - print traceback.print_tb(trace) + print(traceback.print_tb(trace)) sys.excepthook = beforeExit @@ -114,7 +114,7 @@ class ParameterConf(): return self.__formated_value if value == '': - raise ValueError, '%s: This parameter value is empty' % self.name + raise ValueError('%s: This parameter value is empty' % self.name) if format == 'list': strList = value.split(',') @@ -180,16 +180,16 @@ class ParameterConf(): new_value = ast.literal_eval(value) if type(new_value) not in (tuple, list): - raise ValueError, '%s has to be a tuple or list of pairs' % value + raise ValueError('%s has to be a tuple or list of pairs' % value) if type(new_value[0]) not in (tuple, list): if len(new_value) != 2: - raise ValueError, '%s has to be a tuple or list of pairs' % value + raise ValueError('%s has to be a tuple or list of pairs' % value) new_value = [new_value] for thisPair in new_value: if len(thisPair) != 2: - raise ValueError, '%s has to be a tuple or list of pairs' % value + raise ValueError('%s has to be a tuple or list of pairs' % value) self.__formated_value = new_value @@ -265,7 +265,7 @@ class ParameterConf(): def printattr(self): - print 'Parameter[%s]: name = %s, value = %s, format = %s' % (self.id, self.name, self.value, self.format) + print('Parameter[%s]: name = %s, value = %s, format = %s' % (self.id, self.name, self.value, self.format)) class OperationConf(): @@ -434,11 +434,11 @@ class OperationConf(): def printattr(self): - print '%s[%s]: name = %s, type = %s, priority = %s' % (self.ELEMENTNAME, + print('%s[%s]: name = %s, type = %s, priority = %s' % (self.ELEMENTNAME, self.id, self.name, self.type, - self.priority) + self.priority)) 
for parmConfObj in self.parmConfObjList: parmConfObj.printattr() @@ -446,11 +446,11 @@ class OperationConf(): def createObject(self, plotter_queue=None): if self.type == 'self': - raise ValueError, 'This operation type cannot be created' + raise ValueError('This operation type cannot be created') if self.type == 'plotter': if not plotter_queue: - raise ValueError, 'plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)' + raise ValueError('plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)') opObj = Plotter(self.name, plotter_queue) @@ -563,7 +563,7 @@ class ProcUnitConf(): # Compatible with old signal chain version if datatype == None and name == None: - raise ValueError, 'datatype or name should be defined' + raise ValueError('datatype or name should be defined') if name == None: if 'Proc' in datatype: @@ -652,11 +652,11 @@ class ProcUnitConf(): def printattr(self): - print '%s[%s]: name = %s, datatype = %s, inputId = %s' % (self.ELEMENTNAME, + print('%s[%s]: name = %s, datatype = %s, inputId = %s' % (self.ELEMENTNAME, self.id, self.name, self.datatype, - self.inputId) + self.inputId)) for opConfObj in self.opConfObjList: opConfObj.printattr() @@ -759,7 +759,7 @@ class ReadUnitConf(ProcUnitConf): # Compatible with old signal chain version if datatype == None and name == None: - raise ValueError, 'datatype or name should be defined' + raise ValueError('datatype or name should be defined') if name == None: if 'Reader' in datatype: name = datatype @@ -831,7 +831,7 @@ class ReadUnitConf(ProcUnitConf): opObj.addParameter( name='endTime', value=self.endTime, format='time') - for key, value in kwargs.items(): + for key, value in list(kwargs.items()): opObj.addParameter(name=key, value=value, format=type(value).__name__) else: @@ -853,7 +853,7 @@ class ReadUnitConf(ProcUnitConf): name='startTime', value=self.startTime, format='time') opObj.addParameter(name='endTime', 
value=self.endTime, format='time') - for key, value in kwargs.items(): + for key, value in list(kwargs.items()): opObj.addParameter(name=key, value=value, format=type(value).__name__) @@ -914,7 +914,7 @@ class Project(Process): def __getNewId(self): - idList = self.procUnitConfObjDict.keys() + idList = list(self.procUnitConfObjDict.keys()) id = int(self.id) * 10 @@ -940,7 +940,7 @@ class Project(Process): self.id = str(new_id) - keyList = self.procUnitConfObjDict.keys() + keyList = list(self.procUnitConfObjDict.keys()) keyList.sort() n = 1 @@ -958,11 +958,11 @@ class Project(Process): def setup(self, id, name='', description='', email=None, alarm=[]): - print - print '*' * 60 - print ' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__ - print '*' * 60 - print + print() + print('*' * 60) + print(' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__) + print('*' * 60) + print() self.id = str(id) self.description = description self.email = email @@ -970,7 +970,7 @@ class Project(Process): def update(self, **kwargs): - for key, value in kwargs.items(): + for key, value in list(kwargs.items()): setattr(self, key, value) def clone(self): @@ -1008,7 +1008,7 @@ class Project(Process): def removeProcUnit(self, id): - if id in self.procUnitConfObjDict.keys(): + if id in list(self.procUnitConfObjDict.keys()): self.procUnitConfObjDict.pop(id) def getReadUnitId(self): @@ -1019,7 +1019,7 @@ class Project(Process): def getReadUnitObj(self): - for obj in self.procUnitConfObjDict.values(): + for obj in list(self.procUnitConfObjDict.values()): if obj.getElementName() == 'ReadUnit': return obj @@ -1037,7 +1037,7 @@ class Project(Process): def getProcUnitObjByName(self, name): - for obj in self.procUnitConfObjDict.values(): + for obj in list(self.procUnitConfObjDict.values()): if obj.name == name: return obj @@ -1045,7 +1045,7 @@ class Project(Process): def procUnitItems(self): - return self.procUnitConfObjDict.items() + return 
list(self.procUnitConfObjDict.items()) def makeXml(self): @@ -1054,7 +1054,7 @@ class Project(Process): projectElement.set('name', self.name) projectElement.set('description', self.description) - for procUnitConfObj in self.procUnitConfObjDict.values(): + for procUnitConfObj in list(self.procUnitConfObjDict.values()): procUnitConfObj.makeXml(projectElement) self.projectElement = projectElement @@ -1068,17 +1068,17 @@ class Project(Process): filename = 'schain.xml' if not filename: - print 'filename has not been defined. Use setFilename(filename) for do it.' + print('filename has not been defined. Use setFilename(filename) for do it.') return 0 abs_file = os.path.abspath(filename) if not os.access(os.path.dirname(abs_file), os.W_OK): - print 'No write permission on %s' % os.path.dirname(abs_file) + print('No write permission on %s' % os.path.dirname(abs_file)) return 0 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)): - print 'File %s already exists and it could not be overwriten' % abs_file + print('File %s already exists and it could not be overwriten' % abs_file) return 0 self.makeXml() @@ -1092,13 +1092,13 @@ class Project(Process): def readXml(self, filename=None): if not filename: - print 'filename is not defined' + print('filename is not defined') return 0 abs_file = os.path.abspath(filename) if not os.path.isfile(abs_file): - print '%s file does not exist' % abs_file + print('%s file does not exist' % abs_file) return 0 self.projectElement = None @@ -1107,7 +1107,7 @@ class Project(Process): try: self.projectElement = ElementTree().parse(abs_file) except: - print 'Error reading %s, verify file format' % filename + print('Error reading %s, verify file format' % filename) return 0 self.project = self.projectElement.tag @@ -1146,16 +1146,16 @@ class Project(Process): def printattr(self): - print 'Project[%s]: name = %s, description = %s' % (self.id, + print('Project[%s]: name = %s, description = %s' % (self.id, self.name, - self.description) + 
self.description)) - for procUnitConfObj in self.procUnitConfObjDict.values(): + for procUnitConfObj in list(self.procUnitConfObjDict.values()): procUnitConfObj.printattr() def createObjects(self): - for procUnitConfObj in self.procUnitConfObjDict.values(): + for procUnitConfObj in list(self.procUnitConfObjDict.values()): procUnitConfObj.createObjects(self.plotterQueue) def __connect(self, objIN, thisObj): @@ -1164,7 +1164,7 @@ class Project(Process): def connectObjects(self): - for thisPUConfObj in self.procUnitConfObjDict.values(): + for thisPUConfObj in list(self.procUnitConfObjDict.values()): inputId = thisPUConfObj.getInputId() @@ -1245,7 +1245,7 @@ class Project(Process): ''' if self.isPaused(): - print 'Process suspended' + print('Process suspended') while True: time.sleep(0.1) @@ -1256,10 +1256,10 @@ class Project(Process): if self.isStopped(): break - print 'Process reinitialized' + print('Process reinitialized') if self.isStopped(): - print 'Process stopped' + print('Process stopped') return 0 return 1 @@ -1270,15 +1270,15 @@ class Project(Process): def setPlotterQueue(self, plotter_queue): - raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class' + raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class') def getPlotterQueue(self): - raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class' + raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class') def useExternalPlotter(self): - raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class' + raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class') def run(self): @@ -1287,7 +1287,7 @@ class Project(Process): self.createObjects() self.connectObjects() - keyList = self.procUnitConfObjDict.keys() + keyList = list(self.procUnitConfObjDict.keys()) keyList.sort() err = None @@ -1310,7 
+1310,7 @@ class Project(Process): except KeyboardInterrupt: is_ok = False break - except ValueError, e: + except ValueError as e: time.sleep(0.5) err = self.__handleError(procUnitConfObj) is_ok = False @@ -1339,4 +1339,4 @@ class Project(Process): log.success('{} finished (time: {}s)'.format( self.name, - time.time()-self.start_time)) + time.time()-self.start_time)) \ No newline at end of file diff --git a/schainpy/controller_api.py b/schainpy/controller_api.py index 2d1775e..1d07f11 100644 --- a/schainpy/controller_api.py +++ b/schainpy/controller_api.py @@ -1,5 +1,5 @@ import threading -from Queue import Queue +from queue import Queue from schainpy.controller import Project from schainpy.model.graphics.jroplotter import PlotManager @@ -77,7 +77,7 @@ class ControllerThread(threading.Thread, Project): plotterList = PlotManager.plotterList - for thisPUConfObj in self.procUnitConfObjDict.values(): + for thisPUConfObj in list(self.procUnitConfObjDict.values()): inputId = thisPUConfObj.getInputId() @@ -176,4 +176,4 @@ class ControllerThread(threading.Thread, Project): # self.emit( SIGNAL( "jobStarted( PyQt_PyObject )" ), 1) # Project.run(self) # self.emit( SIGNAL( "jobFinished( PyQt_PyObject )" ), 1) -# +# \ No newline at end of file diff --git a/schainpy/model/__init__.py b/schainpy/model/__init__.py index ae3ce46..27b8ad6 100644 --- a/schainpy/model/__init__.py +++ b/schainpy/model/__init__.py @@ -5,8 +5,8 @@ # from schainpy.model.utils.jroutils import * # from schainpy.serializer import * -from graphics import * -from data import * -from io import * -from proc import * -from utils import * +from .graphics import * +from .data import * +from .io import * +from .proc import * +from .utils import * diff --git a/schainpy/model/data/BLTRheaderIO.py b/schainpy/model/data/BLTRheaderIO.py index 69e0d25..534f109 100644 --- a/schainpy/model/data/BLTRheaderIO.py +++ b/schainpy/model/data/BLTRheaderIO.py @@ -7,7 +7,7 @@ import sys import numpy import copy import datetime -from 
__builtin__ import None + SPEED_OF_LIGHT = 299792458 SPEED_OF_LIGHT = 3e8 @@ -78,7 +78,7 @@ class Header(object): message += self.__class__.__name__.upper() + "\n" message += "#"*50 + "\n" - keyList = self.__dict__.keys() + keyList = list(self.__dict__.keys()) keyList.sort() for key in keyList: @@ -90,7 +90,7 @@ class Header(object): if attr: message += "%s = %s" %("size", attr) + "\n" - print message + print(message) class FileHeader(Header): @@ -134,9 +134,9 @@ class FileHeader(Header): ''' - except Exception, e: - print "FileHeader: " - print eBasicHeader + except Exception as e: + print("FileHeader: ") + print(eBasicHeader) return 0 self.FileMgcNumber= byte(header['FileMgcNumber'][0]) @@ -279,8 +279,8 @@ class RecordHeader(Header): try: header = numpy.fromfile(fp,RECORD_STRUCTURE,1) - except Exception, e: - print "System Header: " + e + except Exception as e: + print("System Header: " + e) return 0 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001 diff --git a/schainpy/model/data/__init__.py b/schainpy/model/data/__init__.py index 6052841..c9dbc75 100644 --- a/schainpy/model/data/__init__.py +++ b/schainpy/model/data/__init__.py @@ -1,3 +1,3 @@ -from jrodata import * -from jroheaderIO import * -from jroamisr import * \ No newline at end of file +from .jrodata import * +from .jroheaderIO import * +from .jroamisr import * \ No newline at end of file diff --git a/schainpy/model/data/jroamisr.py b/schainpy/model/data/jroamisr.py index dc87704..954502b 100644 --- a/schainpy/model/data/jroamisr.py +++ b/schainpy/model/data/jroamisr.py @@ -68,7 +68,7 @@ class AMISR: if inputObj is None: return copy.deepcopy(self) - for key in inputObj.__dict__.keys(): + for key in list(inputObj.__dict__.keys()): self.__dict__[key] = inputObj.__dict__[key] def getNHeights(self): diff --git a/schainpy/model/data/jrodata.py b/schainpy/model/data/jrodata.py index 96ad913..c0e5d49 100644 --- a/schainpy/model/data/jrodata.py +++ b/schainpy/model/data/jrodata.py @@ -8,8 +8,8 @@ 
import copy import numpy import datetime -from jroheaderIO import SystemHeader, RadarControllerHeader -from schainpy import cSchain +from .jroheaderIO import SystemHeader, RadarControllerHeader +# from schainpy import cSchain def getNumpyDtype(dataTypeCode): @@ -27,7 +27,7 @@ def getNumpyDtype(dataTypeCode): elif dataTypeCode == 5: numpyDtype = numpy.dtype([('real', ' nums_min: -# rtest = float(j)/(j-1) + 1.0/navg -# if ((sumq*j) > (rtest*sump**2)): -# j = j - 1 -# sump = sump - sortdata[j] -# sumq = sumq - sortdata[j]**2 -# cont = 0 -# -# j += 1 -# -# lnoise = sump /j -# -# return lnoise + lenOfData = len(sortdata) + nums_min = lenOfData*0.2 + + if nums_min <= 5: + nums_min = 5 + + sump = 0. + + sumq = 0. + + j = 0 + + cont = 1 + + while((cont==1)and(j nums_min: + rtest = float(j)/(j-1) + 1.0/navg + if ((sumq*j) > (rtest*sump**2)): + j = j - 1 + sump = sump - sortdata[j] + sumq = sumq - sortdata[j]**2 + cont = 0 + + j += 1 + + lnoise = sump /j + + return lnoise - return cSchain.hildebrand_sekhon(sortdata, navg) + # return cSchain.hildebrand_sekhon(sortdata, navg) class Beam: @@ -122,7 +122,7 @@ class GenericData(object): if inputObj == None: return copy.deepcopy(self) - for key in inputObj.__dict__.keys(): + for key in list(inputObj.__dict__.keys()): attribute = inputObj.__dict__[key] @@ -241,7 +241,7 @@ class JROData(GenericData): def getChannelIndexList(self): - return range(self.nChannels) + return list(range(self.nChannels)) def getNHeights(self): @@ -662,7 +662,7 @@ class Spectra(JROData): def getPairsIndexList(self): - return range(self.nPairs) + return list(range(self.nPairs)) def getNormFactor(self): @@ -714,8 +714,8 @@ class Spectra(JROData): pairsIndexList = [] for pair in pairsList: if pair not in self.pairsList: - raise ValueError, "Pair %s is not in dataOut.pairsList" % ( - pair) + raise ValueError("Pair %s is not in dataOut.pairsList" % ( + pair)) pairsIndexList.append(self.pairsList.index(pair)) for i in range(len(pairsIndexList)): pair = 
self.pairsList[pairsIndexList[i]] @@ -736,7 +736,7 @@ class Spectra(JROData): def setValue(self, value): - print "This property should not be initialized" + print("This property should not be initialized") return @@ -941,7 +941,7 @@ class Fits(JROData): def getChannelIndexList(self): - return range(self.nChannels) + return list(range(self.nChannels)) def getNoise(self, type=1): @@ -1068,7 +1068,7 @@ class Correlation(JROData): ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc if ind_vel[0] < 0: - ind_vel[range(0, 1)] = ind_vel[range(0, 1)] + self.num_prof + ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof if mode == 1: jspectra[:, freq_dc, :] = ( @@ -1080,7 +1080,7 @@ class Correlation(JROData): xx = numpy.zeros([4, 4]) for fil in range(4): - xx[fil, :] = vel[fil]**numpy.asarray(range(4)) + xx[fil, :] = vel[fil]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx_aux = xx_inv[0, :] @@ -1239,7 +1239,7 @@ class Parameters(Spectra): def setValue(self, value): - print "This property should not be initialized" + print("This property should not be initialized") return @@ -1248,4 +1248,4 @@ class Parameters(Spectra): return self.spc_noise timeInterval = property(getTimeInterval) - noise = property(getNoise, setValue, "I'm the 'Noise' property.") + noise = property(getNoise, setValue, "I'm the 'Noise' property.") \ No newline at end of file diff --git a/schainpy/model/data/jroheaderIO.py b/schainpy/model/data/jroheaderIO.py index 2f0b4f9..bdb5905 100644 --- a/schainpy/model/data/jroheaderIO.py +++ b/schainpy/model/data/jroheaderIO.py @@ -8,6 +8,7 @@ import numpy import copy import datetime import inspect +from schainpy.utils import log SPEED_OF_LIGHT = 299792458 SPEED_OF_LIGHT = 3e8 @@ -110,7 +111,7 @@ class Header(object): message += self.__class__.__name__.upper() + "\n" message += "#" * 50 + "\n" - keyList = self.__dict__.keys() + keyList = list(self.__dict__.keys()) keyList.sort() for key in keyList: @@ -122,7 +123,7 @@ class 
Header(object): if attr: message += "%s = %s" % ("size", attr) + "\n" - print message + print(message) class BasicHeader(Header): @@ -161,9 +162,9 @@ class BasicHeader(Header): header = numpy.fromfile(fp, BASIC_STRUCTURE, 1) else: header = numpy.fromstring(fp, BASIC_STRUCTURE, 1) - except Exception, e: - print "BasicHeader: " - print e + except Exception as e: + print("BasicHeader: ") + print(e) return 0 self.size = int(header['nSize'][0]) @@ -229,7 +230,7 @@ class SystemHeader(Header): self.length = 0 try: startFp = fp.tell() - except Exception, e: + except Exception as e: startFp = None pass @@ -238,8 +239,8 @@ class SystemHeader(Header): header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1) else: header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1) - except Exception, e: - print "System Header: " + str(e) + except Exception as e: + print("System Header: " + str(e)) return 0 self.size = header['nSize'][0] @@ -344,7 +345,7 @@ class RadarControllerHeader(Header): self.length = 0 try: startFp = fp.tell() - except Exception, e: + except Exception as e: startFp = None pass @@ -354,8 +355,8 @@ class RadarControllerHeader(Header): else: header = numpy.fromstring(fp, RADAR_STRUCTURE, 1) self.length += header.nbytes - except Exception, e: - print "RadarControllerHeader: " + str(e) + except Exception as e: + print("RadarControllerHeader: " + str(e)) return 0 size = int(header['nSize'][0]) @@ -384,8 +385,8 @@ class RadarControllerHeader(Header): samplingWindow = numpy.fromstring( fp[self.length:], SAMPLING_STRUCTURE, self.nWindows) self.length += samplingWindow.nbytes - except Exception, e: - print "RadarControllerHeader: " + str(e) + except Exception as e: + print("RadarControllerHeader: " + str(e)) return 0 self.nHeights = int(numpy.sum(samplingWindow['nsa'])) self.firstHeight = samplingWindow['h0'] @@ -399,8 +400,8 @@ class RadarControllerHeader(Header): self.Taus = numpy.fromstring( fp[self.length:], ' endFp: @@ -557,7 +559,7 @@ class RadarControllerHeader(Header): def 
set_size(self, value): - raise IOError, "size is a property and it cannot be set, just read" + raise IOError("size is a property and it cannot be set, just read") return @@ -617,7 +619,7 @@ class ProcessingHeader(Header): self.length = 0 try: startFp = fp.tell() - except Exception, e: + except Exception as e: startFp = None pass @@ -627,8 +629,8 @@ class ProcessingHeader(Header): else: header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1) self.length += header.nbytes - except Exception, e: - print "ProcessingHeader: " + str(e) + except Exception as e: + print("ProcessingHeader: " + str(e)) return 0 size = int(header['nSize'][0]) @@ -650,8 +652,8 @@ class ProcessingHeader(Header): samplingWindow = numpy.fromstring( fp[self.length:], SAMPLING_STRUCTURE, self.nWindows) self.length += samplingWindow.nbytes - except Exception, e: - print "ProcessingHeader: " + str(e) + except Exception as e: + print("ProcessingHeader: " + str(e)) return 0 self.nHeights = int(numpy.sum(samplingWindow['nsa'])) @@ -667,8 +669,8 @@ class ProcessingHeader(Header): self.spectraComb = numpy.fromstring( fp[self.length:], 'u1', 2 * self.totalSpectra) self.length += self.spectraComb.nbytes - except Exception, e: - print "ProcessingHeader: " + str(e) + except Exception as e: + print("ProcessingHeader: " + str(e)) return 0 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE): @@ -783,7 +785,7 @@ class ProcessingHeader(Header): def set_size(self, value): - raise IOError, "size is a property and it cannot be set, just read" + raise IOError("size is a property and it cannot be set, just read") return @@ -902,4 +904,4 @@ def get_procflag_dtype(index): def get_dtype_width(index): - return DTYPE_WIDTH[index] + return DTYPE_WIDTH[index] \ No newline at end of file diff --git a/schainpy/model/graphics/__init__.py b/schainpy/model/graphics/__init__.py index 4b0542b..98b5033 100644 --- a/schainpy/model/graphics/__init__.py +++ b/schainpy/model/graphics/__init__.py @@ -1,7 
+1,7 @@ -from jroplot_voltage import * -from jroplot_spectra import * -from jroplot_heispectra import * -from jroplot_correlation import * -from jroplot_parameters import * -from jroplot_data import * -from jroplotter import * +from .jroplot_voltage import * +from .jroplot_spectra import * +from .jroplot_heispectra import * +from .jroplot_correlation import * +from .jroplot_parameters import * +from .jroplot_data import * +from .jroplotter import * diff --git a/schainpy/model/graphics/figure.py b/schainpy/model/graphics/figure.py index 85fd5bd..931e126 100644 --- a/schainpy/model/graphics/figure.py +++ b/schainpy/model/graphics/figure.py @@ -1,7 +1,7 @@ import os import numpy import time, datetime -import mpldriver +from schainpy.model.graphics import mpldriver from schainpy.model.proc.jroproc_base import Operation @@ -130,7 +130,7 @@ class Figure(Operation): def init(self, id, nplots, wintitle): - raise NotImplementedError, "This method has been replaced by createFigure" + raise NotImplementedError("This method has been replaced by createFigure") def createFigure(self, id, wintitle, widthplot=None, heightplot=None, show=True): @@ -188,11 +188,11 @@ class Figure(Operation): def setTextFromAxes(self, text): - raise NotImplementedError, "This method has been replaced with Axes.setText" + raise NotImplementedError("This method has been replaced with Axes.setText") def makeAxes(self, nrow, ncol, xpos, ypos, colspan, rowspan): - raise NotImplementedError, "This method has been replaced with Axes.addAxes" + raise NotImplementedError("This method has been replaced with Axes.addAxes") def addAxes(self, *args): """ @@ -234,7 +234,7 @@ class Figure(Operation): if not figfile: if not thisDatetime: - raise ValueError, "Saving figure: figfile or thisDatetime should be defined" + raise ValueError("Saving figure: figfile or thisDatetime should be defined") return str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") @@ -654,4 +654,4 @@ class Axes: z_buffer[index[0],::] = 
self.__missing z_buffer = numpy.ma.masked_inside(z_buffer,0.99*self.__missing,1.01*self.__missing) - return x_buffer, y_buffer, z_buffer + return x_buffer, y_buffer, z_buffer \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_correlation.py b/schainpy/model/graphics/jroplot_correlation.py index 759f8c6..9dea381 100644 --- a/schainpy/model/graphics/jroplot_correlation.py +++ b/schainpy/model/graphics/jroplot_correlation.py @@ -3,7 +3,7 @@ import datetime import numpy import copy from schainpy.model import * -from figure import Figure, isRealtime +from .figure import Figure, isRealtime class CorrelationPlot(Figure): isConfig = None @@ -99,7 +99,7 @@ class CorrelationPlot(Figure): if realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): - print 'Skipping this plot function' + print('Skipping this plot function') return if channelList == None: @@ -108,7 +108,7 @@ class CorrelationPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) factor = dataOut.normFactor @@ -184,4 +184,4 @@ class CorrelationPlot(Figure): save=save, ftp=ftp, wr_period=wr_period, - thisDatetime=thisDatetime) + thisDatetime=thisDatetime) \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_data.py b/schainpy/model/graphics/jroplot_data.py index 6253a0a..07a1df5 100644 --- a/schainpy/model/graphics/jroplot_data.py +++ b/schainpy/model/graphics/jroplot_data.py @@ -339,7 +339,7 @@ class PlotData(Operation, Process): self.titles: list of axes title ''' - raise(NotImplementedError, 'Implement this method in child class') + raise NotImplementedError def fill_gaps(self, x_buffer, y_buffer, z_buffer): ''' @@ -490,7 +490,7 @@ class PlotData(Operation, Process): if self.save_labels: labels = self.save_labels else: - labels = 
range(self.nrows) + labels = list(range(self.nrows)) if self.oneFigure: label = '' @@ -514,7 +514,7 @@ class PlotData(Operation, Process): def plot(self): ''' ''' - raise(NotImplementedError, 'Implement this method in child class') + raise NotImplementedError def run(self): @@ -961,7 +961,7 @@ class PlotParamData(PlotRTIData): self.ylabel = 'Height [km]' if not self.titles: self.titles = self.data.parameters \ - if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)] + if self.data.parameters else ['Param {}'.format(x) for x in range(self.nrows)] if self.showSNR: self.titles.append('SNR') @@ -1041,7 +1041,7 @@ class PlotPolarMapData(PlotData): else: self.nplots = self.data.shape(self.CODE)[0] self.nrows = self.nplots - self.channels = range(self.nplots) + self.channels = list(range(self.nplots)) if self.mode == 'E': self.xlabel = 'Longitude' self.ylabel = 'Latitude' @@ -1145,4 +1145,4 @@ class PlotPolarMapData(PlotData): self.titles = ['{} {}'.format(self.data.parameters[x], title) for x in self.channels] self.saveTime = self.max_time - + \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_heispectra.py b/schainpy/model/graphics/jroplot_heispectra.py index b0c9c5b..bfc6e18 100644 --- a/schainpy/model/graphics/jroplot_heispectra.py +++ b/schainpy/model/graphics/jroplot_heispectra.py @@ -7,8 +7,8 @@ import os import datetime import numpy -from figure import Figure, isRealtime -from plotting_codes import * +from .figure import Figure, isRealtime +from .plotting_codes import * class SpectraHeisScope(Figure): @@ -98,7 +98,7 @@ class SpectraHeisScope(Figure): if dataOut.realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): - print 'Skipping this plot function' + print('Skipping this plot function') return if channelList == None: @@ -107,7 +107,7 @@ class SpectraHeisScope(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in 
dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) # x = dataOut.heightList @@ -238,7 +238,7 @@ class RTIfromSpectraHeis(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) if timerange != None: @@ -326,4 +326,4 @@ class RTIfromSpectraHeis(Figure): ftp=ftp, wr_period=wr_period, thisDatetime=thisDatetime, - update_figfile=update_figfile) + update_figfile=update_figfile) \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_parameters.py b/schainpy/model/graphics/jroplot_parameters.py index 4e810c6..dbdfb52 100644 --- a/schainpy/model/graphics/jroplot_parameters.py +++ b/schainpy/model/graphics/jroplot_parameters.py @@ -2,8 +2,8 @@ import os import datetime import numpy import inspect -from figure import Figure, isRealtime, isTimeInHourRange -from plotting_codes import * +from .figure import Figure, isRealtime, isTimeInHourRange +from .plotting_codes import * class FitGauPlot(Figure): @@ -101,7 +101,7 @@ class FitGauPlot(Figure): """ if realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): - print 'Skipping this plot function' + print('Skipping this plot function') return if channelList == None: @@ -110,7 +110,7 @@ class FitGauPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" %channel + raise ValueError("Channel %d is not in dataOut.channelList" %channel) channelIndexList.append(dataOut.channelList.index(channel)) # if normFactor is None: @@ -134,7 +134,7 @@ class FitGauPlot(Figure): y = dataOut.getHeiRange() z = dataOut.GauSPC[:,GauSelector,:,:] #GauSelector] #dataOut.data_spc/factor - print 'GausSPC', 
z[0,32,10:40] + print('GausSPC', z[0,32,10:40]) z = numpy.where(numpy.isfinite(z), z, numpy.NAN) zdB = 10*numpy.log10(z) @@ -311,7 +311,7 @@ class MomentsPlot(Figure): if realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): - print 'Skipping this plot function' + print('Skipping this plot function') return if channelList == None: @@ -320,7 +320,7 @@ class MomentsPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) factor = dataOut.normFactor @@ -701,7 +701,7 @@ class WindProfilerPlot(Figure): if ymin == None: ymin = numpy.nanmin(y) if ymax == None: ymax = numpy.nanmax(y) - if zmax == None: zmax = numpy.nanmax(abs(z[range(2),:])) + if zmax == None: zmax = numpy.nanmax(abs(z[list(range(2)),:])) #if numpy.isnan(zmax): zmax = 50 if zmin == None: zmin = -zmax @@ -875,12 +875,12 @@ class ParametersPlot(Figure): return if channelList == None: - channelIndexList = range(dataOut.data_param.shape[0]) + channelIndexList = list(range(dataOut.data_param.shape[0])) else: channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) x = dataOut.getTimeRange1(dataOut.paramInterval) @@ -2148,4 +2148,4 @@ class NSMeteorDetection2Plot(Figure): save=save, ftp=ftp, wr_period=wr_period, - thisDatetime=thisDatetime) + thisDatetime=thisDatetime) \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_spectra.py b/schainpy/model/graphics/jroplot_spectra.py index eafac9f..4eeb92b 100644 --- a/schainpy/model/graphics/jroplot_spectra.py +++ b/schainpy/model/graphics/jroplot_spectra.py @@ -7,8 +7,8 @@ import os import datetime 
import numpy -from figure import Figure, isRealtime, isTimeInHourRange -from plotting_codes import * +from .figure import Figure, isRealtime, isTimeInHourRange +from .plotting_codes import * class SpectraPlot(Figure): @@ -106,7 +106,7 @@ class SpectraPlot(Figure): """ if realtime: if not(isRealtime(utcdatatime = dataOut.utctime)): - print 'Skipping this plot function' + print('Skipping this plot function') return if channelList == None: @@ -115,7 +115,7 @@ class SpectraPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" %channel + raise ValueError("Channel %d is not in dataOut.channelList" %channel) channelIndexList.append(dataOut.channelList.index(channel)) if normFactor is None: @@ -307,7 +307,7 @@ class CrossSpectraPlot(Figure): pairsIndexList = [] for pair in pairsList: if pair not in dataOut.pairsList: - raise ValueError, "Pair %s is not in dataOut.pairsList" %str(pair) + raise ValueError("Pair %s is not in dataOut.pairsList" %str(pair)) pairsIndexList.append(dataOut.pairsList.index(pair)) if not pairsIndexList: @@ -554,7 +554,7 @@ class RTIPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) if normFactor is None: @@ -581,7 +581,7 @@ class RTIPlot(Figure): update_figfile = False - if dataOut.ltctime >= self.xmax: + if self.xmax is not None and dataOut.ltctime >= self.xmax: #yong self.counter_imagwr = wr_period self.isConfig = False update_figfile = True @@ -732,7 +732,7 @@ class CoherenceMap(Figure): pairsIndexList = [] for pair in pairsList: if pair not in dataOut.pairsList: - raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair) + raise ValueError("Pair %s is not in dataOut.pairsList" %(pair)) 
pairsIndexList.append(dataOut.pairsList.index(pair)) if pairsIndexList == []: @@ -915,7 +915,7 @@ class PowerProfilePlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) factor = dataOut.normFactor @@ -1040,7 +1040,7 @@ class SpectraCutPlot(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) factor = dataOut.normFactor @@ -1219,7 +1219,7 @@ class Noise(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) x = dataOut.getTimeRange() @@ -1408,7 +1408,7 @@ class BeaconPhase(Figure): pairsIndexList = [] for pair in pairsList: if pair not in dataOut.pairsList: - raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair) + raise ValueError("Pair %s is not in dataOut.pairsList" %(pair)) pairsIndexList.append(dataOut.pairsList.index(pair)) if pairsIndexList == []: @@ -1539,4 +1539,4 @@ class BeaconPhase(Figure): ftp=ftp, wr_period=wr_period, thisDatetime=thisDatetime, - update_figfile=update_figfile) + update_figfile=update_figfile) \ No newline at end of file diff --git a/schainpy/model/graphics/jroplot_voltage.py b/schainpy/model/graphics/jroplot_voltage.py index ed4dfc5..e5754e2 100644 --- a/schainpy/model/graphics/jroplot_voltage.py +++ b/schainpy/model/graphics/jroplot_voltage.py @@ -7,7 +7,7 @@ import os import datetime import numpy -from figure import Figure +from .figure 
import Figure class Scope(Figure): @@ -134,7 +134,7 @@ class Scope(Figure): channelIndexList = [] for channel in channelList: if channel not in dataOut.channelList: - raise ValueError, "Channel %d is not in dataOut.channelList" + raise ValueError("Channel %d is not in dataOut.channelList") channelIndexList.append(dataOut.channelList.index(channel)) thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) @@ -222,4 +222,4 @@ class Scope(Figure): save=save, ftp=ftp, wr_period=wr_period, - thisDatetime=thisDatetime) + thisDatetime=thisDatetime) \ No newline at end of file diff --git a/schainpy/model/graphics/jroplotter.py b/schainpy/model/graphics/jroplotter.py index 118ca9b..d222d1c 100644 --- a/schainpy/model/graphics/jroplotter.py +++ b/schainpy/model/graphics/jroplotter.py @@ -17,11 +17,11 @@ import schainpy.admin from schainpy.model.proc.jroproc_base import Operation from schainpy.model.serializer.data import obj2Dict, dict2Obj -from jroplot_correlation import * -from jroplot_heispectra import * -from jroplot_parameters import * -from jroplot_spectra import * -from jroplot_voltage import * +from .jroplot_correlation import * +from .jroplot_heispectra import * +from .jroplot_parameters import * +from .jroplot_spectra import * +from .jroplot_voltage import * class Plotter(Operation): @@ -46,7 +46,7 @@ class Plotter(Operation): def setup(self, **kwargs): - print "Initializing ..." 
+ print("Initializing ...") def run(self, dataOut, id=None, **kwargs): @@ -106,8 +106,8 @@ class PlotManager(): sys.exc_info()[1], sys.exc_info()[2]) - print "***** Error occurred in PlotManager *****" - print "***** [%s]: %s" %(name, err[-1]) + print("***** Error occurred in PlotManager *****") + print("***** [%s]: %s" %(name, err[-1])) message = "\nError ocurred in %s:\n" %name message += "".join(err) @@ -168,7 +168,7 @@ class PlotManager(): dataPlot = serial_data['data'] - if plot_id not in self.plotInstanceDict.keys(): + if plot_id not in list(self.plotInstanceDict.keys()): className = eval(plot_name) self.plotInstanceDict[plot_id] = className(**kwargs) @@ -198,7 +198,7 @@ class PlotManager(): self.__lock.acquire() - for plot_id in self.plotInstanceDict.keys(): + for plot_id in list(self.plotInstanceDict.keys()): plotter = self.plotInstanceDict[plot_id] plotter.close() @@ -211,7 +211,7 @@ class PlotManager(): def start(self): if not self.controllerThreadObj.isRunning(): - raise RuntimeError, "controllerThreadObj has not been initialized. Use controllerThreadObj.start() before call this method" + raise RuntimeError("controllerThreadObj has not been initialized. Use controllerThreadObj.start() before call this method") self.join() @@ -237,4 +237,4 @@ class PlotManager(): self.__lock.release() - return err + return err \ No newline at end of file diff --git a/schainpy/model/graphics/mpldriver.py b/schainpy/model/graphics/mpldriver.py index 6173cd6..536daa2 100644 --- a/schainpy/model/graphics/mpldriver.py +++ b/schainpy/model/graphics/mpldriver.py @@ -171,11 +171,11 @@ def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='' ###################################################### if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if '1.0.' 
in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if grid != None: @@ -246,11 +246,11 @@ def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, ax_cb.yaxis.tick_right() if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return imesh if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return imesh matplotlib.pyplot.tight_layout() @@ -334,11 +334,11 @@ def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit iplot = ax.lines[-1] if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if grid != None: @@ -407,11 +407,11 @@ def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='' iplot = ax.lines[-1] if '0.' in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if grid != None: @@ -461,11 +461,11 @@ def createPolar(ax, x, y, iplot = ax.lines[-1] if '0.' 
in matplotlib.__version__[0:2]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot if '1.0.' in matplotlib.__version__[0:4]: - print "The matplotlib version has to be updated to 1.1 or newer" + print("The matplotlib version has to be updated to 1.1 or newer") return iplot # if grid != None: @@ -491,11 +491,11 @@ def polar(iplot, x, y, xlabel='', ylabel='', title=''): def draw(fig): if type(fig) == 'int': - raise ValueError, "Error drawing: Fig parameter should be a matplotlib figure object figure" + raise ValueError("Error drawing: Fig parameter should be a matplotlib figure object figure") fig.canvas.draw() def pause(interval=0.000001): - matplotlib.pyplot.pause(interval) + matplotlib.pyplot.pause(interval) \ No newline at end of file diff --git a/schainpy/model/io/MIRAtest.py b/schainpy/model/io/MIRAtest.py index ea8e94f..a4e8e20 100644 --- a/schainpy/model/io/MIRAtest.py +++ b/schainpy/model/io/MIRAtest.py @@ -291,8 +291,8 @@ RadarConst5 = RadarConst # print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec #startFp.seek(OffRHeader, os.SEEK_SET) -print 'debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec -print 'Posicion del bloque: ', OffRHeader +print('debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec) +print('Posicion del bloque: ', OffRHeader) header = numpy.fromfile(startFp, SRVI_STRUCTURE, 1) @@ -326,6 +326,6 @@ self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4 # print 'Datasize',self.Datasize endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec -print '==============================================' +print('==============================================') -print 
'==============================================' +print('==============================================') \ No newline at end of file diff --git a/schainpy/model/io/__init__.py b/schainpy/model/io/__init__.py index e35b986..2bcee44 100644 --- a/schainpy/model/io/__init__.py +++ b/schainpy/model/io/__init__.py @@ -4,20 +4,20 @@ $Author: murco $ $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $ ''' -from jroIO_voltage import * -from jroIO_spectra import * -from jroIO_heispectra import * -from jroIO_usrp import * -from jroIO_digitalRF import * -from jroIO_kamisr import * -from jroIO_param import * -from jroIO_hf import * +from .jroIO_voltage import * +from .jroIO_spectra import * +from .jroIO_heispectra import * +from .jroIO_usrp import * +from .jroIO_digitalRF import * +from .jroIO_kamisr import * +from .jroIO_param import * +from .jroIO_hf import * -from jroIO_madrigal import * +from .jroIO_madrigal import * -from bltrIO_param import * -from jroIO_bltr import * -from jroIO_mira35c import * -from julIO_param import * +from .bltrIO_param import * +from .jroIO_bltr import * +from .jroIO_mira35c import * +from .julIO_param import * -from pxIO_param import * \ No newline at end of file +from .pxIO_param import * \ No newline at end of file diff --git a/schainpy/model/io/bltrIO_param.py b/schainpy/model/io/bltrIO_param.py index 1d286f7..546fc99 100644 --- a/schainpy/model/io/bltrIO_param.py +++ b/schainpy/model/io/bltrIO_param.py @@ -121,7 +121,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.datatime = datetime.datetime(1900,1,1) if self.path is None: - raise ValueError, "The path is not valid" + raise ValueError("The path is not valid") if ext is None: ext = self.ext @@ -131,8 +131,8 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.fileIndex = 0 if not self.fileList: - raise Warning, "There is no files matching these date in the folder: %s. 
\n Check 'startDate' and 'endDate' " % ( - path) + raise Warning("There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % ( + path)) self.setNextFile() @@ -340,7 +340,7 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.dataOut.sizeOfFile = self.sizeOfFile self.dataOut.lat = self.lat self.dataOut.lon = self.lon - self.dataOut.channelList = range(self.nchannels) + self.dataOut.channelList = list(range(self.nchannels)) self.dataOut.kchan = self.kchan self.dataOut.delta = self.delta self.dataOut.correction = self.correction @@ -366,4 +366,4 @@ class BLTRParamReader(JRODataReader, ProcessingUnit): self.set_output() - return 1 + return 1 \ No newline at end of file diff --git a/schainpy/model/io/jroIO_amisr.py b/schainpy/model/io/jroIO_amisr.py index 3be5aaa..9e5c294 100644 --- a/schainpy/model/io/jroIO_amisr.py +++ b/schainpy/model/io/jroIO_amisr.py @@ -144,7 +144,7 @@ class AMISRReader(ProcessingUnit): self.status = 1 else: self.status = 0 - print 'Path:%s does not exists'%self.path + print('Path:%s does not exists'%self.path) return @@ -169,11 +169,11 @@ class AMISRReader(ProcessingUnit): pat = '\d+.\d+' dirnameList = [re.search(pat,x) for x in os.listdir(self.path)] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in dirnameList if x!=None] dirnameList = [x.string for x in dirnameList] if not(online): dirnameList = [self.__selDates(x) for x in dirnameList] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in dirnameList if x!=None] if len(dirnameList)>0: self.status = 1 self.dirnameList = dirnameList @@ -186,8 +186,8 @@ class AMISRReader(ProcessingUnit): startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime) endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime) - print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader) - print '........................................' 
+ print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)) + print('........................................') filter_filenameList = [] self.filenameList.sort() for i in range(len(self.filenameList)-1): @@ -226,7 +226,7 @@ class AMISRReader(ProcessingUnit): def __getFilenameList(self, fileListInKeys, dirList): for value in fileListInKeys: - dirName = value.keys()[0] + dirName = list(value.keys())[0] for file in value[dirName]: filename = os.path.join(dirName, file) self.filenameList.append(filename) @@ -304,7 +304,7 @@ class AMISRReader(ProcessingUnit): self.__selectDataForTimes() for i in range(len(self.filenameList)): - print "%s" %(self.filenameList[i]) + print("%s" %(self.filenameList[i])) return @@ -315,7 +315,7 @@ class AMISRReader(ProcessingUnit): idFile += 1 if not(idFile < len(self.filenameList)): self.flagNoMoreFiles = 1 - print "No more Files" + print("No more Files") return 0 filename = self.filenameList[idFile] @@ -330,7 +330,7 @@ class AMISRReader(ProcessingUnit): self.amisrFilePointer = amisrFilePointer - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 @@ -341,7 +341,7 @@ class AMISRReader(ProcessingUnit): self.__selectDataForTimes(online=True) filename = self.filenameList[0] while self.__filename_online == filename: - print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile) + print('waiting %d seconds to get a new file...'%(self.__waitForNewFile)) sleep(self.__waitForNewFile) self.__selectDataForTimes(online=True) filename = self.filenameList[0] @@ -351,7 +351,7 @@ class AMISRReader(ProcessingUnit): self.amisrFilePointer = h5py.File(filename,'r') self.flagIsNewFile = 1 self.filename = filename - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 @@ -368,12 +368,12 @@ class AMISRReader(ProcessingUnit): #looking index list for data start_index = self.radacHeaderObj.pulseCount[0,:][0] end_index = 
self.radacHeaderObj.npulses - range4data = range(start_index, end_index) + range4data = list(range(start_index, end_index)) self.index4_schain_datablock = numpy.array(range4data) buffer_start_index = 0 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0] - range4buffer = range(buffer_start_index, buffer_end_index) + range4buffer = list(range(buffer_start_index, buffer_end_index)) self.index4_buffer = numpy.array(range4buffer) self.linear_pulseCount = numpy.array(range4data + range4buffer) @@ -403,8 +403,8 @@ class AMISRReader(ProcessingUnit): just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:] - for i in range(len(self.beamCodeDict.values())): - xx = numpy.where(just4record0==self.beamCodeDict.values()[i][0]) + for i in range(len(list(self.beamCodeDict.values()))): + xx = numpy.where(just4record0==list(self.beamCodeDict.values())[i][0]) indexPulseByBeam = self.linear_pulseCount[xx[0]] self.beamRangeDict[i] = indexPulseByBeam @@ -499,7 +499,7 @@ class AMISRReader(ProcessingUnit): self.searchFilesOnLine(path, walk) if not(self.filenameList): - print "There is no files into the folder: %s"%(path) + print("There is no files into the folder: %s"%(path)) sys.exit(-1) @@ -632,8 +632,8 @@ class AMISRReader(ProcessingUnit): return 0 def printUTC(self): - print self.dataOut.utctime - print '' + print(self.dataOut.utctime) + print('') def setObjProperties(self): @@ -661,7 +661,7 @@ class AMISRReader(ProcessingUnit): if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'Process finished' + print('Process finished') return 0 if self.__hasNotDataInBuffer(): @@ -689,4 +689,4 @@ class AMISRReader(ProcessingUnit): self.setObjProperties() self.isConfig = True - self.getData() + self.getData() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_base.py b/schainpy/model/io/jroIO_base.py index 790e5ac..9d8012d 100644 --- a/schainpy/model/io/jroIO_base.py +++ b/schainpy/model/io/jroIO_base.py @@ -75,14 +75,14 @@ def isFileInEpoch(filename, 
startUTSeconds, endUTSeconds): try: fp = open(filename, 'rb') except IOError: - print "The file %s can't be opened" % (filename) + print("The file %s can't be opened" % (filename)) return 0 sts = basicHeaderObj.read(fp) fp.close() if not(sts): - print "Skipping the file %s because it has not a valid header" % (filename) + print("Skipping the file %s because it has not a valid header" % (filename)) return 0 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): @@ -130,7 +130,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): try: fp = open(filename, 'rb') except IOError: - print "The file %s can't be opened" % (filename) + print("The file %s can't be opened" % (filename)) return None firstBasicHeaderObj = BasicHeader(LOCALTIME) @@ -143,7 +143,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): sts = firstBasicHeaderObj.read(fp) if not(sts): - print "[Reading] Skipping the file %s because it has not a valid header" % (filename) + print("[Reading] Skipping the file %s because it has not a valid header" % (filename)) return None if not systemHeaderObj.read(fp): @@ -160,7 +160,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): offset = processingHeaderObj.blockSize + 24 # header size if filesize <= offset: - print "[Reading] %s: This file has not enough data" % filename + print("[Reading] %s: This file has not enough data" % filename) return None fp.seek(-offset, 2) @@ -231,7 +231,7 @@ def isFolderInDateRange(folder, startDate=None, endDate=None): basename = os.path.basename(folder) if not isRadarFolder(basename): - print "The folder %s has not the rigth format" % folder + print("The folder %s has not the rigth format" % folder) return 0 if startDate and endDate: @@ -274,7 +274,7 @@ def isFileInDateRange(filename, startDate=None, endDate=None): basename = os.path.basename(filename) if not isRadarFile(basename): - print "The filename %s has not the rigth 
format" % filename + print("The filename %s has not the rigth format" % filename) return 0 if startDate and endDate: @@ -315,8 +315,8 @@ def getFileFromSet(path, ext, set): return myfile[0] else: filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower()) - print 'the filename %s does not exist' % filename - print '...going to the last file: ' + print('the filename %s does not exist' % filename) + print('...going to the last file: ') if validFilelist: validFilelist = sorted(validFilelist, key=str.lower) @@ -646,9 +646,9 @@ class JRODataReader(JRODataIO): return [], [] if len(dateList) > 1: - print "[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList)) + print("[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList))) else: - print "[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0]) + print("[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0])) filenameList = [] datetimeList = [] @@ -679,10 +679,10 @@ class JRODataReader(JRODataIO): datetimeList = datetimeList[cursor * skip:cursor * skip + skip] if not(filenameList): - print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path) + print("[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path)) return [], [] - print "[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime) + print("[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime)) # for i in range(len(filenameList)): # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime()) @@ -743,7 +743,7 @@ class JRODataReader(JRODataIO): doypath.split('_')) > 1 else 0 fullpath = os.path.join(path, doypath, expLabel) - print "[Reading] %s folder was found: " % (fullpath) + print("[Reading] %s 
folder was found: " % (fullpath)) if set == None: filename = getlastFileFromPath(fullpath, ext) @@ -753,7 +753,7 @@ class JRODataReader(JRODataIO): if not(filename): return None, None, None, None, None, None - print "[Reading] %s file was found" % (filename) + print("[Reading] %s file was found" % (filename)) if not(self.__verifyFile(os.path.join(fullpath, filename))): return None, None, None, None, None, None @@ -844,10 +844,10 @@ class JRODataReader(JRODataIO): for nTries in range(tries): if firstTime_flag: - print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1) + print("\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1)) sleep(self.delay) else: - print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext) + print("\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)) fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext) @@ -902,7 +902,7 @@ class JRODataReader(JRODataIO): return 0 if self.verbose: - print '[Reading] Setting the file: %s' % self.filename + print('[Reading] Setting the file: %s' % self.filename) self.__readFirstHeader() self.nReadBlocks = 0 @@ -941,7 +941,7 @@ class JRODataReader(JRODataIO): # self.flagEoF = True return 0 - print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1) + print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)) sleep(self.delay) return 0 @@ -963,7 +963,7 @@ class JRODataReader(JRODataIO): if (currentSize >= neededSize): return 1 - print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1) + print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." 
% (self.delay, nTries + 1)) sleep(self.delay) return 0 @@ -1052,7 +1052,7 @@ class JRODataReader(JRODataIO): # Skip block out of startTime and endTime while True: if not(self.__setNewBlock()): - raise(schainpy.admin.SchainWarning('No more files')) + raise schainpy.admin.SchainWarning('No more files') return 0 if not(self.readBlock()): @@ -1060,17 +1060,17 @@ self.getBasicHeader() if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)): - print "[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks, + print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks, self.processingHeaderObj.dataBlocksPerFile, - self.dataOut.datatime.ctime() + self.dataOut.datatime.ctime())) continue break if self.verbose: - print "[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks, + print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks, self.processingHeaderObj.dataBlocksPerFile, - self.dataOut.datatime.ctime() + self.dataOut.datatime.ctime())) return 1 def __readFirstHeader(self): @@ -1097,7 +1097,7 @@ class JRODataReader(JRODataIO): elif datatype == 5: datatype_str = numpy.dtype([('real', ' endFp: sys.stderr.write( @@ -590,7 +590,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'NoData se vuelve true' + print('NoData se vuelve true') return 0 self.fp = self.path @@ -600,7 +600,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa self.dataOut.data_cspc = self.data_cspc self.dataOut.data_output = self.data_output - print 'self.dataOut.data_output', shape(self.dataOut.data_output) + print('self.dataOut.data_output', shape(self.dataOut.data_output)) # self.removeDC() return self.dataOut.data_spc @@ -617,7 +617,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa ''' # The address of the 
folder is generated the name of the .fdt file that will be read - print "File: ", self.fileSelector + 1 + print("File: ", self.fileSelector + 1) if self.fileSelector < len(self.filenameList): @@ -630,7 +630,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa self.readBlock() # Block reading else: - print 'readFile FlagNoData becomes true' + print('readFile FlagNoData becomes true') self.flagNoMoreFiles = True self.dataOut.flagNoData = True return 0 @@ -660,7 +660,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa ''' if self.BlockCounter < self.nFDTdataRecors - 2: - print self.nFDTdataRecors, 'CONDICION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' + print(self.nFDTdataRecors, 'CONDICION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!') if self.ReadMode == 1: rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter + 1) elif self.ReadMode == 0: @@ -687,8 +687,8 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa self.__firstHeigth = rheader.StartRangeSamp self.__deltaHeigth = rheader.SampResolution self.dataOut.heightList = self.__firstHeigth + \ - numpy.array(range(self.nHeights)) * self.__deltaHeigth - self.dataOut.channelList = range(self.nChannels) + numpy.array(list(range(self.nHeights))) * self.__deltaHeigth + self.dataOut.channelList = list(range(self.nChannels)) self.dataOut.nProfiles = rheader.nProfiles self.dataOut.nIncohInt = rheader.nIncohInt self.dataOut.nCohInt = rheader.nCohInt @@ -703,7 +703,7 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles self.data_output = numpy.ones([3, rheader.nHeights]) * numpy.NaN - print 'self.data_output', shape(self.data_output) + print('self.data_output', shape(self.data_output)) self.dataOut.velocityX = [] self.dataOut.velocityY = [] self.dataOut.velocityV = [] @@ -757,11 +757,11 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, 
RecordHeaderBLTR, JRODa z = self.data_spc.copy() # /factor z = numpy.where(numpy.isfinite(z), z, numpy.NAN) #zdB = 10*numpy.log10(z) - print ' ' - print 'Z: ' - print shape(z) - print ' ' - print ' ' + print(' ') + print('Z: ') + print(shape(z)) + print(' ') + print(' ') self.dataOut.data_spc = self.data_spc @@ -1177,4 +1177,4 @@ class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa else: self.fileSelector += 1 self.BlockCounter = 0 - print "Next File" + print("Next File") \ No newline at end of file diff --git a/schainpy/model/io/jroIO_digitalRF.py b/schainpy/model/io/jroIO_digitalRF.py index d9e120f..26aa60d 100644 --- a/schainpy/model/io/jroIO_digitalRF.py +++ b/schainpy/model/io/jroIO_digitalRF.py @@ -27,11 +27,11 @@ from schainpy.model.data.jrodata import Voltage from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation from time import time -import cPickle +import pickle try: import digital_rf except: - print 'You should install "digital_rf" module if you want to read Digital RF data' + print('You should install "digital_rf" module if you want to read Digital RF data') class DigitalRFReader(ProcessingUnit): @@ -59,7 +59,7 @@ class DigitalRFReader(ProcessingUnit): self.oldAverage = None def close(self): - print 'Average of writing to digital rf format is ', self.oldAverage * 1000 + print('Average of writing to digital rf format is ', self.oldAverage * 1000) return def __getCurrentSecond(self): @@ -115,7 +115,7 @@ class DigitalRFReader(ProcessingUnit): numpy.arange(self.__nSamples, dtype=numpy.float) * \ self.__deltaHeigth - self.dataOut.channelList = range(self.__num_subchannels) + self.dataOut.channelList = list(range(self.__num_subchannels)) self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights() @@ -256,7 +256,7 @@ class DigitalRFReader(ProcessingUnit): self.flagDecodeData = flagDecodeData self.i = 0 if not os.path.isdir(path): - raise ValueError, "[Reading] Directory %s does not exist" % 
path + raise ValueError("[Reading] Directory %s does not exist" % path) try: self.digitalReadObj = digital_rf.DigitalRFReader( @@ -267,10 +267,10 @@ class DigitalRFReader(ProcessingUnit): channelNameList = self.digitalReadObj.get_channels() if not channelNameList: - raise ValueError, "[Reading] Directory %s does not have any files" % path + raise ValueError("[Reading] Directory %s does not have any files" % path) if not channelList: - channelList = range(len(channelNameList)) + channelList = list(range(len(channelNameList))) ########## Reading metadata ###################### @@ -294,7 +294,7 @@ class DigitalRFReader(ProcessingUnit): self.__processingHeader = self.fixed_metadata_dict['processingHeader'] self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader'] self.__systemHeader = self.fixed_metadata_dict['systemHeader'] - self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype']) + self.dtype = pickle.loads(self.fixed_metadata_dict['dtype']) except: pass @@ -361,7 +361,7 @@ class DigitalRFReader(ProcessingUnit): endUTCSecond = end_index / self.__sample_rate if not nSamples: if not ippKm: - raise ValueError, "[Reading] nSamples or ippKm should be defined" + raise ValueError("[Reading] nSamples or ippKm should be defined") nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate)) channelBoundList = [] channelNameListFiltered = [] @@ -388,7 +388,7 @@ class DigitalRFReader(ProcessingUnit): self.__channelNameList = channelNameListFiltered self.__channelBoundList = channelBoundList self.__nSamples = nSamples - self.__samples_to_read = long(nSamples) # FIJO: AHORA 40 + self.__samples_to_read = int(nSamples) # FIJO: AHORA 40 self.__nChannels = len(self.__channelList) self.__startUTCSecond = startUTCSecond @@ -402,7 +402,7 @@ class DigitalRFReader(ProcessingUnit): startUTCSecond = numpy.floor(endUTCSecond) # por que en el otro metodo lo primero q se hace es sumar samplestoread - self.__thisUnixSample = long( + self.__thisUnixSample = int( 
startUTCSecond * self.__sample_rate) - self.__samples_to_read self.__data_buffer = numpy.zeros( @@ -411,17 +411,17 @@ class DigitalRFReader(ProcessingUnit): self.__setFileHeader() self.isConfig = True - print "[Reading] Digital RF Data was found from %s to %s " % ( + print("[Reading] Digital RF Data was found from %s to %s " % ( datetime.datetime.utcfromtimestamp( self.__startUTCSecond - self.__timezone), datetime.datetime.utcfromtimestamp( self.__endUTCSecond - self.__timezone) - ) + )) - print "[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), + print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), datetime.datetime.utcfromtimestamp( endUTCSecond - self.__timezone) - ) + )) self.oldAverage = None self.count = 0 self.executionTime = 0 @@ -433,7 +433,7 @@ class DigitalRFReader(ProcessingUnit): # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) # ) - print "[Reading] reloading metadata ..." 
+ print("[Reading] reloading metadata ...") try: self.digitalReadObj.reload(complete_update=True) @@ -448,13 +448,13 @@ class DigitalRFReader(ProcessingUnit): if end_index > self.__endUTCSecond * self.__sample_rate: self.__endUTCSecond = 1.0 * end_index / self.__sample_rate - print - print "[Reading] New timerange found [%s, %s] " % ( + print() + print("[Reading] New timerange found [%s, %s] " % ( datetime.datetime.utcfromtimestamp( self.__startUTCSecond - self.__timezone), datetime.datetime.utcfromtimestamp( self.__endUTCSecond - self.__timezone) - ) + )) return True @@ -480,7 +480,7 @@ class DigitalRFReader(ProcessingUnit): self.__thisUnixSample += self.__samples_to_read if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate: - print "[Reading] There are no more data into selected time-range" + print("[Reading] There are no more data into selected time-range") if self.__online: self.__reload() else: @@ -507,17 +507,17 @@ class DigitalRFReader(ProcessingUnit): self.executionTime + self.count * self.oldAverage) / (self.count + 1.0) self.count = self.count + 1.0 - except IOError, e: + except IOError as e: # read next profile self.__flagDiscontinuousBlock = True - print "[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e + print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e) break if result.shape[0] != self.__samples_to_read: self.__flagDiscontinuousBlock = True - print "[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), result.shape[0], - self.__samples_to_read) + self.__samples_to_read)) break self.__data_buffer[indexSubchannel, :] = result * volt_scale @@ -531,9 +531,9 @@ class DigitalRFReader(ProcessingUnit): if not 
dataOk: return False - print "[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), self.__samples_to_read, - self.__timeInterval) + self.__timeInterval)) self.__bufferIndex = 0 @@ -572,7 +572,7 @@ class DigitalRFReader(ProcessingUnit): return False if self.__flagDiscontinuousBlock: - print '[Reading] discontinuous block found ... continue with the next block' + print('[Reading] discontinuous block found ... continue with the next block') continue if not self.__online: @@ -582,7 +582,7 @@ class DigitalRFReader(ProcessingUnit): if err_counter > nTries: return False - print '[Reading] waiting %d seconds to read a new block' % seconds + print('[Reading] waiting %d seconds to read a new block' % seconds) sleep(seconds) self.dataOut.data = self.__data_buffer[:, @@ -650,7 +650,7 @@ class DigitalRFWriter(Operation): self.metadata_dict['frequency'] = self.dataOut.frequency self.metadata_dict['timezone'] = self.dataOut.timeZone - self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype) + self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype) self.metadata_dict['nProfiles'] = self.dataOut.nProfiles self.metadata_dict['heightList'] = self.dataOut.heightList self.metadata_dict['channelList'] = self.dataOut.channelList @@ -690,8 +690,8 @@ class DigitalRFWriter(Operation): file_cadence_millisecs = 1000 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator() - sample_rate_numerator = long(sample_rate_fraction.numerator) - sample_rate_denominator = long(sample_rate_fraction.denominator) + sample_rate_numerator = int(sample_rate_fraction.numerator) + sample_rate_denominator = int(sample_rate_fraction.denominator) start_global_index = dataOut.utctime * self.__sample_rate uuid = 'prueba' @@ -781,8 +781,8 @@ class DigitalRFWriter(Operation): ## if self.currentSample == 
self.__nProfiles: self.currentSample = 0 def close(self): - print '[Writing] - Closing files ' - print 'Average of writing to digital rf format is ', self.oldAverage * 1000 + print('[Writing] - Closing files ') + print('Average of writing to digital rf format is ', self.oldAverage * 1000) try: self.digitalWriteObj.close() except: @@ -797,4 +797,4 @@ if __name__ == '__main__': while True: readObj.run(path='/home/jchavez/jicamarca/mocked_data/') # readObj.printInfo() - # readObj.printNumberOfBlock() + # readObj.printNumberOfBlock() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_heispectra.py b/schainpy/model/io/jroIO_heispectra.py index ddbffab..bbf8748 100644 --- a/schainpy/model/io/jroIO_heispectra.py +++ b/schainpy/model/io/jroIO_heispectra.py @@ -13,12 +13,12 @@ from time import sleep try: import pyfits -except ImportError, e: - print "Fits data cannot be used. Install pyfits module" +except ImportError as e: + print("Fits data cannot be used. Install pyfits module") from xml.etree.ElementTree import ElementTree -from jroIO_base import isRadarFolder, isNumber +from .jroIO_base import isRadarFolder, isNumber from schainpy.model.data.jrodata import Fits from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit @@ -240,7 +240,7 @@ class FitsWriter(Operation): self.setFile = setFile self.flagIsNewFile = 1 - print 'Writing the file: %s'%self.filename + print('Writing the file: %s'%self.filename) self.setFitsHeader(self.dataOut, self.metadatafile) @@ -327,7 +327,7 @@ class FitsReader(ProcessingUnit): try: fitsObj = pyfits.open(filename,'readonly') except: - print "File %s can't be opened" %(filename) + print("File %s can't be opened" %(filename)) return None header = fitsObj[0].header @@ -355,7 +355,7 @@ class FitsReader(ProcessingUnit): idFile += 1 if not(idFile < len(self.filenameList)): self.flagNoMoreFiles = 1 - print "No more Files" + print("No more Files") return 0 filename = self.filenameList[idFile] @@ -373,7 +373,7 @@ class 
FitsReader(ProcessingUnit): self.fileSize = fileSize self.fitsObj = fitsObj self.blockIndex = 0 - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 @@ -398,10 +398,10 @@ class FitsReader(ProcessingUnit): headerObj = self.fitsObj[0] self.header_dict = headerObj.header - if 'EXPNAME' in headerObj.header.keys(): + if 'EXPNAME' in list(headerObj.header.keys()): self.expName = headerObj.header['EXPNAME'] - if 'DATATYPE' in headerObj.header.keys(): + if 'DATATYPE' in list(headerObj.header.keys()): self.dataType = headerObj.header['DATATYPE'] self.datetimestr = headerObj.header['DATETIME'] @@ -421,7 +421,7 @@ class FitsReader(ProcessingUnit): # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt - if 'COMMENT' in headerObj.header.keys(): + if 'COMMENT' in list(headerObj.header.keys()): self.comments = headerObj.header['COMMENT'] self.readHeightList() @@ -498,10 +498,10 @@ class FitsReader(ProcessingUnit): thisDate += datetime.timedelta(1) if pathList == []: - print "Any folder was found for the date range: %s-%s" %(startDate, endDate) + print("Any folder was found for the date range: %s-%s" %(startDate, endDate)) return None, None - print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate) + print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)) filenameList = [] datetimeList = [] @@ -525,14 +525,14 @@ class FitsReader(ProcessingUnit): datetimeList.append(thisDatetime) if not(filenameList): - print "Any file was found for the time range %s - %s" %(startTime, endTime) + print("Any file was found for the time range %s - %s" %(startTime, endTime)) return None, None - print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime) - print + print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)) + print() for i in 
range(len(filenameList)): - print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()) + print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())) self.filenameList = filenameList self.datetimeList = datetimeList @@ -552,22 +552,22 @@ class FitsReader(ProcessingUnit): walk = True): if path == None: - raise ValueError, "The path is not valid" + raise ValueError("The path is not valid") if ext == None: ext = self.ext if not(online): - print "Searching files in offline mode ..." + print("Searching files in offline mode ...") pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate, startTime=startTime, endTime=endTime, set=set, expLabel=expLabel, ext=ext, walk=walk) if not(pathList): - print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path, + print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path, datetime.datetime.combine(startDate,startTime).ctime(), - datetime.datetime.combine(endDate,endTime).ctime()) + datetime.datetime.combine(endDate,endTime).ctime())) sys.exit(-1) @@ -582,11 +582,11 @@ class FitsReader(ProcessingUnit): if not(self.setNextFile()): if (startDate!=None) and (endDate!=None): - print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) + print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())) elif startDate != None: - print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) + print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())) else: - print "No files" + print("No files") sys.exit(-1) @@ -638,7 +638,7 @@ class FitsReader(ProcessingUnit): self.__rdBasicHeader() return 1 - print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) + print("\tWaiting %0.2f seconds for the next block, try %03d ..." 
% (self.delay, nTries+1)) sleep( self.delay ) @@ -691,7 +691,7 @@ class FitsReader(ProcessingUnit): if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'Process finished' + print('Process finished') return 0 self.flagDiscontinuousBlock = 0 @@ -845,4 +845,4 @@ class SpectraHeisWriter(Operation): self.setup(dataOut, **kwargs) self.isConfig = True - self.putData() + self.putData() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_hf.py b/schainpy/model/io/jroIO_hf.py index 5ba1787..e262a44 100644 --- a/schainpy/model/io/jroIO_hf.py +++ b/schainpy/model/io/jroIO_hf.py @@ -68,10 +68,10 @@ def getFileFromSet(path, ext, set=None): if set == None: return validFilelist[-1] - print "set =" ,set + print("set =" ,set) for thisFile in validFilelist: if set <= int(thisFile[6:16]): - print thisFile,int(thisFile[6:16]) + print(thisFile,int(thisFile[6:16])) return thisFile return validFilelist[-1] @@ -83,8 +83,8 @@ def getFileFromSet(path, ext, set=None): return myfile[0] else: filename = '*%10.10d%s'%(set,ext.lower()) - print 'the filename %s does not exist'%filename - print '...going to the last file: ' + print('the filename %s does not exist'%filename) + print('...going to the last file: ') if validFilelist: validFilelist = sorted( validFilelist, key=str.lower ) @@ -115,7 +115,7 @@ Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext" try: number= int(thisFile[6:16]) except: - print "There is a file or folder with different format" + print("There is a file or folder with different format") if not isNumber(number): continue @@ -256,7 +256,7 @@ class HFReader(ProcessingUnit): self.status=1 else: self.status=0 - print 'Path %s does not exits'%self.path + print('Path %s does not exits'%self.path) return return @@ -282,12 +282,12 @@ class HFReader(ProcessingUnit): pat = '\d+.\d+' dirnameList = [re.search(pat,x) for x in os.listdir(self.path)] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in 
dirnameList if x!=None] dirnameList = [x.string for x in dirnameList] if not(online): dirnameList = [self.__selDates(x) for x in dirnameList] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in dirnameList if x!=None] if len(dirnameList)>0: self.status = 1 @@ -301,8 +301,8 @@ class HFReader(ProcessingUnit): def __getTimeFromData(self): startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime) endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime) - print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader) - print '........................................' + print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)) + print('........................................') filter_filenameList=[] self.filenameList.sort() for i in range(len(self.filenameList)-1): @@ -363,24 +363,24 @@ class HFReader(ProcessingUnit): self.flag_nextfile=False else: - print filename - print "PRIMERA CONDICION" + print(filename) + print("PRIMERA CONDICION") #if self.filename_next_set== int(filename[6:16]): - print "TODO BIEN" + print("TODO BIEN") if filename == None: - raise ValueError, "corregir" + raise ValueError("corregir") self.dirnameList=[filename] fullfilename=self.path+"/"+filename self.filenameList=[fullfilename] self.filename_next_set=int(filename[6:16])+10 - print "Setting next file",self.filename_next_set + print("Setting next file",self.filename_next_set) self.set=int(filename[6:16]) if True: pass else: - print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO" + print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO") else: filename =getlastFileFromPath(self.path,self.ext) @@ -394,24 +394,24 @@ class HFReader(ProcessingUnit): self.flag_nextfile=False else: filename=getFileFromSet(self.path,self.ext,self.set) - print filename - print "PRIMERA CONDICION" + print(filename) + print("PRIMERA CONDICION") #if self.filename_next_set== int(filename[6:16]): - print "TODO 
BIEN" + print("TODO BIEN") if filename == None: - raise ValueError, "corregir" + raise ValueError("corregir") self.dirnameList=[filename] fullfilename=self.path+"/"+filename self.filenameList=[fullfilename] self.filename_next_set=int(filename[6:16])+10 - print "Setting next file",self.filename_next_set + print("Setting next file",self.filename_next_set) self.set=int(filename[6:16]) if True: pass else: - print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO" + print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO") @@ -434,7 +434,7 @@ class HFReader(ProcessingUnit): self.__selectDataForTimes() for i in range(len(self.filenameList)): - print "%s"% (self.filenameList[i]) + print("%s"% (self.filenameList[i])) return @@ -456,7 +456,7 @@ class HFReader(ProcessingUnit): self.__checkPath() fullpath=path - print "%s folder was found: " %(fullpath ) + print("%s folder was found: " %(fullpath )) if set == None: self.set=None @@ -518,7 +518,7 @@ class HFReader(ProcessingUnit): idFile += 1 if not (idFile < len(self.filenameList)): self.flagNoMoreFiles = 1 - print "No more Files" + print("No more Files") return 0 filename = self.filenameList[idFile] hfFilePointer =h5py.File(filename,'r') @@ -534,14 +534,14 @@ class HFReader(ProcessingUnit): self.hfFilePointer = hfFilePointer hfFilePointer.close() self.__t0=epoc - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 def __setNextFileOnline(self): """ """ - print "SOY NONE",self.set + print("SOY NONE",self.set) if self.set==None: pass else: @@ -552,7 +552,7 @@ class HFReader(ProcessingUnit): self.__selectDataForTimes(online=True) filename = self.filenameList[0] while self.filename_online == filename: - print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile) + print('waiting %d seconds to get a new file...'%(self.__waitForNewFile)) time.sleep(self.__waitForNewFile) #self.__findDataForDates(online=True) self.set=self.filename_next_set @@ -563,27 +563,27 @@ class 
HFReader(ProcessingUnit): #print filename sizeoffile=os.path.getsize(filename) if sizeoffile<1670240: - print "%s is not the rigth size"%filename + print("%s is not the rigth size"%filename) delay=50 - print 'waiting %d seconds for delay...'%(delay) + print('waiting %d seconds for delay...'%(delay)) time.sleep(delay) sizeoffile=os.path.getsize(filename) if sizeoffile<1670240: delay=50 - print 'waiting %d more seconds for delay...'%(delay) + print('waiting %d more seconds for delay...'%(delay)) time.sleep(delay) sizeoffile=os.path.getsize(filename) if sizeoffile<1670240: delay=50 - print 'waiting %d more seconds for delay...'%(delay) + print('waiting %d more seconds for delay...'%(delay)) time.sleep(delay) try: hfFilePointer=h5py.File(filename,'r') except: - print "Error reading file %s"%filename + print("Error reading file %s"%filename) self.filename_online=filename epoc=hfFilePointer['t'].value @@ -596,7 +596,7 @@ class HFReader(ProcessingUnit): self.flagIsNewFile = 1 self.filename = filename - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 def __getExpParameters(self): @@ -622,7 +622,7 @@ class HFReader(ProcessingUnit): ''' if path==None: - raise ValueError,"The path is not valid" + raise ValueError("The path is not valid") if ext==None: ext = self.ext @@ -634,11 +634,11 @@ class HFReader(ProcessingUnit): #print set if not(online): - print "Searching files in offline mode..." + print("Searching files in offline mode...") self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk) else: - print "Searching files in online mode..." 
+ print("Searching files in online mode...") self.searchFilesOnLine(path, walk,ext,set=set) if set==None: pass @@ -659,7 +659,7 @@ class HFReader(ProcessingUnit): if not(self.filenameList): - print "There is no files into the folder: %s"%(path) + print("There is no files into the folder: %s"%(path)) sys.exit(-1) self.__getExpParameters() @@ -745,7 +745,7 @@ class HFReader(ProcessingUnit): self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth - self.dataOut.channelList = range(self.nChannels) + self.dataOut.channelList = list(range(self.nChannels)) #self.dataOut.channelIndexList = None @@ -833,7 +833,7 @@ class HFReader(ProcessingUnit): def getData(self): if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'Process finished' + print('Process finished') return 0 if self.__hasNotDataInBuffer(): @@ -860,4 +860,4 @@ class HFReader(ProcessingUnit): if not self.isConfig: self.setup(**kwargs) self.isConfig = True - self.getData() + self.getData() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_kamisr.py b/schainpy/model/io/jroIO_kamisr.py index 7e4d14a..bd27fb6 100644 --- a/schainpy/model/io/jroIO_kamisr.py +++ b/schainpy/model/io/jroIO_kamisr.py @@ -111,7 +111,7 @@ class AMISRReader(ProcessingUnit): self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk) if not(self.filenameList): - print "There is no files into the folder: %s"%(path) + print("There is no files into the folder: %s"%(path)) sys.exit(-1) @@ -177,7 +177,7 @@ class AMISRReader(ProcessingUnit): #filling system header parameters self.__nSamples = self.nsa self.newProfiles = self.nprofiles/self.nchannels - self.__channelList = range(self.nchannels) + self.__channelList = list(range(self.nchannels)) self.__frequency = self.frequency[0][0] @@ -200,7 +200,7 @@ class AMISRReader(ProcessingUnit): self.status = 1 else: self.status = 0 - print 'Path:%s does not exists'%self.path + print('Path:%s does not 
exists'%self.path) return @@ -225,11 +225,11 @@ class AMISRReader(ProcessingUnit): pat = '\d+.\d+' dirnameList = [re.search(pat,x) for x in os.listdir(self.path)] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in dirnameList if x!=None] dirnameList = [x.string for x in dirnameList] if not(online): dirnameList = [self.__selDates(x) for x in dirnameList] - dirnameList = filter(lambda x:x!=None,dirnameList) + dirnameList = [x for x in dirnameList if x!=None] if len(dirnameList)>0: self.status = 1 self.dirnameList = dirnameList @@ -242,8 +242,8 @@ class AMISRReader(ProcessingUnit): startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime) endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime) - print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader) - print '........................................' + print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)) + print('........................................') filter_filenameList = [] self.filenameList.sort() #for i in range(len(self.filenameList)-1): @@ -288,7 +288,7 @@ class AMISRReader(ProcessingUnit): def __getFilenameList(self, fileListInKeys, dirList): for value in fileListInKeys: - dirName = value.keys()[0] + dirName = list(value.keys())[0] for file in value[dirName]: filename = os.path.join(dirName, file) self.filenameList.append(filename) @@ -366,7 +366,7 @@ class AMISRReader(ProcessingUnit): self.__selectDataForTimes() for i in range(len(self.filenameList)): - print "%s" %(self.filenameList[i]) + print("%s" %(self.filenameList[i])) return @@ -377,7 +377,7 @@ class AMISRReader(ProcessingUnit): idFile += 1 if not(idFile < len(self.filenameList)): self.flagNoMoreFiles = 1 - print "No more Files" + print("No more Files") return 0 filename = self.filenameList[idFile] @@ -392,7 +392,7 @@ class AMISRReader(ProcessingUnit): self.amisrFilePointer = amisrFilePointer - print "Setting the file: 
%s"%self.filename + print("Setting the file: %s"%self.filename) return 1 @@ -404,7 +404,7 @@ class AMISRReader(ProcessingUnit): filename = self.filenameList[0] wait = 0 while self.__filename_online == filename: - print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile) + print('waiting %d seconds to get a new file...'%(self.__waitForNewFile)) if wait == 5: return 0 sleep(self.__waitForNewFile) @@ -417,7 +417,7 @@ class AMISRReader(ProcessingUnit): self.amisrFilePointer = h5py.File(filename,'r') self.flagIsNewFile = 1 self.filename = filename - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) return 1 @@ -585,7 +585,7 @@ class AMISRReader(ProcessingUnit): if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'Process finished' + print('Process finished') return 0 if self.__hasNotDataInBuffer(): diff --git a/schainpy/model/io/jroIO_madrigal.py b/schainpy/model/io/jroIO_madrigal.py index eb764d0..8a14ba3 100644 --- a/schainpy/model/io/jroIO_madrigal.py +++ b/schainpy/model/io/jroIO_madrigal.py @@ -63,10 +63,10 @@ def load_json(obj): iterable = obj if isinstance(iterable, dict): - return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, unicode) else v - for k, v in iterable.items()} + return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, str) else v + for k, v in list(iterable.items())} elif isinstance(iterable, (list, tuple)): - return [str(v) if isinstance(v, unicode) else v for v in iterable] + return [str(v) if isinstance(v, str) else v for v in iterable] return iterable @@ -107,10 +107,10 @@ class MADReader(JRODataReader, ProcessingUnit): self.ind2DList = load_json(kwargs.get('ind2DList', "[\"GDALT\"]")) if self.path is None: - raise ValueError, 'The path is not valid' + raise ValueError('The path is not valid') if format is None: - raise ValueError, 'The format is not valid choose simple or hdf5' + raise ValueError('The format is not valid choose 
simple or hdf5') elif format.lower() in ('simple', 'txt'): self.ext = '.txt' elif format.lower() in ('cedar',): @@ -122,7 +122,7 @@ class MADReader(JRODataReader, ProcessingUnit): self.fileId = 0 if not self.fileList: - raise Warning, 'There is no files matching these date in the folder: {}. \n Check startDate and endDate'.format(path) + raise Warning('There is no files matching these date in the folder: {}. \n Check startDate and endDate'.format(path)) self.setNextFile() @@ -198,7 +198,7 @@ class MADReader(JRODataReader, ProcessingUnit): log.success('Spatial parameters: {}'.format(','.join(s_parameters)), 'MADReader') - for param in self.oneDDict.keys(): + for param in list(self.oneDDict.keys()): if param.lower() not in self.parameters: log.warning( 'Parameter {} not found will be ignored'.format( @@ -206,7 +206,7 @@ class MADReader(JRODataReader, ProcessingUnit): 'MADReader') self.oneDDict.pop(param, None) - for param, value in self.twoDDict.items(): + for param, value in list(self.twoDDict.items()): if param.lower() not in self.parameters: log.warning( 'Parameter {} not found, it will be ignored'.format( @@ -352,11 +352,11 @@ class MADReader(JRODataReader, ProcessingUnit): parameters = [None for __ in self.parameters] - for param, attr in self.oneDDict.items(): + for param, attr in list(self.oneDDict.items()): x = self.parameters.index(param.lower()) setattr(self.dataOut, attr, self.buffer[0][x]) - for param, value in self.twoDDict.items(): + for param, value in list(self.twoDDict.items()): x = self.parameters.index(param.lower()) if self.ext == '.txt': y = self.parameters.index(self.ind2DList[0].lower()) @@ -376,7 +376,7 @@ class MADReader(JRODataReader, ProcessingUnit): self.output[value[0]][value[1]] = dummy parameters[value[1]] = param - for key, value in self.output.items(): + for key, value in list(self.output.items()): setattr(self.dataOut, key, numpy.array(value)) self.dataOut.parameters = [s for s in parameters if s] @@ -508,7 +508,7 @@ class 
MADWriter(Operation): 'Creating file: {}'.format(self.fullname), 'MADWriter') self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True) - except ValueError, e: + except ValueError as e: log.error( 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"', 'MADWriter') @@ -528,7 +528,7 @@ class MADWriter(Operation): heights = self.dataOut.heightList if self.ext == '.dat': - for key, value in self.twoDDict.items(): + for key, value in list(self.twoDDict.items()): if isinstance(value, str): data = getattr(self.dataOut, value) invalid = numpy.isnan(data) @@ -540,7 +540,7 @@ class MADWriter(Operation): data[invalid] = self.missing out = {} - for key, value in self.twoDDict.items(): + for key, value in list(self.twoDDict.items()): key = key.lower() if isinstance(value, str): if 'db' in value.lower(): @@ -576,8 +576,8 @@ class MADWriter(Operation): endTime.minute, endTime.second, endTime.microsecond/10000, - self.oneDDict.keys(), - self.twoDDict.keys(), + list(self.oneDDict.keys()), + list(self.twoDDict.keys()), len(index), **self.extra_args ) @@ -639,4 +639,4 @@ class MADWriter(Operation): def close(self): if self.counter > 0: - self.setHeader() + self.setHeader() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_matlab.py b/schainpy/model/io/jroIO_matlab.py index 4e7512c..175ba9a 100644 --- a/schainpy/model/io/jroIO_matlab.py +++ b/schainpy/model/io/jroIO_matlab.py @@ -29,7 +29,7 @@ class matoffReader(ProcessingUnit): def __setHeader(self, datastuff): self.dataOut.pairsList=[(0,1)] - self.dataOut.channelList = range(np.array(datastuff.get('power')).shape[1]) + self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1])) self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this! self.dataOut.nIncohInt = 20 self.dataOut.nCohInt = 1 #this! 
@@ -39,7 +39,7 @@ class matoffReader(ProcessingUnit): self.dataOut.heightList = np.array(datastuff.get('hts')).flatten() def __readFile(self, currentfile): - print "Reading from this file:" + currentfile + print("Reading from this file:" + currentfile) #filesplit=currentfile.split("\\") filesplit=currentfile.split("/") @@ -64,7 +64,7 @@ class matoffReader(ProcessingUnit): # self.utcmatcounter=0 # print self.utcmatcounter - print self.utcfirst + print(self.utcfirst) try: datastuff=sio.loadmat(currentfile) except: @@ -115,7 +115,7 @@ class matoffReader(ProcessingUnit): utclist=[] if not dirList: - print "No directories found" + print("No directories found") return [] #if self.online: @@ -146,7 +146,7 @@ class matoffReader(ProcessingUnit): utclist.append(utctime) if not dirListFiltered: - print "filtro" + print("filtro") return [] for thisDir in dirListFiltered: @@ -188,7 +188,7 @@ class matoffReader(ProcessingUnit): if nTries > 3: break - print "Waiting %d seconds ..." %seconds + print("Waiting %d seconds ..." 
%seconds) time.sleep(40) if not (len(filelist) > ncurrentfiles): @@ -227,7 +227,7 @@ class matoffReader(ProcessingUnit): self.fileList = fileList - print "fin setup" + print("fin setup") def run(self,path=None,startDate=None, endDate=None, startTime=datetime.time(0,0,0), @@ -251,7 +251,7 @@ class matoffReader(ProcessingUnit): if not self.fileList: self.dataOut.flagNoData = True - print "lista vacia" + print("lista vacia") return currentfile = self.__getNextFile() diff --git a/schainpy/model/io/jroIO_mira35c.py b/schainpy/model/io/jroIO_mira35c.py index 5ecc67a..19acf20 100644 --- a/schainpy/model/io/jroIO_mira35c.py +++ b/schainpy/model/io/jroIO_mira35c.py @@ -48,7 +48,7 @@ class Header(object): message += self.__class__.__name__.upper() + "\n" message += "#" * 50 + "\n" - keyList = self.__dict__.keys() + keyList = list(self.__dict__.keys()) keyList.sort() for key in keyList: @@ -333,7 +333,7 @@ class SRVIHeader(Header): self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0]) self.SizeOfSRVI1 = header['SizeOfSRVI1'][0] # 16 - print 'Pointer fp SRVIheader', fp.tell() + print('Pointer fp SRVIheader', fp.tell()) SRVI_STRUCTURE = numpy.dtype([ @@ -435,9 +435,9 @@ class RecordHeader(Header): # print 'Datasize',self.Datasize #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec - print '==============================================' + print('==============================================') - print '==============================================' + print('==============================================') return 1 @@ -572,7 +572,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'NoData se vuelve true' + print('NoData se vuelve true') return 0 self.fp = self.path @@ -602,7 +602,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader ''' # The address of the folder is generated the name of the .fdt file that will be read 
- print "File: ", self.fileSelector + 1 + print("File: ", self.fileSelector + 1) if self.fileSelector < len(self.filenameList): @@ -642,7 +642,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader self.readBlock() # Block reading else: - print 'readFile FlagNoData becomes true' + print('readFile FlagNoData becomes true') self.flagNoMoreFiles = True self.dataOut.flagNoData = True self.FileHeaderFlag == True @@ -673,7 +673,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader self.blocksize = self.srviHeader.SizeOfDataBlock1 # Se obtiene el tamao del bloque if self.blocksize == 148: - print 'blocksize == 148 bug' + print('blocksize == 148 bug') jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1) # Se obtiene la cabecera del SRVI @@ -691,7 +691,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader npw1 = self.recordheader.npw1 npw2 = self.recordheader.npw2 - self.dataOut.channelList = range(1) + self.dataOut.channelList = list(range(1)) self.dataOut.nIncohInt = self.Num_inCoh self.dataOut.nProfiles = self.Num_Bins self.dataOut.nCohInt = 1 @@ -701,7 +701,7 @@ class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader self.dataOut.outputInterval = self.dataOut.getTimeInterval() self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \ - numpy.array(range(self.Num_Hei)) * self.__deltaHeigth + numpy.array(list(range(self.Num_Hei))) * self.__deltaHeigth self.HSDVsign = numpy.fromfile(self.fp, [('HSDV', numpy.str_, 4)], 1) self.SizeHSDV = numpy.fromfile(self.fp, [('SizeHSDV', ' 1: - print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate) + print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)) else: - print "[Reading] data was found for the date %s" %(dateList[0]) + print("[Reading] data was found for the date %s" %(dateList[0])) filenameList = [] 
datetimeList = [] @@ -172,11 +172,11 @@ class ParamReader(ProcessingUnit): datetimeList.append(thisDatetime) if not(filenameList): - print "[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) + print("[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())) return None, None - print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime) - print + print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)) + print() # for i in range(len(filenameList)): # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime()) @@ -218,7 +218,7 @@ class ParamReader(ProcessingUnit): except IOError: traceback.print_exc() - raise IOError, "The file %s can't be opened" %(filename) + raise IOError("The file %s can't be opened" %(filename)) #chino rata #In case has utctime attribute grp2 = grp1['utctime'] @@ -271,7 +271,7 @@ class ParamReader(ProcessingUnit): idFile = self.fileIndex if not(idFile < len(self.filenameList)): - print "No more Files" + print("No more Files") return 0 filename = self.filenameList[idFile] @@ -282,7 +282,7 @@ class ParamReader(ProcessingUnit): self.fp = filePointer - print "Setting the file: %s"%self.filename + print("Setting the file: %s"%self.filename) # self.__readMetadata() self.__setBlockList() @@ -361,7 +361,7 @@ class ParamReader(ProcessingUnit): listMetaname = [] listMetadata = [] - for item in gp.items(): + for item in list(gp.items()): name = item[0] if name=='array dimensions': @@ -389,7 +389,7 @@ class ParamReader(ProcessingUnit): listdataname = [] listdata = [] - for item in grp.items(): + for item in list(grp.items()): name = item[0] listdataname.append(name) @@ -921,7 +921,7 @@ class ParamWriter(Operation): # self.nDims = 
nDims # self.nDimsForDs = nDimsForDs #Saving variables - print 'Writing the file: %s'%filename + print('Writing the file: %s'%filename) self.filename = filename # self.fp = fp # self.grp = grp @@ -1092,4 +1092,4 @@ class ParamWriter(Operation): self.setNextFile() self.putData() - return + return \ No newline at end of file diff --git a/schainpy/model/io/jroIO_spectra.py b/schainpy/model/io/jroIO_spectra.py index bebef63..410f942 100644 --- a/schainpy/model/io/jroIO_spectra.py +++ b/schainpy/model/io/jroIO_spectra.py @@ -5,7 +5,7 @@ Created on Jul 2, 2014 ''' import numpy -from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter +from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader from schainpy.model.data.jrodata import Spectra @@ -325,7 +325,7 @@ class SpectraReader(JRODataReader, ProcessingUnit): self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) - self.dataOut.channelList = range(self.systemHeaderObj.nChannels) + self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels)) self.dataOut.flagShiftFFT = True #Data is always shifted @@ -354,7 +354,7 @@ class SpectraReader(JRODataReader, ProcessingUnit): if self.flagNoMoreFiles: self.dataOut.flagNoData = True - print 'Process finished' + print('Process finished') return 0 self.flagDiscontinuousBlock = 0 @@ -676,4 +676,4 @@ class SpectraWriter(JRODataWriter, Operation): self.processingHeaderObj.processFlags = self.getProcessFlags() - self.setBasicHeader() + self.setBasicHeader() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_usrp.py b/schainpy/model/io/jroIO_usrp.py index 71bf270..c58254f 100644 --- a/schainpy/model/io/jroIO_usrp.py +++ b/schainpy/model/io/jroIO_usrp.py @@ -19,7 +19,7 @@ from 
schainpy.model.proc.jroproc_base import ProcessingUnit, Operation try: import digital_rf_hdf5 except: - print 'You should install "digital_rf_hdf5" module if you want to read USRP data' + print('You should install "digital_rf_hdf5" module if you want to read USRP data') class USRPReader(ProcessingUnit): ''' @@ -209,7 +209,7 @@ class USRPReader(ProcessingUnit): ''' if not os.path.isdir(path): - raise ValueError, "[Reading] Directory %s does not exist" %path + raise ValueError("[Reading] Directory %s does not exist" %path) try: self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True) @@ -219,10 +219,10 @@ class USRPReader(ProcessingUnit): channelNameList = self.digitalReadObj.get_channels() if not channelNameList: - raise ValueError, "[Reading] Directory %s does not have any files" %path + raise ValueError("[Reading] Directory %s does not have any files" %path) if not channelList: - channelList = range(len(channelNameList)) + channelList = list(range(len(channelNameList))) ########## Reading metadata ###################### @@ -241,7 +241,7 @@ class USRPReader(ProcessingUnit): self.__frequency = this_metadata_file['fc'].value if not self.__frequency: - raise ValueError, "Center Frequency is not defined in metadata file" + raise ValueError("Center Frequency is not defined in metadata file") try: self.__timezone = this_metadata_file['timezone'].value @@ -299,7 +299,7 @@ class USRPReader(ProcessingUnit): if not nSamples: if not ippKm: - raise ValueError, "[Reading] nSamples or ippKm should be defined" + raise ValueError("[Reading] nSamples or ippKm should be defined") nSamples = int(ippKm / (1e6*0.15/self.__sample_rate)) @@ -346,14 +346,14 @@ class USRPReader(ProcessingUnit): self.__setFileHeader() self.isConfig = True - print "[Reading] USRP Data was found from %s to %s " %( + print("[Reading] USRP Data was found from %s to %s " %( datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), 
datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) - ) + )) - print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), + print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone) - ) + )) def __reload(self): @@ -366,7 +366,7 @@ class USRPReader(ProcessingUnit): # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) # ) - print "[Reading] reloading metadata ..." + print("[Reading] reloading metadata ...") try: self.digitalReadObj.reload(complete_update=True) @@ -380,11 +380,11 @@ class USRPReader(ProcessingUnit): if end_index > self.__endUTCSecond*self.__sample_rate: self.__endUTCSecond = 1.0*end_index/self.__sample_rate - print - print "[Reading] New timerange found [%s, %s] " %( + print() + print("[Reading] New timerange found [%s, %s] " %( datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) - ) + )) return True @@ -399,7 +399,7 @@ class USRPReader(ProcessingUnit): self.__thisUnixSample += self.__samples_to_read if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate: - print "[Reading] There are no more data into selected time-range" + print("[Reading] There are no more data into selected time-range") self.__reload() @@ -418,17 +418,17 @@ class USRPReader(ProcessingUnit): self.__samples_to_read, thisChannelName) - except IOError, e: + except IOError as e: #read next profile self.__flagDiscontinuousBlock = True - print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e + print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e) break if 
result.shape[0] != self.__samples_to_read: self.__flagDiscontinuousBlock = True - print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), result.shape[0], - self.__samples_to_read) + self.__samples_to_read)) break self.__data_buffer[indexChannel,:] = result*volt_scale @@ -442,9 +442,9 @@ class USRPReader(ProcessingUnit): if not dataOk: return False - print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), + print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), self.__samples_to_read, - self.__timeInterval) + self.__timeInterval)) self.__bufferIndex = 0 @@ -490,7 +490,7 @@ class USRPReader(ProcessingUnit): return False if self.__flagDiscontinuousBlock: - print '[Reading] discontinuous block found ... continue with the next block' + print('[Reading] discontinuous block found ... 
continue with the next block') continue if not self.__online: @@ -500,7 +500,7 @@ class USRPReader(ProcessingUnit): if err_counter > nTries: return False - print '[Reading] waiting %d seconds to read a new block' %seconds + print('[Reading] waiting %d seconds to read a new block' %seconds) sleep(seconds) self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples] @@ -532,7 +532,7 @@ class USRPReader(ProcessingUnit): ''' ''' - print self.profileIndex + print(self.profileIndex) def run(self, **kwargs): ''' @@ -597,4 +597,4 @@ if __name__ == '__main__': while True: readObj.run(path='/Volumes/DATA/haystack/passive_radar/') # readObj.printInfo() - readObj.printNumberOfBlock() + readObj.printNumberOfBlock() \ No newline at end of file diff --git a/schainpy/model/io/jroIO_usrp_api.py b/schainpy/model/io/jroIO_usrp_api.py index 62c4321..1fc35e0 100644 --- a/schainpy/model/io/jroIO_usrp_api.py +++ b/schainpy/model/io/jroIO_usrp_api.py @@ -5,7 +5,7 @@ Created on Jul 15, 2014 ''' import time import threading -import cPickle +import pickle # try: # from gevent import sleep @@ -109,9 +109,9 @@ class USRPReaderAPI(USRPReader, threading.Thread): ''' if not self.isConfig: - raise RuntimeError, 'setup() method has to be called before start()' + raise RuntimeError('setup() method has to be called before start()') - print "Running ..." 
+ print("Running ...") while True: @@ -122,7 +122,7 @@ class USRPReaderAPI(USRPReader, threading.Thread): if not self.getData(): break - print ".", + print(".", end=' ') self.__mySerial = obj2Serial(self.dataOut, keyList = self.__DATAKEYLIST, @@ -134,6 +134,6 @@ class USRPReaderAPI(USRPReader, threading.Thread): # sleep(0.1) - print "Closing thread" + print("Closing thread") return \ No newline at end of file diff --git a/schainpy/model/io/jroIO_voltage.py b/schainpy/model/io/jroIO_voltage.py index 95a897f..4485896 100644 --- a/schainpy/model/io/jroIO_voltage.py +++ b/schainpy/model/io/jroIO_voltage.py @@ -6,13 +6,13 @@ Created on Jul 2, 2014 import numpy -from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter +from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader from schainpy.model.data.jrodata import Voltage import zmq import tempfile -from StringIO import StringIO +from io import StringIO # from _sha import blocksize @@ -286,7 +286,7 @@ class VoltageReader(JRODataReader, ProcessingUnit): self.dataOut.heightList = numpy.arange( self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight - self.dataOut.channelList = range(self.systemHeaderObj.nChannels) + self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels)) self.dataOut.nCohInt = self.processingHeaderObj.nCohInt @@ -307,12 +307,12 @@ class VoltageReader(JRODataReader, ProcessingUnit): return if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0: - raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % ( - 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock) + raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % ( + 1. 
/ self.nTxs, self.processingHeaderObj.profilesPerBlock)) if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0: - raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" % ( - self.nTxs, self.processingHeaderObj.nHeights) + raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % ( + self.nTxs, self.processingHeaderObj.nHeights)) self.datablock = self.datablock.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs)) @@ -345,7 +345,7 @@ class VoltageReader(JRODataReader, ProcessingUnit): elif datatype == 5: datatype_str = numpy.dtype([('real', ' Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in dataOut.beamCodeDict.items()] + print('Number of Records by File: %d'%dataOut.nRecords) + print('Number of Pulses: %d'%dataOut.nProfiles) + print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame) + print('Number of Samples by Pulse: %d'%len(dataOut.heightList)) + print('Ipp Seconds: %f'%dataOut.ippSeconds) + print('Number of Beams: %d'%dataOut.nBeams) + print('BeamCodes:') + beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())] for b in beamStrList: - print b + print(b) self.__isPrinted = True return @@ -93,7 +93,7 @@ class BeamSelector(Operation): return 1 else: - raise ValueError, "BeamSelector needs beam value" + raise ValueError("BeamSelector needs beam value") return 0 @@ -117,7 +117,7 @@ class ProfileToChannels(Operation): dataOut.flagNoData = True if not(self.__isConfig): - nchannels = len(dataOut.beamRangeDict.keys()) + nchannels = len(list(dataOut.beamRangeDict.keys())) nsamples = dataOut.nHeights self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128') dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)] @@ -136,7 +136,7 @@ class ProfileToChannels(Operation): 
if self.__counter_chan >= self.buffer.shape[0]: self.__counter_chan = 0 dataOut.data = self.buffer.copy() - dataOut.channelList = range(self.buffer.shape[0]) + dataOut.channelList = list(range(self.buffer.shape[0])) self.__isConfig = False dataOut.flagNoData = False pass diff --git a/schainpy/model/proc/jroproc_base.py b/schainpy/model/proc/jroproc_base.py index 31873d6..f78f4f8 100644 --- a/schainpy/model/proc/jroproc_base.py +++ b/schainpy/model/proc/jroproc_base.py @@ -104,7 +104,7 @@ class ProcessingUnit(object): def getOperationObj(self, objId): - if objId not in self.operations2RunDict.keys(): + if objId not in list(self.operations2RunDict.keys()): return None return self.operations2RunDict[objId] @@ -248,22 +248,22 @@ class ProcessingUnit(object): if opType == 'self': if not opName: - raise ValueError, "opName parameter should be defined" + raise ValueError("opName parameter should be defined") sts = self.callMethod(opName, opId) elif opType == 'other' or opType == 'external' or opType == 'plotter': if not opId: - raise ValueError, "opId parameter should be defined" + raise ValueError("opId parameter should be defined") - if opId not in self.operations2RunDict.keys(): - raise ValueError, "Any operation with id=%s has been added" %str(opId) + if opId not in list(self.operations2RunDict.keys()): + raise ValueError("Any operation with id=%s has been added" %str(opId)) sts = self.callObject(opId) else: - raise ValueError, "opType should be 'self', 'external' or 'plotter'; and not '%s'" %opType + raise ValueError("opType should be 'self', 'external' or 'plotter'; and not '%s'" %opType) return sts @@ -357,4 +357,4 @@ class Operation(object): def close(self): - pass + pass \ No newline at end of file diff --git a/schainpy/model/proc/jroproc_correlation.py b/schainpy/model/proc/jroproc_correlation.py index 67c72d3..f3b9ae0 100644 --- a/schainpy/model/proc/jroproc_correlation.py +++ b/schainpy/model/proc/jroproc_correlation.py @@ -1,6 +1,6 @@ import numpy -from 
jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Correlation, hildebrand_sekhon class CorrelationProc(ProcessingUnit): diff --git a/schainpy/model/proc/jroproc_heispectra.py b/schainpy/model/proc/jroproc_heispectra.py index 1c6976f..23f6989 100644 --- a/schainpy/model/proc/jroproc_heispectra.py +++ b/schainpy/model/proc/jroproc_heispectra.py @@ -1,6 +1,6 @@ import numpy -from jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import SpectraHeis class SpectraHeisProc(ProcessingUnit): @@ -99,7 +99,7 @@ class SpectraHeisProc(ProcessingUnit): return - raise ValueError, "The type object %s is not valid"%(self.dataIn.type) + raise ValueError("The type object %s is not valid"%(self.dataIn.type)) def selectChannels(self, channelList): @@ -133,8 +133,8 @@ class SpectraHeisProc(ProcessingUnit): for channelIndex in channelIndexList: if channelIndex not in self.dataOut.channelIndexList: - print channelIndexList - raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex + print(channelIndexList) + raise ValueError("The value %d in channelIndexList is not valid" %channelIndex) # nChannels = len(channelIndexList) @@ -187,7 +187,7 @@ class IncohInt4SpectraHeis(Operation): if n == None and timeInterval == None: - raise ValueError, "n or timeInterval should be specified ..." 
+ raise ValueError("n or timeInterval should be specified ...") if n != None: self.n = n @@ -341,4 +341,4 @@ class IncohInt4SpectraHeis(Operation): dataOut.utctime = avgdatatime # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt # dataOut.timeInterval = self.__timeInterval*self.n - dataOut.flagNoData = False + dataOut.flagNoData = False \ No newline at end of file diff --git a/schainpy/model/proc/jroproc_parameters.py b/schainpy/model/proc/jroproc_parameters.py index 113190d..9328a44 100644 --- a/schainpy/model/proc/jroproc_parameters.py +++ b/schainpy/model/proc/jroproc_parameters.py @@ -10,8 +10,6 @@ import importlib import itertools from multiprocessing import Pool, TimeoutError from multiprocessing.pool import ThreadPool -import copy_reg -import cPickle import types from functools import partial import time @@ -19,7 +17,7 @@ import time from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters -from jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon from scipy import asarray as ar,exp from scipy.optimize import curve_fit @@ -36,9 +34,9 @@ SPEED_OF_LIGHT = 299792458 '''solving pickling issue''' def _pickle_method(method): - func_name = method.im_func.__name__ - obj = method.im_self - cls = method.im_class + func_name = method.__func__.__name__ + obj = method.__self__ + cls = method.__self__.__class__ return _unpickle_method, (func_name, obj, cls) def _unpickle_method(func_name, obj, cls): @@ -213,7 +211,7 @@ class GaussianFit(Operation): self.spc = dataOut.data_pre[0].copy() - print 'SelfSpectra Shape', numpy.asarray(self.spc).shape + print('SelfSpectra Shape', numpy.asarray(self.spc).shape) #plt.figure(50) @@ -251,7 +249,7 @@ class GaussianFit(Operation): pool = Pool(processes=self.Num_Chn) args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)] objs = [self for __ in 
range(self.Num_Chn)] - attrs = zip(objs, args) + attrs = list(zip(objs, args)) gauSPC = pool.map(target, attrs) dataOut.GauSPC = numpy.asarray(gauSPC) # ret = [] @@ -506,8 +504,8 @@ class GaussianFit(Operation): # print 'noise', noise # print 's_noise', wnoise - print '========================================================' - print 'total_time: ', time.time()-start_time + print('========================================================') + print('total_time: ', time.time()-start_time) # re-normalizing spc and noise # This part differs from gg1 @@ -959,12 +957,12 @@ class PrecipitationProc(Operation): dataOut.data_output = Ze dataOut.data_param = numpy.ones([2,self.Num_Hei]) dataOut.channelList = [0,1] - print 'channelList', dataOut.channelList + print('channelList', dataOut.channelList) dataOut.data_param[0]=dBZe dataOut.data_param[1]=dBRR - print 'RR SHAPE', dBRR.shape - print 'Ze SHAPE', dBZe.shape - print 'dataOut.data_param SHAPE', dataOut.data_param.shape + print('RR SHAPE', dBRR.shape) + print('Ze SHAPE', dBZe.shape) + print('dataOut.data_param SHAPE', dataOut.data_param.shape) def dBZeMODE2(self, dataOut): # Processing for MIRA35C @@ -980,7 +978,7 @@ class PrecipitationProc(Operation): data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN ETA = numpy.sum(SNR,1) - print 'ETA' , ETA + print('ETA' , ETA) ETA = numpy.where(ETA is not 0. , ETA, numpy.NaN) Ze = numpy.ones([self.Num_Chn, self.Num_Hei] ) @@ -1068,7 +1066,7 @@ class FullSpectralAnalysis(Operation): data = dataOut.data_pre noise = dataOut.noise - print 'noise',noise + print('noise',noise) #SNRdB = 10*numpy.log10(dataOut.data_SNR) FirstMoment = numpy.average(dataOut.data_param[:,1,:],0) @@ -1095,14 +1093,14 @@ class FullSpectralAnalysis(Operation): velocityX=numpy.append(velocityX, Vzon)#Vmag else: - print 'Vzon',Vzon + print('Vzon',Vzon) velocityX=numpy.append(velocityX, numpy.NaN) if abs(Vmer)<100. 
and abs(Vmer) > 0.: velocityY=numpy.append(velocityY, Vmer)#Vang else: - print 'Vmer',Vmer + print('Vmer',Vmer) velocityY=numpy.append(velocityY, numpy.NaN) if dbSNR[Height] > SNRlimit: @@ -1120,18 +1118,18 @@ class FullSpectralAnalysis(Operation): data_output[1]=numpy.array(velocityY) data_output[2]=-velocityV#FirstMoment - print ' ' + print(' ') #print 'FirstMoment' #print FirstMoment - print 'velocityX',data_output[0] - print ' ' - print 'velocityY',data_output[1] + print('velocityX',data_output[0]) + print(' ') + print('velocityY',data_output[1]) #print numpy.array(velocityY) - print ' ' + print(' ') #print 'SNR' #print 10*numpy.log10(dataOut.data_SNR) #print numpy.shape(10*numpy.log10(dataOut.data_SNR)) - print ' ' + print(' ') dataOut.data_output=data_output @@ -1184,20 +1182,20 @@ class FullSpectralAnalysis(Operation): SmoothSPC=self.moving_average(FactNorm,N=3) - xSamples = ar(range(len(SmoothSPC))) + xSamples = ar(list(range(len(SmoothSPC)))) ySamples[i] = SmoothSPC #dbSNR=10*numpy.log10(dataSNR) - print ' ' - print ' ' - print ' ' + print(' ') + print(' ') + print(' ') #print 'dataSNR', dbSNR.shape, dbSNR[0,40:120] - print 'SmoothSPC', SmoothSPC.shape, SmoothSPC[0:20] - print 'noise',noise - print 'zline',zline.shape, zline[0:20] - print 'FactNorm',FactNorm.shape, FactNorm[0:20] - print 'FactNorm suma', numpy.sum(FactNorm) + print('SmoothSPC', SmoothSPC.shape, SmoothSPC[0:20]) + print('noise',noise) + print('zline',zline.shape, zline[0:20]) + print('FactNorm',FactNorm.shape, FactNorm[0:20]) + print('FactNorm suma', numpy.sum(FactNorm)) for i in range(spc.shape[0]): @@ -1218,12 +1216,12 @@ class FullSpectralAnalysis(Operation): phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi - print 'cspcLine', cspcLine.shape, cspcLine[0:20] - print 'CSPCFactor', CSPCFactor#, CSPCFactor[0:20] - print numpy.sum(ySamples[chan_index0]), numpy.sum(ySamples[chan_index1]), -noise[i] - print 'CSPCNorm', CSPCNorm.shape, 
CSPCNorm[0:20] - print 'CSPCNorm suma', numpy.sum(CSPCNorm) - print 'CSPCSamples', CSPCSamples.shape, CSPCSamples[0,0:20] + print('cspcLine', cspcLine.shape, cspcLine[0:20]) + print('CSPCFactor', CSPCFactor)#, CSPCFactor[0:20] + print(numpy.sum(ySamples[chan_index0]), numpy.sum(ySamples[chan_index1]), -noise[i]) + print('CSPCNorm', CSPCNorm.shape, CSPCNorm[0:20]) + print('CSPCNorm suma', numpy.sum(CSPCNorm)) + print('CSPCSamples', CSPCSamples.shape, CSPCSamples[0,0:20]) '''****** Getting fij width ******''' @@ -1237,14 +1235,14 @@ class FullSpectralAnalysis(Operation): meanGauss=sum(xSamples*yMean) / len(xSamples) sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) - print '****************************' - print 'len(xSamples): ',len(xSamples) - print 'yMean: ', yMean.shape, yMean[0:20] - print 'ySamples', ySamples.shape, ySamples[0,0:20] - print 'xSamples: ',xSamples.shape, xSamples[0:20] + print('****************************') + print('len(xSamples): ',len(xSamples)) + print('yMean: ', yMean.shape, yMean[0:20]) + print('ySamples', ySamples.shape, ySamples[0,0:20]) + print('xSamples: ',xSamples.shape, xSamples[0:20]) - print 'meanGauss',meanGauss - print 'sigma',sigma + print('meanGauss',meanGauss) + print('sigma',sigma) #if (abs(meanGauss/sigma**2) > 0.0001) : #0.000000001): if dbSNR > SNRlimit : @@ -1256,7 +1254,7 @@ class FullSpectralAnalysis(Operation): else: FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) - print 'Verificador: Dentro', Height + print('Verificador: Dentro', Height) except :#RuntimeError: FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) @@ -1293,10 +1291,10 @@ class FullSpectralAnalysis(Operation): else: Range = numpy.array([0,0]) - print ' ' - print 'GCpos',GCpos, ( len(xFrec)- len(xFrec)*0.1) - print 'Rangpos',Rangpos - print 'RANGE: ', Range + print(' ') + print('GCpos',GCpos, ( len(xFrec)- len(xFrec)*0.1)) + print('Rangpos',Rangpos) + print('RANGE: ', Range) FrecRange=xFrec[Range[0]:Range[1]] '''****** Getting SCPC Slope 
******''' @@ -1306,9 +1304,9 @@ class FullSpectralAnalysis(Operation): if len(FrecRange)>5 and len(FrecRange) m): ss1 = m - valid = numpy.asarray(range(int(m + bb0 - ss1 + 1))) + ss1 + valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1 power = ((spec2[valid] - n0)*fwindow[valid]).sum() fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power) @@ -1809,7 +1807,7 @@ class WindProfiler(Operation): maxid = listPhi.index(max(listPhi)) minid = listPhi.index(min(listPhi)) - rango = range(len(phi)) + rango = list(range(len(phi))) # rango = numpy.delete(rango,maxid) heiRang1 = heiRang*math.cos(phi[maxid]) @@ -1867,7 +1865,7 @@ class WindProfiler(Operation): heiRang = kwargs['heightList'] SNR0 = kwargs['SNR'] - if kwargs.has_key('dirCosx') and kwargs.has_key('dirCosy'): + if 'dirCosx' in kwargs and 'dirCosy' in kwargs: theta_x = numpy.array(kwargs['dirCosx']) theta_y = numpy.array(kwargs['dirCosy']) else: @@ -1875,13 +1873,13 @@ class WindProfiler(Operation): azim = numpy.array(kwargs['azimuth']) theta_x, theta_y = self.__calculateCosDir(elev, azim) azimuth = kwargs['correctAzimuth'] - if kwargs.has_key('horizontalOnly'): + if 'horizontalOnly' in kwargs: horizontalOnly = kwargs['horizontalOnly'] else: horizontalOnly = False - if kwargs.has_key('correctFactor'): + if 'correctFactor' in kwargs: correctFactor = kwargs['correctFactor'] else: correctFactor = 1 - if kwargs.has_key('channelList'): + if 'channelList' in kwargs: channelList = kwargs['channelList'] if len(channelList) == 2: horizontalOnly = True @@ -2002,7 +2000,7 @@ class WindProfiler(Operation): position_y = kwargs['positionY'] azimuth = kwargs['azimuth'] - if kwargs.has_key('correctFactor'): + if 'correctFactor' in kwargs: correctFactor = kwargs['correctFactor'] else: correctFactor = 1 @@ -2355,20 +2353,20 @@ class WindProfiler(Operation): dataOut.flagNoData = True self.__dataReady = False - if 
kwargs.has_key('nHours'): + if 'nHours' in kwargs: nHours = kwargs['nHours'] else: nHours = 1 - if kwargs.has_key('meteorsPerBin'): + if 'meteorsPerBin' in kwargs: meteorThresh = kwargs['meteorsPerBin'] else: meteorThresh = 6 - if kwargs.has_key('hmin'): + if 'hmin' in kwargs: hmin = kwargs['hmin'] else: hmin = 70 - if kwargs.has_key('hmax'): + if 'hmax' in kwargs: hmax = kwargs['hmax'] else: hmax = 110 @@ -2404,22 +2402,22 @@ class WindProfiler(Operation): dataOut.flagNoData = True self.__dataReady = False - if kwargs.has_key('nMins'): + if 'nMins' in kwargs: nMins = kwargs['nMins'] else: nMins = 20 - if kwargs.has_key('rx_location'): + if 'rx_location' in kwargs: rx_location = kwargs['rx_location'] else: rx_location = [(0,1),(1,1),(1,0)] - if kwargs.has_key('azimuth'): + if 'azimuth' in kwargs: azimuth = kwargs['azimuth'] else: azimuth = 51.06 - if kwargs.has_key('dfactor'): + if 'dfactor' in kwargs: dfactor = kwargs['dfactor'] - if kwargs.has_key('mode'): + if 'mode' in kwargs: mode = kwargs['mode'] - if kwargs.has_key('theta_x'): + if 'theta_x' in kwargs: theta_x = kwargs['theta_x'] - if kwargs.has_key('theta_y'): + if 'theta_y' in kwargs: theta_y = kwargs['theta_y'] else: mode = 'SA' @@ -2480,7 +2478,7 @@ class EWDriftsEstimation(Operation): maxid = listPhi.index(max(listPhi)) minid = listPhi.index(min(listPhi)) - rango = range(len(phi)) + rango = list(range(len(phi))) # rango = numpy.delete(rango,maxid) heiRang1 = heiRang*math.cos(phi[maxid]) @@ -3857,7 +3855,7 @@ class SMOperations(): def getPhasePairs(self, channelPositions): chanPos = numpy.array(channelPositions) - listOper = list(itertools.combinations(range(5),2)) + listOper = list(itertools.combinations(list(range(5)),2)) distances = numpy.zeros(4) axisX = [] diff --git a/schainpy/model/proc/jroproc_spectra.py b/schainpy/model/proc/jroproc_spectra.py index 0c84cac..bbede48 100644 --- a/schainpy/model/proc/jroproc_spectra.py +++ b/schainpy/model/proc/jroproc_spectra.py @@ -2,10 +2,10 @@ import itertools 
import numpy -from jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Spectra from schainpy.model.data.jrodata import hildebrand_sekhon - +from schainpy.utils import log #yong class SpectraProc(ProcessingUnit): @@ -99,11 +99,11 @@ class SpectraProc(ProcessingUnit): (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex') for pair in self.dataOut.pairsList: if pair[0] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % ( - str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % ( + str(pair), str(self.dataOut.channelList))) if pair[1] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % ( - str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % ( + str(pair), str(self.dataOut.channelList))) cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \ numpy.conjugate(fft_volt[pair[1], :, :]) @@ -140,7 +140,7 @@ class SpectraProc(ProcessingUnit): if self.dataIn.type == "Voltage": if nFFTPoints == None: - raise ValueError, "This SpectraProc.run() need nFFTPoints input variable" + raise ValueError("This SpectraProc.run() need nFFTPoints input variable") if nProfiles == None: nProfiles = nFFTPoints @@ -180,8 +180,8 @@ class SpectraProc(ProcessingUnit): self.id_min += nVoltProfiles self.id_max += nVoltProfiles else: - raise ValueError, "The type object %s has %d profiles, it should just has %d profiles" % ( - self.dataIn.type, self.dataIn.data.shape[1], nProfiles) + raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % ( + self.dataIn.type, self.dataIn.data.shape[1], nProfiles)) self.dataOut.flagNoData = True return 0 else: @@ -201,8 
+201,8 @@ class SpectraProc(ProcessingUnit): return True - raise ValueError, "The type of input object '%s' is not valid" % ( - self.dataIn.type) + raise ValueError("The type of input object '%s' is not valid" % ( + self.dataIn.type)) def __selectPairs(self, pairsList): @@ -256,8 +256,8 @@ class SpectraProc(ProcessingUnit): for channel in channelList: if channel not in self.dataOut.channelList: - raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % ( - channel, str(self.dataOut.channelList)) + raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % ( + channel, str(self.dataOut.channelList))) index = self.dataOut.channelList.index(channel) channelIndexList.append(index) @@ -282,8 +282,8 @@ class SpectraProc(ProcessingUnit): for channelIndex in channelIndexList: if channelIndex not in self.dataOut.channelIndexList: - raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % ( - channelIndex, self.dataOut.channelIndexList) + raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % ( + channelIndex, self.dataOut.channelIndexList)) # nChannels = len(channelIndexList) @@ -318,8 +318,8 @@ class SpectraProc(ProcessingUnit): """ if (minHei > maxHei): - raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % ( - minHei, maxHei) + raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % ( + minHei, maxHei)) if (minHei < self.dataOut.heightList[0]): minHei = self.dataOut.heightList[0] @@ -410,8 +410,8 @@ class SpectraProc(ProcessingUnit): """ if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % ( - minIndex, maxIndex) + raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % ( + minIndex, maxIndex)) if (maxIndex >= 
self.dataOut.nHeights): maxIndex = self.dataOut.nHeights - 1 @@ -448,11 +448,12 @@ class SpectraProc(ProcessingUnit): else: jcspectraExist = False - freq_dc = jspectra.shape[1] / 2 + freq_dc = int(jspectra.shape[1] / 2) ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc + ind_vel = ind_vel.astype(int) if ind_vel[0] < 0: - ind_vel[range(0, 1)] = ind_vel[range(0, 1)] + self.num_prof + ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof if mode == 1: jspectra[:, freq_dc, :] = ( @@ -468,12 +469,12 @@ class SpectraProc(ProcessingUnit): xx = numpy.zeros([4, 4]) for fil in range(4): - xx[fil, :] = vel[fil]**numpy.asarray(range(4)) + xx[fil, :] = vel[fil]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx_aux = xx_inv[0, :] - for ich in range(num_chan): + for ich in range(num_chan): yy = jspectra[ich, ind_vel, :] jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy) @@ -508,7 +509,7 @@ class SpectraProc(ProcessingUnit): # hei_interf if hei_interf is None: count_hei = num_hei / 2 # Como es entero no importa - hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei + hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei hei_interf = numpy.asarray(hei_interf)[0] # nhei_interf if (nhei_interf == None): @@ -520,10 +521,10 @@ class SpectraProc(ProcessingUnit): if (offhei_interf == None): offhei_interf = 0 - ind_hei = range(num_hei) + ind_hei = list(range(num_hei)) # mask_prof = numpy.asarray(range(num_prof - 2)) + 1 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1 - mask_prof = numpy.asarray(range(num_prof)) + mask_prof = numpy.asarray(list(range(num_prof))) num_mask_prof = mask_prof.size comp_mask_prof = [0, num_prof / 2] @@ -541,8 +542,8 @@ class SpectraProc(ProcessingUnit): psort = power.ravel().argsort() # Se estima la interferencia promedio en los Espectros de Potencia empleando - junkspc_interf = jspectra[ich, :, hei_interf[psort[range( - offhei_interf, nhei_interf + offhei_interf)]]] + junkspc_interf = 
jspectra[ich, :, hei_interf[psort[list(range( + offhei_interf, nhei_interf + offhei_interf))]]] if noise_exist: # tmp_noise = jnoise[ich] / num_prof @@ -603,7 +604,7 @@ class SpectraProc(ProcessingUnit): xx = numpy.zeros([4, 4]) for id1 in range(4): - xx[:, id1] = ind[id1]**numpy.asarray(range(4)) + xx[:, id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:, 0] @@ -632,17 +633,17 @@ class SpectraProc(ProcessingUnit): cspower = cspower.sum(axis=0) cspsort = cspower.ravel().argsort() - junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[range( - offhei_interf, nhei_interf + offhei_interf)]]] + junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range( + offhei_interf, nhei_interf + offhei_interf))]]] junkcspc_interf = junkcspc_interf.transpose() jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort() median_real = numpy.median(numpy.real( - junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :])) + junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :])) median_imag = numpy.median(numpy.imag( - junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :])) + junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :])) junkcspc_interf[comp_mask_prof, :] = numpy.complex( median_real, median_imag) @@ -662,7 +663,7 @@ class SpectraProc(ProcessingUnit): xx = numpy.zeros([4, 4]) for id1 in range(4): - xx[:, id1] = ind[id1]**numpy.asarray(range(4)) + xx[:, id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:, 0] @@ -693,13 +694,13 @@ class SpectraProc(ProcessingUnit): maxHei = self.dataOut.heightList[-1] if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei): - print 'minHei: %.2f is out of the heights range' % (minHei) - print 'minHei is setting to %.2f' % (self.dataOut.heightList[0]) + print('minHei: %.2f is out of the heights range' % (minHei)) + print('minHei is setting to %.2f' % 
(self.dataOut.heightList[0])) minHei = self.dataOut.heightList[0] if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei): - print 'maxHei: %.2f is out of the heights range' % (maxHei) - print 'maxHei is setting to %.2f' % (self.dataOut.heightList[-1]) + print('maxHei: %.2f is out of the heights range' % (maxHei)) + print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1])) maxHei = self.dataOut.heightList[-1] # validacion de velocidades @@ -712,13 +713,13 @@ class SpectraProc(ProcessingUnit): maxVel = velrange[-1] if (minVel < velrange[0]) or (minVel > maxVel): - print 'minVel: %.2f is out of the velocity range' % (minVel) - print 'minVel is setting to %.2f' % (velrange[0]) + print('minVel: %.2f is out of the velocity range' % (minVel)) + print('minVel is setting to %.2f' % (velrange[0])) minVel = velrange[0] if (maxVel > velrange[-1]) or (maxVel < minVel): - print 'maxVel: %.2f is out of the velocity range' % (maxVel) - print 'maxVel is setting to %.2f' % (velrange[-1]) + print('maxVel: %.2f is out of the velocity range' % (maxVel)) + print('maxVel is setting to %.2f' % (velrange[-1])) maxVel = velrange[-1] # seleccion de indices para rango @@ -740,8 +741,8 @@ class SpectraProc(ProcessingUnit): maxIndex = len(heights) if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "some value in (%d,%d) is not valid" % ( - minIndex, maxIndex) + raise ValueError("some value in (%d,%d) is not valid" % ( + minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights - 1 @@ -823,7 +824,7 @@ class IncohInt(Operation): self.__byTime = False if n is None and timeInterval is None: - raise ValueError, "n or timeInterval should be specified ..." 
+ raise ValueError("n or timeInterval should be specified ...") if n is not None: self.n = int(n) @@ -949,4 +950,4 @@ class IncohInt(Operation): dataOut.nIncohInt *= self.n dataOut.utctime = avgdatatime - dataOut.flagNoData = False + dataOut.flagNoData = False \ No newline at end of file diff --git a/schainpy/model/proc/jroproc_spectra_acf.py b/schainpy/model/proc/jroproc_spectra_acf.py index 4d316d2..709922d 100644 --- a/schainpy/model/proc/jroproc_spectra_acf.py +++ b/schainpy/model/proc/jroproc_spectra_acf.py @@ -1,6 +1,6 @@ import numpy -from jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Spectra from schainpy.model.data.jrodata import hildebrand_sekhon @@ -119,9 +119,9 @@ class SpectraAFCProc(ProcessingUnit): cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex') for pair in self.dataOut.pairsList: if pair[0] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))) if pair[1] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))) chan_index0 = self.dataOut.channelList.index(pair[0]) chan_index1 = self.dataOut.channelList.index(pair[1]) @@ -148,7 +148,7 @@ class SpectraAFCProc(ProcessingUnit): if self.dataIn.type == "Voltage": if nFFTPoints == None: - raise ValueError, "This SpectraProc.run() need nFFTPoints input variable" + raise ValueError("This SpectraProc.run() need nFFTPoints input variable") if nProfiles == None: nProfiles = 
nFFTPoints @@ -172,7 +172,7 @@ class SpectraAFCProc(ProcessingUnit): # self.profIndex += 1 else: - raise ValueError, "" + raise ValueError("") self.firstdatatime = self.dataIn.utctime @@ -186,7 +186,7 @@ class SpectraAFCProc(ProcessingUnit): return True - raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type) + raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type)) def __selectPairs(self, pairsList): @@ -246,7 +246,7 @@ class SpectraAFCProc(ProcessingUnit): for channel in channelList: if channel not in self.dataOut.channelList: - raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)) + raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))) index = self.dataOut.channelList.index(channel) channelIndexList.append(index) @@ -271,7 +271,7 @@ class SpectraAFCProc(ProcessingUnit): for channelIndex in channelIndexList: if channelIndex not in self.dataOut.channelIndexList: - raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList) + raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList)) # nChannels = len(channelIndexList) @@ -305,7 +305,7 @@ class SpectraAFCProc(ProcessingUnit): """ if (minHei > maxHei): - raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei) + raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)) if (minHei < self.dataOut.heightList[0]): minHei = self.dataOut.heightList[0] @@ -394,7 +394,7 @@ class SpectraAFCProc(ProcessingUnit): """ if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "Error selecting heights: Index 
range (%d,%d) is not valid" % (minIndex, maxIndex) + raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights-1 @@ -435,7 +435,7 @@ class SpectraAFCProc(ProcessingUnit): ind_vel = numpy.array([-2,-1,1,2]) + freq_dc if ind_vel[0]<0: - ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof + ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof if mode == 1: jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION @@ -449,7 +449,7 @@ class SpectraAFCProc(ProcessingUnit): xx = numpy.zeros([4,4]) for fil in range(4): - xx[fil,:] = vel[fil]**numpy.asarray(range(4)) + xx[fil,:] = vel[fil]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx_aux = xx_inv[0,:] @@ -489,7 +489,7 @@ class SpectraAFCProc(ProcessingUnit): #hei_interf if hei_interf is None: count_hei = num_hei/2 #Como es entero no importa - hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei + hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei hei_interf = numpy.asarray(hei_interf)[0] #nhei_interf if (nhei_interf == None): @@ -501,10 +501,10 @@ class SpectraAFCProc(ProcessingUnit): if (offhei_interf == None): offhei_interf = 0 - ind_hei = range(num_hei) + ind_hei = list(range(num_hei)) # mask_prof = numpy.asarray(range(num_prof - 2)) + 1 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1 - mask_prof = numpy.asarray(range(num_prof)) + mask_prof = numpy.asarray(list(range(num_prof))) num_mask_prof = mask_prof.size comp_mask_prof = [0, num_prof/2] @@ -523,7 +523,7 @@ class SpectraAFCProc(ProcessingUnit): psort = power.ravel().argsort() #Se estima la interferencia promedio en los Espectros de Potencia empleando - junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]] + junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, 
nhei_interf + offhei_interf))]]] if noise_exist: # tmp_noise = jnoise[ich] / num_prof @@ -576,7 +576,7 @@ class SpectraAFCProc(ProcessingUnit): xx = numpy.zeros([4,4]) for id1 in range(4): - xx[:,id1] = ind[id1]**numpy.asarray(range(4)) + xx[:,id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:,0] @@ -602,14 +602,14 @@ class SpectraAFCProc(ProcessingUnit): cspower = cspower.sum(axis = 0) cspsort = cspower.ravel().argsort() - junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]] + junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]] junkcspc_interf = junkcspc_interf.transpose() jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort() - median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:])) - median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:])) + median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:])) + median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:])) junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag) for iprof in range(num_prof): @@ -626,7 +626,7 @@ class SpectraAFCProc(ProcessingUnit): xx = numpy.zeros([4,4]) for id1 in range(4): - xx[:,id1] = ind[id1]**numpy.asarray(range(4)) + xx[:,id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:,0] @@ -657,13 +657,13 @@ class SpectraAFCProc(ProcessingUnit): maxHei = self.dataOut.heightList[-1] if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei): - print 'minHei: %.2f is out of the heights range'%(minHei) - print 'minHei is setting to %.2f'%(self.dataOut.heightList[0]) + print('minHei: %.2f is out of the heights range'%(minHei)) + print('minHei is setting to 
%.2f'%(self.dataOut.heightList[0])) minHei = self.dataOut.heightList[0] if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei): - print 'maxHei: %.2f is out of the heights range'%(maxHei) - print 'maxHei is setting to %.2f'%(self.dataOut.heightList[-1]) + print('maxHei: %.2f is out of the heights range'%(maxHei)) + print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1])) maxHei = self.dataOut.heightList[-1] # validacion de velocidades @@ -676,13 +676,13 @@ class SpectraAFCProc(ProcessingUnit): maxVel = velrange[-1] if (minVel < velrange[0]) or (minVel > maxVel): - print 'minVel: %.2f is out of the velocity range'%(minVel) - print 'minVel is setting to %.2f'%(velrange[0]) + print('minVel: %.2f is out of the velocity range'%(minVel)) + print('minVel is setting to %.2f'%(velrange[0])) minVel = velrange[0] if (maxVel > velrange[-1]) or (maxVel < minVel): - print 'maxVel: %.2f is out of the velocity range'%(maxVel) - print 'maxVel is setting to %.2f'%(velrange[-1]) + print('maxVel: %.2f is out of the velocity range'%(maxVel)) + print('maxVel is setting to %.2f'%(velrange[-1])) maxVel = velrange[-1] # seleccion de indices para rango @@ -704,7 +704,7 @@ class SpectraAFCProc(ProcessingUnit): maxIndex = len(heights) if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex) + raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights-1 @@ -733,4 +733,4 @@ class SpectraAFCProc(ProcessingUnit): self.dataOut.noise_estimation = noise.copy() - return 1 + return 1 \ No newline at end of file diff --git a/schainpy/model/proc/jroproc_spectra_lags.py b/schainpy/model/proc/jroproc_spectra_lags.py index fa64eae..90b88e5 100644 --- a/schainpy/model/proc/jroproc_spectra_lags.py +++ b/schainpy/model/proc/jroproc_spectra_lags.py @@ -1,6 +1,6 @@ import numpy -from jroproc_base import ProcessingUnit, Operation +from 
.jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Spectra from schainpy.model.data.jrodata import hildebrand_sekhon @@ -125,9 +125,9 @@ class SpectraLagsProc(ProcessingUnit): cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex') for pair in self.dataOut.pairsList: if pair[0] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))) if pair[1] not in self.dataOut.channelList: - raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)) + raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))) chan_index0 = self.dataOut.channelList.index(pair[0]) chan_index1 = self.dataOut.channelList.index(pair[1]) @@ -158,7 +158,7 @@ class SpectraLagsProc(ProcessingUnit): if self.dataIn.type == "Voltage": if nFFTPoints == None: - raise ValueError, "This SpectraProc.run() need nFFTPoints input variable" + raise ValueError("This SpectraProc.run() need nFFTPoints input variable") if nProfiles == None: nProfiles = nFFTPoints @@ -189,7 +189,7 @@ class SpectraLagsProc(ProcessingUnit): return True - raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type) + raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type)) def __selectPairs(self, pairsList): @@ -249,7 +249,7 @@ class SpectraLagsProc(ProcessingUnit): for channel in channelList: if channel not in self.dataOut.channelList: - raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)) + raise ValueError("Error selecting channels, Channel 
%d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))) index = self.dataOut.channelList.index(channel) channelIndexList.append(index) @@ -274,7 +274,7 @@ class SpectraLagsProc(ProcessingUnit): for channelIndex in channelIndexList: if channelIndex not in self.dataOut.channelIndexList: - raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList) + raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList)) # nChannels = len(channelIndexList) @@ -308,7 +308,7 @@ class SpectraLagsProc(ProcessingUnit): """ if (minHei > maxHei): - raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei) + raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)) if (minHei < self.dataOut.heightList[0]): minHei = self.dataOut.heightList[0] @@ -397,7 +397,7 @@ class SpectraLagsProc(ProcessingUnit): """ if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex) + raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights-1 @@ -438,7 +438,7 @@ class SpectraLagsProc(ProcessingUnit): ind_vel = numpy.array([-2,-1,1,2]) + freq_dc if ind_vel[0]<0: - ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof + ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof if mode == 1: jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION @@ -452,7 +452,7 @@ class SpectraLagsProc(ProcessingUnit): xx = numpy.zeros([4,4]) for fil in range(4): - xx[fil,:] = vel[fil]**numpy.asarray(range(4)) + xx[fil,:] = 
vel[fil]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx_aux = xx_inv[0,:] @@ -492,7 +492,7 @@ class SpectraLagsProc(ProcessingUnit): #hei_interf if hei_interf is None: count_hei = num_hei/2 #Como es entero no importa - hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei + hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei hei_interf = numpy.asarray(hei_interf)[0] #nhei_interf if (nhei_interf == None): @@ -504,10 +504,10 @@ class SpectraLagsProc(ProcessingUnit): if (offhei_interf == None): offhei_interf = 0 - ind_hei = range(num_hei) + ind_hei = list(range(num_hei)) # mask_prof = numpy.asarray(range(num_prof - 2)) + 1 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1 - mask_prof = numpy.asarray(range(num_prof)) + mask_prof = numpy.asarray(list(range(num_prof))) num_mask_prof = mask_prof.size comp_mask_prof = [0, num_prof/2] @@ -526,7 +526,7 @@ class SpectraLagsProc(ProcessingUnit): psort = power.ravel().argsort() #Se estima la interferencia promedio en los Espectros de Potencia empleando - junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]] + junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]] if noise_exist: # tmp_noise = jnoise[ich] / num_prof @@ -579,7 +579,7 @@ class SpectraLagsProc(ProcessingUnit): xx = numpy.zeros([4,4]) for id1 in range(4): - xx[:,id1] = ind[id1]**numpy.asarray(range(4)) + xx[:,id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:,0] @@ -605,14 +605,14 @@ class SpectraLagsProc(ProcessingUnit): cspower = cspower.sum(axis = 0) cspsort = cspower.ravel().argsort() - junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]] + junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]] junkcspc_interf = junkcspc_interf.transpose() jcspc_interf = 
junkcspc_interf.sum(axis = 1)/nhei_interf ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort() - median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:])) - median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:])) + median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:])) + median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:])) junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag) for iprof in range(num_prof): @@ -629,7 +629,7 @@ class SpectraLagsProc(ProcessingUnit): xx = numpy.zeros([4,4]) for id1 in range(4): - xx[:,id1] = ind[id1]**numpy.asarray(range(4)) + xx[:,id1] = ind[id1]**numpy.asarray(list(range(4))) xx_inv = numpy.linalg.inv(xx) xx = xx_inv[:,0] @@ -660,13 +660,13 @@ class SpectraLagsProc(ProcessingUnit): maxHei = self.dataOut.heightList[-1] if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei): - print 'minHei: %.2f is out of the heights range'%(minHei) - print 'minHei is setting to %.2f'%(self.dataOut.heightList[0]) + print('minHei: %.2f is out of the heights range'%(minHei)) + print('minHei is setting to %.2f'%(self.dataOut.heightList[0])) minHei = self.dataOut.heightList[0] if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei): - print 'maxHei: %.2f is out of the heights range'%(maxHei) - print 'maxHei is setting to %.2f'%(self.dataOut.heightList[-1]) + print('maxHei: %.2f is out of the heights range'%(maxHei)) + print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1])) maxHei = self.dataOut.heightList[-1] # validacion de velocidades @@ -679,13 +679,13 @@ class SpectraLagsProc(ProcessingUnit): maxVel = velrange[-1] if (minVel < velrange[0]) or (minVel > maxVel): - print 'minVel: %.2f is out of the velocity range'%(minVel) - print 'minVel is setting to %.2f'%(velrange[0]) + print('minVel: %.2f is out of the velocity range'%(minVel)) + 
print('minVel is setting to %.2f'%(velrange[0])) minVel = velrange[0] if (maxVel > velrange[-1]) or (maxVel < minVel): - print 'maxVel: %.2f is out of the velocity range'%(maxVel) - print 'maxVel is setting to %.2f'%(velrange[-1]) + print('maxVel: %.2f is out of the velocity range'%(maxVel)) + print('maxVel is setting to %.2f'%(velrange[-1])) maxVel = velrange[-1] # seleccion de indices para rango @@ -707,7 +707,7 @@ class SpectraLagsProc(ProcessingUnit): maxIndex = len(heights) if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex) + raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights-1 @@ -736,4 +736,4 @@ class SpectraLagsProc(ProcessingUnit): self.dataOut.noise_estimation = noise.copy() - return 1 + return 1 \ No newline at end of file diff --git a/schainpy/model/proc/jroproc_voltage.py b/schainpy/model/proc/jroproc_voltage.py index ee0da38..d241c3a 100644 --- a/schainpy/model/proc/jroproc_voltage.py +++ b/schainpy/model/proc/jroproc_voltage.py @@ -1,8 +1,9 @@ import sys import numpy from scipy import interpolate -from schainpy import cSchain -from jroproc_base import ProcessingUnit, Operation +#TODO +#from schainpy import cSchain +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Voltage from time import time @@ -71,7 +72,7 @@ class VoltageProc(ProcessingUnit): for channel in channelList: if channel not in self.dataOut.channelList: - raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList)) + raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList))) index = self.dataOut.channelList.index(channel) channelIndexList.append(index) @@ -99,8 +100,8 @@ class VoltageProc(ProcessingUnit): for channelIndex in channelIndexList: if channelIndex not in self.dataOut.channelIndexList: - print channelIndexList - 
raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex + print(channelIndexList) + raise ValueError("The value %d in channelIndexList is not valid" %channelIndex) if self.dataOut.flagDataAsBlock: """ @@ -184,7 +185,7 @@ class VoltageProc(ProcessingUnit): """ if (minIndex < 0) or (minIndex > maxIndex): - raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex) + raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex)) if (maxIndex >= self.dataOut.nHeights): maxIndex = self.dataOut.nHeights @@ -204,7 +205,7 @@ class VoltageProc(ProcessingUnit): self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex] if self.dataOut.nHeights <= 1: - raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights) + raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)) return 1 @@ -221,7 +222,7 @@ class VoltageProc(ProcessingUnit): newheights = (self.dataOut.nHeights-r)/window if newheights <= 1: - raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window) + raise ValueError("filterByHeights: Too few heights. 
Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)) if self.dataOut.flagDataAsBlock: """ @@ -257,7 +258,7 @@ class VoltageProc(ProcessingUnit): if self.dataOut.flagDataAsBlock: flip = self.flip - profileList = range(self.dataOut.nProfiles) + profileList = list(range(self.dataOut.nProfiles)) if not channelList: for thisProfile in profileList: @@ -306,7 +307,7 @@ class VoltageProc(ProcessingUnit): else: nHeights = self.dataOut.data.shape[2] x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights))) - y = self.dataOut.data[:,:,range(botLim)+range(topLim+1,nHeights)] + y = self.dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))] f = interpolate.interp1d(x, y, axis = 2) xnew = numpy.arange(botLim,topLim+1) ynew = f(xnew) @@ -355,7 +356,7 @@ class CohInt(Operation): self.stride = stride if n == None and timeInterval == None: - raise ValueError, "n or timeInterval should be specified ..." + raise ValueError("n or timeInterval should be specified ...") if n != None: self.n = n @@ -613,7 +614,7 @@ class Decoder(Operation): self.__nHeis = dataOut.nHeights if self.__nHeis < self.nBaud: - raise ValueError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud) + raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)) #Frequency __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex) @@ -666,7 +667,7 @@ class Decoder(Operation): junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize)) junk = junk.flatten() code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud)) - profilesList = xrange(self.__nProfiles) + profilesList = range(self.__nProfiles) for i in range(self.__nChannels): for j in profilesList: @@ -675,7 +676,7 @@ class Decoder(Operation): def __convolutionByBlockInFreq(self, data): - raise NotImplementedError, "Decoder by 
frequency fro Blocks not implemented" + raise NotImplementedError("Decoder by frequency fro Blocks not implemented") fft_code = self.fft_code[self.__profIndex].reshape(1,-1) @@ -692,13 +693,13 @@ class Decoder(Operation): def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None): if dataOut.flagDecodeData: - print "This data is already decoded, recoding again ..." + print("This data is already decoded, recoding again ...") if not self.isConfig: if code is None: if dataOut.code is None: - raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type + raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type) code = dataOut.code else: @@ -714,7 +715,7 @@ class Decoder(Operation): sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n") if self.code is None: - print "Fail decoding: Code is not defined." + print("Fail decoding: Code is not defined.") return self.__nProfiles = dataOut.nProfiles @@ -746,7 +747,7 @@ class Decoder(Operation): datadec = self.__convolutionInFreqOpt(dataOut.data) if datadec is None: - raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode + raise ValueError("Codification mode selected is not valid: mode=%d. 
Try selecting 0 or 1" %mode) dataOut.code = self.code dataOut.nCode = self.nCode @@ -803,7 +804,7 @@ class ProfileConcat(Operation): self.isConfig = True if dataOut.flagDataAsBlock: - raise ValueError, "ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False" + raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False") else: self.concat(dataOut.data) @@ -883,7 +884,7 @@ class ProfileSelector(Operation): if profileRangeList != None: minIndex = profileRangeList[0] maxIndex = profileRangeList[1] - profileList = range(minIndex, maxIndex+1) + profileList = list(range(minIndex, maxIndex+1)) dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:] @@ -895,7 +896,7 @@ class ProfileSelector(Operation): minIndex = thisRange[0] maxIndex = thisRange[1] - profileList.extend(range(minIndex, maxIndex+1)) + profileList.extend(list(range(minIndex, maxIndex+1))) dataOut.data = dataOut.data[:,profileList,:] @@ -974,7 +975,7 @@ class ProfileSelector(Operation): return True - raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter" + raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter") return False @@ -1015,21 +1016,21 @@ class Reshaper(Operation): def __checkInputs(self, dataOut, shape, nTxs): if shape is None and nTxs is None: - raise ValueError, "Reshaper: shape of factor should be defined" + raise ValueError("Reshaper: shape of factor should be defined") if nTxs: if nTxs < 0: - raise ValueError, "nTxs should be greater than 0" + raise ValueError("nTxs should be greater than 0") if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0: - raise ValueError, "nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)) + raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs))) shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs] 
return shape, nTxs if len(shape) != 2 and len(shape) != 3: - raise ValueError, "shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights) + raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights)) if len(shape) == 2: shape_tuple = [dataOut.nChannels] @@ -1069,7 +1070,7 @@ class Reshaper(Operation): profileIndex = dataOut.profileIndex*nTxs else: - raise ValueError, "nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)" + raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)") deltaHeight = dataOut.heightList[1] - dataOut.heightList[0] @@ -1098,7 +1099,7 @@ class SplitProfiles(Operation): shape = dataOut.data.shape if shape[2] % n != 0: - raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]) + raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2])) new_shape = shape[0], shape[1]*n, shape[2]/n @@ -1109,7 +1110,7 @@ class SplitProfiles(Operation): else: - raise ValueError, "Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)" + raise ValueError("Could not split the data when is read Profile by Profile. 
Use VoltageReader(..., getblock=True)") deltaHeight = dataOut.heightList[1] - dataOut.heightList[0] @@ -1141,7 +1142,7 @@ class CombineProfiles(Operation): new_shape = shape[0], shape[1]/n, shape[2]*n if shape[1] % n != 0: - raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]) + raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1])) dataOut.data = numpy.reshape(dataOut.data, new_shape) dataOut.flagNoData = False @@ -1316,4 +1317,4 @@ class CombineProfiles(Operation): # # self.__startIndex += self.__newNSamples # -# return +# return \ No newline at end of file diff --git a/schainpy/model/proc/pxproc_parameters.py b/schainpy/model/proc/pxproc_parameters.py index 5d5e9d8..9ba0611 100644 --- a/schainpy/model/proc/pxproc_parameters.py +++ b/schainpy/model/proc/pxproc_parameters.py @@ -11,7 +11,7 @@ from time import gmtime from numpy import transpose -from jroproc_base import ProcessingUnit, Operation +from .jroproc_base import ProcessingUnit, Operation from schainpy.model.data.jrodata import Parameters diff --git a/schainpy/model/serializer/data.py b/schainpy/model/serializer/data.py index 1212559..4c025ee 100644 --- a/schainpy/model/serializer/data.py +++ b/schainpy/model/serializer/data.py @@ -3,7 +3,7 @@ Created on Jul 15, 2014 @author: Miguel Urco ''' -from serializer import DynamicSerializer +from .serializer import DynamicSerializer DEFAULT_SERIALIZER = None #'cPickle', 'msgpack', "yaml" @@ -20,7 +20,7 @@ def isDictFormat(thisValue): if type(thisValue) != type({}): return False - if CLASSNAME_KEY not in thisValue.keys(): + if CLASSNAME_KEY not in list(thisValue.keys()): return False return True @@ -28,13 +28,13 @@ def isDictFormat(thisValue): def obj2Dict(myObj, keyList=[]): if not keyList: - keyList = myObj.__dict__.keys() + keyList = list(myObj.__dict__.keys()) myDict = {} myDict[CLASSNAME_KEY] = myObj.__class__.__name__ - for thisKey, thisValue in myObj.__dict__.items(): + for thisKey, 
thisValue in list(myObj.__dict__.items()): if thisKey not in keyList: continue @@ -52,14 +52,14 @@ def dict2Obj(myDict): ''' ''' - if CLASSNAME_KEY not in myDict.keys(): + if CLASSNAME_KEY not in list(myDict.keys()): return None className = eval(myDict[CLASSNAME_KEY]) myObj = className() - for thisKey, thisValue in myDict.items(): + for thisKey, thisValue in list(myDict.items()): if thisKey == CLASSNAME_KEY: continue @@ -111,4 +111,4 @@ def serial2Obj(mySerial, metadataDict = {}, serializer=DEFAULT_SERIALIZER): metadataDict.update(myDataDict) myObj = dict2Obj(metadataDict) - return myObj + return myObj \ No newline at end of file diff --git a/schainpy/model/serializer/serializer.py b/schainpy/model/serializer/serializer.py index 82e5f46..c5e0ffe 100644 --- a/schainpy/model/serializer/serializer.py +++ b/schainpy/model/serializer/serializer.py @@ -7,7 +7,7 @@ Created on Jul 17, 2014 DEFAULT_SERIALIZER = None try: - import cPickle + import pickle DEFAULT_SERIALIZER = 'cPickle' except: pass @@ -86,7 +86,7 @@ class DynamicSerializer(Serializer): def __init__(self, module = None): if not DEFAULT_SERIALIZER: - raise ImportError, "Install a python serializer like cPickle or msgpack" + raise ImportError("Install a python serializer like cPickle or msgpack") if not module: module == DEFAULT_SERIALIZER diff --git a/schainpy/model/serializer/test/DynamicObject.py b/schainpy/model/serializer/test/DynamicObject.py index 5045e1f..39e4100 100644 --- a/schainpy/model/serializer/test/DynamicObject.py +++ b/schainpy/model/serializer/test/DynamicObject.py @@ -7,7 +7,7 @@ matching signatures. $Id$ ''' -import urllib +import urllib.request, urllib.parse, urllib.error import os import re import yaml # YAML Ain't Markup Language @@ -40,7 +40,7 @@ class Object(object): elif isinstance(object_uri, str): if object_uri.endswith('.yml'): # URI is a web hyper-linked yaml file - read it. 
- self.yaml = urllib.urlopen(object_uri).read() + self.yaml = urllib.request.urlopen(object_uri).read() else: # URI is a (hyper-linked?) directory - try reading it. #print "URI is a directory." @@ -55,12 +55,12 @@ class Object(object): for fn in self.files: self.yaml.append(Object(fn)) else: - print "Invalid URI supplied: %s"%(object_uri,) + print("Invalid URI supplied: %s"%(object_uri,)) def __parseLink(self, object_uri, recursive): """ Returns a listing of all YAML files located in the hyper-link directory given by page. """ - page = urllib.urlopen(object_uri).read() + page = urllib.request.urlopen(object_uri).read() #print "URI is a URL directory: %s"%(object_uri,) pattern = re.compile(r'') @@ -120,8 +120,8 @@ class Object(object): if not isinstance(obj, Object): return False - self_keys = self.__dict__.keys() - obj_keys = obj.__dict__.keys() + self_keys = list(self.__dict__.keys()) + obj_keys = list(obj.__dict__.keys()) if not self_keys == obj_keys: return False for key in self_keys: @@ -132,8 +132,8 @@ class Object(object): if not self_value.equals(obj_value, compare_time_created): return False elif isinstance(self_value, np.ndarray): - m1 = map(repr,self_value.flat) - m2 = map(repr,obj_value.flat) + m1 = list(map(repr,self_value.flat)) + m2 = list(map(repr,obj_value.flat)) ret = m1 == m2 if not ret: return False @@ -147,7 +147,7 @@ class Object(object): def sizeof(self): """ Recursively computes the size in bytes of the given Dynamic Object """ sz = 0 - values = self.__dict__.values() + values = list(self.__dict__.values()) for val in values: if isinstance(val, Object): sz += val.sizeof() elif isinstance(val, np.ndarray): sz += val.nbytes @@ -281,7 +281,7 @@ def __ref_constructor(loader, node): return _Reference(loader.construct_mapping(node)) else: return _Reference(loader.construct_scalar(node)) -add_constructor(u'!ref', __ref_constructor) +add_constructor('!ref', __ref_constructor) # Method constructor using !method tag: def __method_constructor(loader, 
node): @@ -289,7 +289,7 @@ def __method_constructor(loader, node): return _Method(loader.construct_mapping(node)) else: return _Method(loader.construct_scalar(node)) -add_constructor(u'!method', __method_constructor) +add_constructor('!method', __method_constructor) # Generic constructor for any _BuiltinDtype def __dtype_constructor(loader, node): @@ -302,8 +302,8 @@ def __dtype_constructor(loader, node): return ret # Register YAML constructors for each builtin type: -for dtype in Lookup.numpy_dtypes.keys() + Lookup.builtin_objects.keys(): - add_constructor(u'!%s'%(dtype,), __dtype_constructor) +for dtype in list(Lookup.numpy_dtypes.keys()) + list(Lookup.builtin_objects.keys()): + add_constructor('!%s'%(dtype,), __dtype_constructor) class FactoryLoader(OrderedYAML.Loader): """ A YAML Loader specifically designed to load YAML object definitions @@ -311,7 +311,7 @@ class FactoryLoader(OrderedYAML.Loader): def construct_yaml_timestamp(self, node): """ Make empty timestamps (None/null) acceptable, otherwise parse the timestamp """ - if node.value == u'': + if node.value == '': name = 'YAML_DEFN_LOADED_INCORRECTLY' # in case we forget to fix the name... 
return _Parameter(name, hasDefault=False, classType=datetime.datetime) else: @@ -319,7 +319,7 @@ class FactoryLoader(OrderedYAML.Loader): # Override default timestamp constructor: FactoryLoader.add_constructor( - u'tag:yaml.org,2002:timestamp', + 'tag:yaml.org,2002:timestamp', FactoryLoader.construct_yaml_timestamp ) @@ -414,7 +414,7 @@ class Factory: return _Parameter(sigName, True, default, length=None) # Is the object an array with length and default value given?: - if isinstance(sig.yamlString, dict) and "len" in sig.yamlString.keys(): + if isinstance(sig.yamlString, dict) and "len" in list(sig.yamlString.keys()): length = sig.yamlString["len"] # Shape is given as something like [[],[]], not [2,2] - convert @@ -495,7 +495,7 @@ class Factory: # List of names of classes we've created so far: #print [x for x in objClasses] - names = objClasses.keys() + names = list(objClasses.keys()) if ref_object.yamlString in names: defaultType = objClasses[ref_object.yamlString] @@ -594,7 +594,7 @@ class Factory: setattr(_self, classData[i].name, arg) # Set named attributes (given by dictionary kwargs): - for key,value in kwargs.items(): + for key,value in list(kwargs.items()): try: keyIndex = [param.name for param in classData].index(key) except ValueError: @@ -605,7 +605,7 @@ class Factory: # Object instantiation / creation time (if not already present): - if not kwargs.has_key('__time_created'): + if '__time_created' not in kwargs: setattr(_self, "__time_created", np.float64(time.time())) return init, attributes @@ -616,7 +616,7 @@ class Factory: a KeyError if the class cannot be found. """ # If class definition was in the YAML file, extend that one: - if className in localClasses.keys(): + if className in list(localClasses.keys()): return localClasses[className] # Else try finding the class definition in our global scope: @@ -647,7 +647,7 @@ class Factory: # Each document can contain multiple objects - build each one. 
# (NOTE: objects can cross reference each other in the same document # need to resolve Reference objects as last step) - for objClassName in document.keys(): + for objClassName in list(document.keys()): # The dictionary containing method & data signatures: objDict = document[objClassName] @@ -659,9 +659,9 @@ class Factory: classBases = [Object] # List structured documents result in a list of dicts each with one key: - if isinstance(objDict, list): keys = [param.keys()[0] for param in objDict] + if isinstance(objDict, list): keys = [list(param.keys())[0] for param in objDict] # Otherwise the parameter names are just the keys of the dict - else: keys = objDict.keys() # if key not found, raises AttributeError + else: keys = list(objDict.keys()) # if key not found, raises AttributeError for sigName in keys: #print sigName @@ -696,7 +696,7 @@ class Factory: else: msg = "Factory abstract base class doesn't " +\ "support the following signature: %r \"%s\""%(sig.__class__,str(sig)) - print sig.__class__ + print(sig.__class__) raise SignatureException(msg) # Built-in attribute for all Dynamic Objects: @@ -731,12 +731,12 @@ class Factory: def construct_dynamic_object(loader, node): kwargs = loader.construct_mapping(node) # Remove revision control from loaded objects (info is in the class object!) 
- for arg in kwargs.keys(): + for arg in list(kwargs.keys()): if arg in getattr(Object, 'getters') and arg != '__time_created': del kwargs[arg] return cls(**kwargs) revision = cls.meta_attributes["__revision_number"] - DynamicYAML.Loader.add_constructor(u'!%s.%s'%(str(objClassName),revision), construct_dynamic_object) + DynamicYAML.Loader.add_constructor('!%s.%s'%(str(objClassName),revision), construct_dynamic_object) represent_dynamic_object = DynamicYAML.Dumper.represent_dynamic_object DynamicYAML.Dumper.add_representer(cls, represent_dynamic_object) @@ -748,19 +748,19 @@ class Factory: except KeyError: # Now look for reference to class object loaded from any YAML defn file, loading the # most recent version / revision (number) of the definition - for dynClass in Object.dynamicClasses.keys()[::-1]: + for dynClass in list(Object.dynamicClasses.keys())[::-1]: if dynClass.startswith(className): return Object.dynamicClasses[dynClass] # Still unresolved - raise exception: - allDynamicClasses = repr(objClasses.keys() + Object.dynamicClasses.keys()) + allDynamicClasses = repr(list(objClasses.keys()) + list(Object.dynamicClasses.keys())) raise UnresolvedTypeException("Cannot resolve type '%s': Name not found in %s"%(className,allDynamicClasses)) def resolve(param): # Reference is just a string - that's the class name: - if isinstance(param.classType.yamlObject, (str, unicode)): + if isinstance(param.classType.yamlObject, str): className = str(param.classType.yamlObject) param.classType = findClass(className) return @@ -796,7 +796,7 @@ class Factory: param.hasDefault = False # for good measure # Is it an object array?: - if "len" in refDict.keys(): + if "len" in list(refDict.keys()): param.length = refDict["len"] # Resolve any unresolved data-types: @@ -810,7 +810,6 @@ class Factory: def load_defn(yaml): """ Shortcut for producing a single DynamicObject class object from the provided yaml definition in string format """ - return Factory(yaml=yaml).classes.values()[0] - + 
return list(Factory(yaml=yaml).classes.values())[0] diff --git a/schainpy/model/serializer/test/DynamicSerializer.py b/schainpy/model/serializer/test/DynamicSerializer.py index 34b0444..00e4c7b 100644 --- a/schainpy/model/serializer/test/DynamicSerializer.py +++ b/schainpy/model/serializer/test/DynamicSerializer.py @@ -66,4 +66,4 @@ class DynamicSerializer: if __name__ == "__main__": DynamicSerializer() - print "DynamicSerializer ran" \ No newline at end of file + print("DynamicSerializer ran") \ No newline at end of file diff --git a/schainpy/model/serializer/test/DynamicYAML.py b/schainpy/model/serializer/test/DynamicYAML.py index f74cf98..508ef55 100644 --- a/schainpy/model/serializer/test/DynamicYAML.py +++ b/schainpy/model/serializer/test/DynamicYAML.py @@ -54,7 +54,7 @@ class Loader(OrderedYAML.Loader): data = self.construct_mapping(self, node) self.constructed_objects[node] = data del self.recursive_objects[node] - if data.has_key('__revision_source'): + if '__revision_source' in data: # TODO: Handle password authentication client = pysvn.Client() source = data['__revision_source'] @@ -85,11 +85,11 @@ class Dumper(OrderedYAML.Dumper): """ state = {} - state.update(obj.__dict__.items()) - state.update(obj.__class__.meta_attributes.items()) + state.update(list(obj.__dict__.items())) + state.update(list(obj.__class__.meta_attributes.items())) name = obj.getObjectName() # obj.__class__.__name__ revision = obj.getRevisionNumber() - return self.represent_mapping(u'!%s.%s' % (name, revision), state) + return self.represent_mapping('!%s.%s' % (name, revision), state) # Dtypes to be stored as hex in YAML streams / strings hex_dtypes = ['float', 'complex', 'half', 'single', 'double'] @@ -98,7 +98,7 @@ hex_dtypes = ['float', 'complex', 'half', 'single', 'double'] dtypes = Lookup.numpy_dtypes # Inverse lookup for accessing tags given a class instance: -cls_dtypes = dict([(v,k) for (k,v) in dtypes.items()]) +cls_dtypes = dict([(v,k) for (k,v) in list(dtypes.items())]) # 
Representer for numpy arrays: def ndarray_representer(dumper, obj): @@ -108,23 +108,23 @@ def ndarray_representer(dumper, obj): np_ary = obj #hex_ary = np.empty(np_ary.shape, dtype=yaml.nodes.ScalarNode) np_flat, hex_flat = np_ary.flat, [] #hex_ary.flat - hex_flat.append(dumper.represent_sequence(u'tag:yaml.org,2002:seq', list(np_ary.shape), flow_style=True)) + hex_flat.append(dumper.represent_sequence('tag:yaml.org,2002:seq', list(np_ary.shape), flow_style=True)) if hexlify: lst = [] for i in range(len(np_flat)): - value = u'%s'%(np_flat[i],) - node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='') + value = '%s'%(np_flat[i],) + node = dumper.represent_scalar('tag:yaml.org,2002:str', value, style='') lst.append(node) - hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True)) + hex_flat.append(yaml.nodes.SequenceNode('tag:yaml.org,2002:seq', lst, flow_style=True)) lst = [] for i in range(len(np_flat)): - if hexlify: value = u'%s'%(binascii.hexlify(np_flat[i]),) - else: value = u'%s'%(np_flat[i],) - node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='') + if hexlify: value = '%s'%(binascii.hexlify(np_flat[i]),) + else: value = '%s'%(np_flat[i],) + node = dumper.represent_scalar('tag:yaml.org,2002:str', value, style='') if hexlify: lst.append(node) else: hex_flat.append(node) - if hexlify: hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True)) - return yaml.nodes.SequenceNode(u'!%s'%(tag,), hex_flat, flow_style=True) + if hexlify: hex_flat.append(yaml.nodes.SequenceNode('tag:yaml.org,2002:seq', lst, flow_style=True)) + return yaml.nodes.SequenceNode('!%s'%(tag,), hex_flat, flow_style=True) Dumper.add_representer(np.ndarray, ndarray_representer) # Constructor for ndarrays with arbitrary (specified) dtype: @@ -172,9 +172,9 @@ class __dtype_con: def dtype_representer(dumper, obj): tag, hexlify, dtype = self.fncn_attributes if isinstance(obj, float): obj = 
np.float64(obj) - if hexlify: value = u'%s'%(binascii.hexlify(obj),) - else: value = u'%s'%(obj,) - try: tag = u'!%s'%(cls_dtypes[obj.__class__]) # 'dtype.'+obj.__class__.__name__ # bullshit... + if hexlify: value = '%s'%(binascii.hexlify(obj),) + else: value = '%s'%(obj,) + try: tag = '!%s'%(cls_dtypes[obj.__class__]) # 'dtype.'+obj.__class__.__name__ # bullshit... except KeyError: tag = '' node = dumper.represent_scalar(tag, value, style='') return node @@ -182,40 +182,39 @@ class __dtype_con: self.dtype_constructor = dtype_constructor self.dtype_representer = dtype_representer -keys = [x for x in dtypes.keys() if x != 'dtype.int' and x != 'dtype.bool'] -print keys +keys = [x for x in list(dtypes.keys()) if x != 'dtype.int' and x != 'dtype.bool'] +print(keys) n = len(keys) -print n +print(n) i=0 for tag in keys: dtype = __dtype_con(tag) dtype_constructor = dtype.dtype_constructor dtype_representer = dtype.dtype_representer - Loader.add_constructor(u'!%s'%(tag,), dtype_constructor) + Loader.add_constructor('!%s'%(tag,), dtype_constructor) Dumper.add_representer(dtypes[tag], dtype_representer) # Precision time constructors & representers: def ns_rep(dumper, obj): state = {'second': obj.__dict__['second'], 'nanosecond': obj.__dict__['nanosecond']} - return dumper.represent_mapping(u'!timestamp_ns', state) + return dumper.represent_mapping('!timestamp_ns', state) def ps_rep(dumper, obj): state = {'second': obj.__dict__['second'], 'picosecond': obj.__dict__['picosecond']} - return dumper.represent_mapping(u'!timestamp_ps', state) + return dumper.represent_mapping('!timestamp_ps', state) def ns_con(loader, node): return PrecisionTime.nsTime(**loader.construct_mapping(node)) def ps_con(loader, node): return PrecisionTime.psTime(**loader.construct_mapping(node)) Dumper.add_representer(PrecisionTime.nsTime, ns_rep) Dumper.add_representer(PrecisionTime.psTime, ps_rep) -Loader.add_constructor(u'!timestamp_ns', ns_con) -Loader.add_constructor(u'!timestamp_nanosecond', 
ns_con) -Loader.add_constructor(u'!timestamp_ps', ps_con) -Loader.add_constructor(u'!timestamp_picosecond', ps_con) +Loader.add_constructor('!timestamp_ns', ns_con) +Loader.add_constructor('!timestamp_nanosecond', ns_con) +Loader.add_constructor('!timestamp_ps', ps_con) +Loader.add_constructor('!timestamp_picosecond', ps_con) # Binary object constructor & representer: -def bin_rep(dumper, obj): return dumper.represent_mapping(u'!binary', obj.__dict__) +def bin_rep(dumper, obj): return dumper.represent_mapping('!binary', obj.__dict__) def bin_con(loader, node): return DynamicObject.Binary(**loader.construct_mapping(node)) Dumper.add_representer(DynamicObject.Binary, bin_rep) -Loader.add_constructor(u'!binary', bin_con) - +Loader.add_constructor('!binary', bin_con) diff --git a/schainpy/model/serializer/test/Lookup.py b/schainpy/model/serializer/test/Lookup.py index 53ab278..9a5040e 100644 --- a/schainpy/model/serializer/test/Lookup.py +++ b/schainpy/model/serializer/test/Lookup.py @@ -15,8 +15,8 @@ import platform import collections # Implicit Types: -python_dtypes = tuple([bool,int,long,float,str,datetime.datetime,list, - set,dict,tuple,unicode]) +python_dtypes = tuple([bool,int,int,float,str,datetime.datetime,list, + set,dict,tuple,str]) # Numpy Data-types: numpy_dtypes = {'dtype.bool': bool, 'dtype.int': np.int, 'dtype.int8': np.int8, @@ -53,10 +53,9 @@ builtin_objects_simple = {'nsTime': PrecisionTime.nsTime, 'psTime': PrecisionTim 'Binary': Binary} # Inverse lookup for accessing tags given a class instance: -cls_dtypes = dict([(v,k) for (k,v) in numpy_dtypes.items()]) -obj_dtypes = dict([(v,k) for (k,v) in builtin_objects_simple.items()]) +cls_dtypes = dict([(v,k) for (k,v) in list(numpy_dtypes.items())]) +obj_dtypes = dict([(v,k) for (k,v) in list(builtin_objects_simple.items())]) # Pointer to the list of all Object classes created, as located in the Object module / class: dynamicClasses = DynamicObject.Object.dynamicClasses - diff --git 
a/schainpy/model/serializer/test/PrecisionTime.py b/schainpy/model/serializer/test/PrecisionTime.py index 205cf5b..7db41e5 100644 --- a/schainpy/model/serializer/test/PrecisionTime.py +++ b/schainpy/model/serializer/test/PrecisionTime.py @@ -18,15 +18,15 @@ class nsTime: def __init__(self, second, nanosecond): self.second = int(second) if self.second < 0: - raise ValueError, 'seconds must be greater than 0, not %i' % (self.second) - nanosecond = long(nanosecond) + raise ValueError('seconds must be greater than 0, not %i' % (self.second)) + nanosecond = int(nanosecond) if nanosecond < 0: - raise ValueError, 'nanoseconds must be greater 0, not %i' % (nanosecond) + raise ValueError('nanoseconds must be greater 0, not %i' % (nanosecond)) addSec = nanosecond / 1000000000 if addSec > 0: self.second += addSec self.nanosecond = nanosecond % 1000000000 - self.totalNS = long(self.nanosecond) + long(self.second) * 1000000000 + self.totalNS = int(self.nanosecond) + int(self.second) * 1000000000 def __add__(self, other): @@ -46,7 +46,7 @@ class nsTime: addSec = int(nsResult / 1000000000) self.second = self.second + other.second + addSec self.nanosecond = nsResult % 1000000000 - self.totalNS = long(self.nanosecond) + long(self.second) * 1000000000 + self.totalNS = int(self.nanosecond) + int(self.second) * 1000000000 def __sub__(self, other): @@ -65,8 +65,8 @@ class nsTime: def multiply(self, factor): """multiply this nsTime times an integer """ - if type(factor) not in (types.IntType, types.LongType): - raise ValueError, 'Illegal type %s passed into nsTime.multiply' % (str(type(factor))) + if type(factor) not in (int, int): + raise ValueError('Illegal type %s passed into nsTime.multiply' % (str(type(factor)))) newTotalNS = self.totalNS * factor newSeconds = int(newTotalNS / 1000000000) newNanoseconds = int(newTotalNS - (newSeconds * 1000000000)) @@ -85,7 +85,7 @@ class nsTime: def __mod__(self, other): """__mod__ implements self % other. 
""" - if type(other) in (types.IntType, types.LongType): + if type(other) in (int, int): return self.totalNS % other else: return self.totalNS % other.totalNS @@ -118,15 +118,15 @@ class psTime: def __init__(self, second, picosecond): self.second = int(second) if self.second < 0: - raise ValueError, 'seconds must be greater than 0, not %i' % (self.second) - picosecond = long(picosecond) + raise ValueError('seconds must be greater than 0, not %i' % (self.second)) + picosecond = int(picosecond) if picosecond < 0: - raise ValueError, 'picoseconds must be greater 0, not %i' % (picosecond) + raise ValueError('picoseconds must be greater 0, not %i' % (picosecond)) addSec = picosecond / 1000000000000 if addSec > 0: self.second += addSec self.picosecond = picosecond % 1000000000000 - self.totalPS = long(self.picosecond) + long(self.second) * 1000000000000 + self.totalPS = int(self.picosecond) + int(self.second) * 1000000000000 def __add__(self, other): @@ -146,7 +146,7 @@ class psTime: addSec = int(psResult / 1000000000000) self.second = self.second + other.second + addSec self.picosecond = psResult % 1000000000000 - self.totalPS = long(self.picosecond) + long(self.second) * 1000000000000 + self.totalPS = int(self.picosecond) + int(self.second) * 1000000000000 def __sub__(self, other): @@ -165,8 +165,8 @@ class psTime: def multiply(self, factor): """multiply this psTime times an integer """ - if type(factor) not in (types.IntType, types.LongType): - raise ValueError, 'Illegal type %s passed into psTime.multiply' % (str(type(factor))) + if type(factor) not in (int, int): + raise ValueError('Illegal type %s passed into psTime.multiply' % (str(type(factor)))) newTotalPS = self.totalPS * factor newSeconds = int(newTotalPS / 1000000000000) newPicoseconds = int(newTotalPS - (newSeconds * 1000000000000)) @@ -185,7 +185,7 @@ class psTime: def __mod__(self, other): """__mod__ implements self % other. 
""" - if type(other) in (types.IntType, types.LongType): + if type(other) in (int, int): return self.totalPS % other else: return self.totalPS % other.totalPS @@ -208,4 +208,3 @@ class psTime: def __str__(self): return '%d.%12d' % (self.second, self.picosecond) - diff --git a/schainpy/model/serializer/test/Serializer.py b/schainpy/model/serializer/test/Serializer.py index 9201f6c..53fee8f 100644 --- a/schainpy/model/serializer/test/Serializer.py +++ b/schainpy/model/serializer/test/Serializer.py @@ -82,16 +82,16 @@ class YAMLSerializer(Serializer): # Regular expression taken from yaml.constructor.py timestamp_regexp_str = str(\ - ur'^(?P[0-9][0-9][0-9][0-9])' - ur'-(?P[0-9][0-9]?)' - ur'-(?P[0-9][0-9]?)' - ur'(?:(?:[Tt]|[ \t]+)' - ur'(?P[0-9][0-9]?)' - ur':(?P[0-9][0-9])' - ur':(?P[0-9][0-9])' - ur'(?:\.(?P[0-9]*))?' - ur'(?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?)' - ur'(?::(?P[0-9][0-9]))?))?)?$') + r'^(?P[0-9][0-9][0-9][0-9])' + r'-(?P[0-9][0-9]?)' + r'-(?P[0-9][0-9]?)' + r'(?:(?:[Tt]|[ \t]+)' + r'(?P[0-9][0-9]?)' + r':(?P[0-9][0-9])' + r':(?P[0-9][0-9])' + r'(?:\.(?P[0-9]*))?' 
+ r'(?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?)' + r'(?::(?P[0-9][0-9]))?))?)?$') timestamp_regexp = re.compile(timestamp_regexp_str, re.X) def construct_timestamp(value): @@ -133,10 +133,10 @@ class MessagePackSerializer(Serializer): def __fromSerial(self, msg_dict): if not isinstance(msg_dict, (dict, list, tuple)): return msg_dict # msg_dict is a value - return it - if isinstance(msg_dict, dict) and msg_dict.has_key('__meta_attributes'): + if isinstance(msg_dict, dict) and '__meta_attributes' in msg_dict: meta_attr = msg_dict['__meta_attributes'] msg_dict.pop('__meta_attributes') - if meta_attr.has_key('type'): + if 'type' in meta_attr: if meta_attr['type'] == 'datetime': return construct_timestamp(str(msg_dict['ts'])) elif meta_attr['type'] == 'nsTime': @@ -147,7 +147,7 @@ class MessagePackSerializer(Serializer): except KeyError: dtype = Lookup.builtin_objects[meta_attr['type']] return dtype(**msg_dict) else: - for key in msg_dict.keys(): + for key in list(msg_dict.keys()): msg_dict[key] = self.__fromSerial(msg_dict[key]) cls = Lookup.dynamicClasses['%s.%s'%(meta_attr['__object_name'],meta_attr['__revision_number'])] return cls(**msg_dict) @@ -159,7 +159,7 @@ class MessagePackSerializer(Serializer): return np.frombuffer(value, dtype=Lookup.numpy_dtypes[msg_dict[1]])[0] tup = isinstance(msg_dict, tuple) - if tup and len(msg_dict) > 1 and msg_dict[0] in Lookup.numpy_dtypes.keys(): + if tup and len(msg_dict) > 1 and msg_dict[0] in list(Lookup.numpy_dtypes.keys()): msg_flat = list(msg_dict) dtypeName = msg_flat.pop(0) dtype = Lookup.numpy_dtypes[dtypeName] @@ -192,7 +192,7 @@ class MessagePackSerializer(Serializer): return msg_dict elif isinstance(obj, DynamicObject.Object): msg_dict = {} - for key, value in obj.__dict__.items(): + for key, value in list(obj.__dict__.items()): msg_dict[key] = self.__toSerial(value) msg_dict['__meta_attributes'] = obj.__class__.meta_attributes @@ -210,7 +210,7 @@ class MessagePackSerializer(Serializer): msg_flat.append(toSer) return 
list(msg_flat) - is_builtin = obj.__class__ in Lookup.numpy_dtypes.values() + is_builtin = obj.__class__ in list(Lookup.numpy_dtypes.values()) #is_python = isinstance(obj, Lookup.python_dtypes) if is_builtin: # and not is_python: try: @@ -246,7 +246,7 @@ class HDF5Serializer(Serializer): if isinstance(grp, h5py.Dataset): return grp.value - elif isinstance(grp, h5py.Group) and '__type' in grp.keys(): + elif isinstance(grp, h5py.Group) and '__type' in list(grp.keys()): typ = grp['__type'].value if typ == 'datetime': return construct_timestamp(str(grp['ts'].value)) @@ -259,7 +259,7 @@ class HDF5Serializer(Serializer): try: cls = Lookup.builtin_objects_simple[typ] except KeyError: cls = Lookup.dynamicClasses[typ] args = [] - for key in grp.keys(): + for key in list(grp.keys()): fromSer = self.__fromSerial(grp[key]) args.append((key, fromSer)) kwargs = dict(args) @@ -299,7 +299,7 @@ class HDF5Serializer(Serializer): elif isinstance(obj, tuple(Lookup.builtin_objects_simple.values())): sub_grp = grp.create_group(name) sub_grp['__type'] = Lookup.obj_dtypes[obj.__class__] - for key, value in obj.__dict__.items(): + for key, value in list(obj.__dict__.items()): if value != None and key not in ['totalNS', 'totalPS']: sub_grp[key] = value @@ -313,7 +313,7 @@ class HDF5Serializer(Serializer): tag = '%s.%s'%(obj.getObjectName(), obj.getRevisionNumber()) sub_grp['__type'] = tag # Put all of the DynamicObject's attributes into the new h5py group - for key, value in obj.__dict__.items(): + for key, value in list(obj.__dict__.items()): self.__toSerial(value, sub_grp, key) elif isinstance(obj, tuple): @@ -356,7 +356,7 @@ class jsonSerializer(Serializer): #return json.dumps(string) return jsonpickle.encode(string, max_depth=500) -# Dict mapping from serializer type to corresponding class object: +# Dict mapping from .serializer type to corresponding class object: serializers = {'yaml': YAMLSerializer, 'msgpack': MessagePackSerializer, 'hdf5': HDF5Serializer, @@ -367,7 +367,6 @@ 
instances = {'yaml': YAMLSerializer(), 'hdf5': HDF5Serializer(), 'json': jsonSerializer()} -serial_types = dict([(v,u) for u,v in serializers.items()]) +serial_types = dict([(v,u) for u,v in list(serializers.items())]) compression_types = ['gzip', ''] - diff --git a/schainpy/model/serializer/test/serialtest.py b/schainpy/model/serializer/test/serialtest.py index 0b8ded6..577c4bd 100644 --- a/schainpy/model/serializer/test/serialtest.py +++ b/schainpy/model/serializer/test/serialtest.py @@ -157,7 +157,7 @@ datastr = serializer.toSerial(source_object) dest_object = serializer.fromSerial(datastr) -print "dest_object=",dest_object +print("dest_object=",dest_object) myObject = StateListObject(hierarchical="yes",state=np.array([1,2,3.0])) @@ -168,7 +168,7 @@ packed = msgpack.packb(datastr) try: r= redis.StrictRedis(host='localhost',port=6379,db=0) except Exception as eobj: - print "is the redis server running?",eobj + print("is the redis server running?",eobj) else: r.set('baz',packed) # converts to string @@ -178,10 +178,9 @@ unpacked = msgpack.unpackb(x) dest_object = serializer.fromSerial(unpacked) -print "val1=",dest_object.hierarchical +print("val1=",dest_object.hierarchical) val2 = dest_object.state -print "val2=",val2 +print("val2=",val2) # can numpy array be used as array? 
-print val2.shape - +print(val2.shape) diff --git a/schainpy/model/utils/__init__.py b/schainpy/model/utils/__init__.py index 0359aa1..40ee51f 100644 --- a/schainpy/model/utils/__init__.py +++ b/schainpy/model/utils/__init__.py @@ -4,5 +4,5 @@ $Author: murco $ $Id: Processor.py 1 2012-11-12 18:56:07Z murco $ ''' -from jroutils_ftp import * -from jroutils_publish import * +from .jroutils_ftp import * +from .jroutils_publish import * diff --git a/schainpy/model/utils/jroutils_ftp.py b/schainpy/model/utils/jroutils_ftp.py index a507f2f..a1cc981 100644 --- a/schainpy/model/utils/jroutils_ftp.py +++ b/schainpy/model/utils/jroutils_ftp.py @@ -9,7 +9,7 @@ try: import paramiko import scp except: - print "You should install paramiko and scp libraries \nif you want to use SSH protocol to upload files to the server" + print("You should install paramiko and scp libraries \nif you want to use SSH protocol to upload files to the server") import time @@ -64,9 +64,9 @@ class Remote(Thread): self.stopFlag = False - print "[Remote Server] Opening server: %s" %self.__server + print("[Remote Server] Opening server: %s" %self.__server) if self.open(self.__server, self.__username, self.__password, self.__remotefolder): - print "[Remote Server] %s server was opened successfully" %self.__server + print("[Remote Server] %s server was opened successfully" %self.__server) self.close() @@ -81,31 +81,31 @@ class Remote(Thread): """ Connect to server and create a connection class (FTP or SSH) to remote server. 
""" - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def close(self): """ Close connection to server """ - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def mkdir(self, remotefolder): """ Create a folder remotely """ - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def cd(self, remotefolder): """ Change working directory in remote server """ - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def download(self, filename, localfolder=None): """ Download a file from server to local host """ - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def sendFile(self, fullfilename): """ @@ -117,7 +117,7 @@ class Remote(Thread): Returns: 0 in error case else 1 """ - raise NotImplementedError, "Implement this method in child class" + raise NotImplementedError("Implement this method in child class") def upload(self, fullfilename, remotefolder=None): """ @@ -132,7 +132,7 @@ class Remote(Thread): Returns: 0 in error case else 1 """ - print "[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder) + print("[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder)) if not self.status: return 0 @@ -144,10 +144,10 @@ class Remote(Thread): return 0 if not self.sendFile(fullfilename): - print "[Remote Server] Error uploading file %s" %fullfilename + print("[Remote Server] Error uploading file %s" %fullfilename) return 0 - print "[Remote Server] upload finished successfully" + print("[Remote Server] upload finished successfully") return 1 @@ -180,11 +180,11 @@ class Remote(Thread): def run(self): if not 
self.status: - print "Finishing FTP service" + print("Finishing FTP service") return if not self.cd(self.remotefolder): - raise ValueError, "Could not access to the new remote directory: %s" %self.remotefolder + raise ValueError("Could not access to the new remote directory: %s" %self.remotefolder) while True: @@ -199,7 +199,7 @@ class Remote(Thread): # self.bussy = True self.mutex.acquire() - print "[Remote Server] Opening %s" %self.__server + print("[Remote Server] Opening %s" %self.__server) if not self.open(self.__server, self.__username, self.__password, self.__remotefolder): self.mutex.release() continue @@ -207,13 +207,13 @@ class Remote(Thread): for thisFile in self.fileList: self.upload(thisFile, self.remotefolder) - print "[Remote Server] Closing %s" %self.__server + print("[Remote Server] Closing %s" %self.__server) self.close() self.mutex.release() # self.bussy = False - print "[Remote Server] Thread stopped successfully" + print("[Remote Server] Thread stopped successfully") class FTPClient(Remote): @@ -247,29 +247,29 @@ class FTPClient(Remote): """ if server == None: - raise ValueError, "FTP server should be defined" + raise ValueError("FTP server should be defined") if username == None: - raise ValueError, "FTP username should be defined" + raise ValueError("FTP username should be defined") if password == None: - raise ValueError, "FTP password should be defined" + raise ValueError("FTP password should be defined") if remotefolder == None: - raise ValueError, "FTP remote folder should be defined" + raise ValueError("FTP remote folder should be defined") try: ftpClientObj = ftplib.FTP(server) - except ftplib.all_errors, e: - print "[FTP Server]: FTP server connection fail: %s" %server - print "[FTP Server]:", e + except ftplib.all_errors as e: + print("[FTP Server]: FTP server connection fail: %s" %server) + print("[FTP Server]:", e) self.status = 0 return 0 try: ftpClientObj.login(username, password) except ftplib.all_errors: - print "[FTP Server]: 
FTP username or password are incorrect" + print("[FTP Server]: FTP username or password are incorrect") self.status = 0 return 0 @@ -279,7 +279,7 @@ class FTPClient(Remote): try: ftpClientObj.cwd(remotefolder) except ftplib.all_errors: - print "[FTP Server]: FTP remote folder is invalid: %s" %remotefolder + print("[FTP Server]: FTP remote folder is invalid: %s" %remotefolder) remotefolder = ftpClientObj.pwd() self.server = server @@ -316,7 +316,7 @@ class FTPClient(Remote): try: self.__ftpClientObj.mkd(dirname) except ftplib.all_errors: - print "[FTP Server]: Error creating remote folder: %s" %remotefolder + print("[FTP Server]: Error creating remote folder: %s" %remotefolder) return 0 return 1 @@ -343,11 +343,11 @@ class FTPClient(Remote): try: self.__ftpClientObj.cwd(remotefolder) except ftplib.all_errors: - print '[FTP Server]: Error changing to %s' %remotefolder - print '[FTP Server]: Trying to create remote folder' + print('[FTP Server]: Error changing to %s' %remotefolder) + print('[FTP Server]: Trying to create remote folder') if not self.mkdir(remotefolder): - print '[FTP Server]: Remote folder could not be created' + print('[FTP Server]: Remote folder could not be created') return 0 try: @@ -372,14 +372,14 @@ class FTPClient(Remote): try: self.__ftpClientObj.storbinary(command, fp) - except ftplib.all_errors, e: - print "[FTP Server]:", e + except ftplib.all_errors as e: + print("[FTP Server]:", e) return 0 try: self.__ftpClientObj.sendcmd('SITE CHMOD 755 ' + filename) - except ftplib.all_errors, e: - print "[FTP Server]:", e + except ftplib.all_errors as e: + print("[FTP Server]:", e) fp.close() @@ -418,16 +418,16 @@ class SSHClient(Remote): import socket if server == None: - raise ValueError, "SSH server should be defined" + raise ValueError("SSH server should be defined") if username == None: - raise ValueError, "SSH username should be defined" + raise ValueError("SSH username should be defined") if password == None: - raise ValueError, "SSH password 
should be defined" + raise ValueError("SSH password should be defined") if remotefolder == None: - raise ValueError, "SSH remote folder should be defined" + raise ValueError("SSH remote folder should be defined") sshClientObj = paramiko.SSHClient() @@ -437,16 +437,16 @@ class SSHClient(Remote): self.status = 0 try: sshClientObj.connect(server, username=username, password=password, port=port) - except paramiko.AuthenticationException, e: + except paramiko.AuthenticationException as e: # print "SSH username or password are incorrect: %s" - print "[SSH Server]:", e + print("[SSH Server]:", e) return 0 - except SSHException, e: - print "[SSH Server]:", e + except SSHException as e: + print("[SSH Server]:", e) return 0 except socket.error: self.status = 0 - print "[SSH Server]:", e + print("[SSH Server]:", e) return 0 self.status = 1 @@ -463,7 +463,7 @@ class SSHClient(Remote): self.status = 1 if not self.cd(remotefolder): - raise ValueError, "[SSH Server]: Could not access to remote folder: %s" %remotefolder + raise ValueError("[SSH Server]: Could not access to remote folder: %s" %remotefolder) return 0 self.remotefolder = remotefolder @@ -564,8 +564,8 @@ class SSHClient(Remote): try: self.__scpClientObj.put(fullfilename, remote_path=self.remotefolder) - except scp.ScpError, e: - print "[SSH Server]", str(e) + except scp.ScpError as e: + print("[SSH Server]", str(e)) return 0 remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1]) @@ -596,7 +596,7 @@ class SendToServer(ProcessingUnit): self.clientObj = SSHClient(server, username, password, remotefolder, period) if not self.clientObj: - raise ValueError, "%s has been chosen as remote access protocol but it is not valid" %protocol + raise ValueError("%s has been chosen as remote access protocol but it is not valid" %protocol) self.clientObj.start() @@ -614,7 +614,7 @@ class SendToServer(ProcessingUnit): for thisFolder in folderList: - print "[Remote Server]: Searching files on %s" %thisFolder + 
print("[Remote Server]: Searching files on %s" %thisFolder) filenameList = glob.glob1(thisFolder, '*%s' %self.ext) @@ -643,18 +643,18 @@ class SendToServer(ProcessingUnit): self.isConfig = True if not self.clientObj.is_alive(): - print "[Remote Server]: Restarting connection " + print("[Remote Server]: Restarting connection ") self.setup(**kwargs) if time.time() - self.init >= self.period: fullfilenameList = self.findFiles() if self.clientObj.updateFileList(fullfilenameList): - print "[Remote Server]: Sending the next files ", str(fullfilenameList) + print("[Remote Server]: Sending the next files ", str(fullfilenameList)) self.init = time.time() def close(self): - print "[Remote Server] Stopping thread" + print("[Remote Server] Stopping thread") self.clientObj.stop() @@ -710,7 +710,7 @@ class FTP(object): # print 'Connect to FTP Server: Successfully' except ftplib.all_errors: - print 'Error FTP Service' + print('Error FTP Service') self.status = 1 return @@ -721,14 +721,14 @@ class FTP(object): try: self.dirList = self.ftp.nlst() - except ftplib.error_perm, resp: + except ftplib.error_perm as resp: if str(resp) == "550 No files found": - print "no files in this directory" + print("no files in this directory") self.status = 1 return except ftplib.all_errors: - print 'Error Displaying Dir-Files' + print('Error Displaying Dir-Files') self.status = 1 return @@ -763,7 +763,7 @@ class FTP(object): try: self.ftp.mkd(dirname) except: - print 'Error creating remote folder:%s'%dirname + print('Error creating remote folder:%s'%dirname) return 1 return 0 @@ -783,7 +783,7 @@ class FTP(object): try: self.ftp.delete(filename) except: - print 'Error deleting remote file:%s'%filename + print('Error deleting remote file:%s'%filename) return 1 return 0 @@ -805,7 +805,7 @@ class FTP(object): if not(filename in self.fileList): - print 'filename:%s not exists'%filename + print('filename:%s not exists'%filename) self.status = 1 return self.status @@ -814,11 +814,11 @@ class FTP(object): 
self.file = open(newfilename, 'wb') try: - print 'Download: ' + filename + print('Download: ' + filename) self.ftp.retrbinary('RETR ' + filename, self.__handleDownload) - print 'Download Complete' + print('Download Complete') except ftplib.all_errors: - print 'Error Downloading ' + filename + print('Error Downloading ' + filename) self.status = 1 return self.status @@ -861,12 +861,12 @@ class FTP(object): command = "STOR " + tail - print 'Uploading: ' + tail + print('Uploading: ' + tail) self.ftp.storbinary(command, self.file) - print 'Upload Completed' + print('Upload Completed') except ftplib.all_errors: - print 'Error Uploading ' + tail + print('Error Uploading ' + tail) self.status = 1 return self.status @@ -895,11 +895,11 @@ class FTP(object): """ self.remotefolder = remotefolder - print 'Change to ' + self.remotefolder + print('Change to ' + self.remotefolder) try: self.ftp.cwd(remotefolder) except ftplib.all_errors: - print 'Error Change to ' + self.remotefolder + print('Error Change to ' + self.remotefolder) infoList = None self.folderList = None return infoList,self.folderList @@ -909,14 +909,14 @@ class FTP(object): try: self.dirList = self.ftp.nlst() - except ftplib.error_perm, resp: + except ftplib.error_perm as resp: if str(resp) == "550 No files found": - print "no files in this directory" + print("no files in this directory") infoList = None self.folderList = None return infoList,self.folderList except ftplib.all_errors: - print 'Error Displaying Dir-Files' + print('Error Displaying Dir-Files') infoList = None self.folderList = None return infoList,self.folderList @@ -957,8 +957,8 @@ class SendByFTP(Operation): def error_print(self, ValueError): - print ValueError, 'Error FTP' - print "don't worry the program is running..." 
+ print(ValueError, 'Error FTP') + print("don't worry the program is running...") def worker_ftp(self, server, username, password, remotefolder, filenameList): @@ -981,7 +981,7 @@ class SendByFTP(Operation): if p.is_alive(): p.terminate() p.join() - print 'killing ftp process...' + print('killing ftp process...') self.status = 0 return @@ -1005,4 +1005,4 @@ class SendByFTP(Operation): self.counter = 0 - self.status = 1 + self.status = 1 \ No newline at end of file diff --git a/schainpy/model/utils/jroutils_publish.py b/schainpy/model/utils/jroutils_publish.py index 7f2b188..ea41b05 100644 --- a/schainpy/model/utils/jroutils_publish.py +++ b/schainpy/model/utils/jroutils_publish.py @@ -56,7 +56,7 @@ def get_plot_code(s): def roundFloats(obj): if isinstance(obj, list): - return map(roundFloats, obj) + return list(map(roundFloats, obj)) elif isinstance(obj, float): return round(obj, 2) @@ -241,7 +241,7 @@ class Data(object): H.sort() for key in self.data: shape = self.shape(key)[:-1] + H.shape - for tm, obj in self.data[key].items(): + for tm, obj in list(self.data[key].items()): h = self.__heights[self.__times.index(tm)] if H.size == h.size: continue @@ -285,7 +285,7 @@ class Data(object): else: ret['pairs'] = [] - for key, value in self.meta.items(): + for key, value in list(self.meta.items()): ret[key] = value return json.dumps(ret) @@ -460,7 +460,7 @@ class PublishData(Operation): 'yData': yData } else: - print "Tipo de grafico invalido" + print("Tipo de grafico invalido") payload = { 'data': 'None', 'timestamp': 'None', @@ -805,7 +805,7 @@ class SendToFTP(Operation, Process): try: self.ftp.storbinary(command, fp, blocksize=1024) - except Exception, e: + except Exception as e: log.error('{}'.format(e), self.name) if self.ftp is not None: self.ftp.close() @@ -814,7 +814,7 @@ class SendToFTP(Operation, Process): try: self.ftp.sendcmd('SITE CHMOD 755 {}'.format(dst)) - except Exception, e: + except Exception as e: log.error('{}'.format(e), self.name) if self.ftp is 
not None: self.ftp.close() @@ -866,4 +866,4 @@ class SendToFTP(Operation, Process): if self.ftp is not None: self.ftp.close() - self.terminate() + self.terminate() \ No newline at end of file diff --git a/schainpy/utils/paramsFinder.py b/schainpy/utils/paramsFinder.py index 0efaf1c..e0ced54 100644 --- a/schainpy/utils/paramsFinder.py +++ b/schainpy/utils/paramsFinder.py @@ -63,9 +63,9 @@ def formatArgs(op): argsAsKey = ["\t'{}'".format(x) for x in args] argsFormatted = ": 'string',\n".join(argsAsKey) - print op - print "parameters = { \n" + argsFormatted + ": 'string',\n }" - print '\n' + print(op) + print("parameters = { \n" + argsFormatted + ": 'string',\n }") + print('\n') if __name__ == "__main__": diff --git a/schainpy/zerorpc/MyClient01.py b/schainpy/zerorpc/MyClient01.py index 0babed8..11bbfb5 100644 --- a/schainpy/zerorpc/MyClient01.py +++ b/schainpy/zerorpc/MyClient01.py @@ -103,8 +103,8 @@ def printSpeed(deltaTime, mySerial): size = len(mySerial)/1024. vel = 1.0*size / deltaTime - print "Index [", replayerObj.getProfileIndex(), "]: ", - print "Total time %5.2f ms, Data size %5.2f KB, Speed %5.2f MB/s" %(deltaTime, size, vel) + print("Index [", replayerObj.getProfileIndex(), "]: ", end=' ') + print("Total time %5.2f ms, Data size %5.2f KB, Speed %5.2f MB/s" %(deltaTime, size, vel)) #################### if __name__ == '__main__': @@ -131,7 +131,7 @@ if __name__ == '__main__': deltaTime = (time.time() - ini)*1024 if not mySerialData: - print "No more data" + print("No more data") break # myDataDict = SERIALIZER.loads(mySerialData) diff --git a/schainpy/zerorpc/MyClient02.py b/schainpy/zerorpc/MyClient02.py index dffb82f..246fc39 100644 --- a/schainpy/zerorpc/MyClient02.py +++ b/schainpy/zerorpc/MyClient02.py @@ -10,4 +10,4 @@ if __name__ == '__main__': c = zerorpc.Client() c.connect("tcp://127.0.0.1:4242") c.load("file2") # AAAHH! The previously loaded model gets overwritten here! 
- print c.getModelName() \ No newline at end of file + print(c.getModelName()) \ No newline at end of file diff --git a/schainpy/zerorpc/MyServer.py b/schainpy/zerorpc/MyServer.py index 5d8ad2e..8d40540 100644 --- a/schainpy/zerorpc/MyServer.py +++ b/schainpy/zerorpc/MyServer.py @@ -25,9 +25,9 @@ if __name__ == '__main__': replayerObj.start() - print "Initializing 'zerorpc' server" + print("Initializing 'zerorpc' server") s = zerorpc.Server(replayerObj) s.bind("tcp://0.0.0.0:4242") s.run() - print "End" \ No newline at end of file + print("End") \ No newline at end of file diff --git a/schainpy/zerorpc/test/testServer.py b/schainpy/zerorpc/test/testServer.py index 0f81f86..29517ca 100644 --- a/schainpy/zerorpc/test/testServer.py +++ b/schainpy/zerorpc/test/testServer.py @@ -22,7 +22,7 @@ def isDictFormat(thisValue): if type(thisValue) != type({}): return False - if '__name__' not in thisValue.keys(): + if '__name__' not in list(thisValue.keys()): return False return True @@ -33,7 +33,7 @@ def obj2Dict(myObj): myDict['__name__'] = myObj.__class__.__name__ - for thisKey, thisValue in myObj.__dict__.items(): + for thisKey, thisValue in list(myObj.__dict__.items()): if isNotClassVar(thisValue): myDict[thisKey] = thisValue @@ -49,14 +49,14 @@ def dict2Obj(myDict): ''' ''' - if '__name__' not in myDict.keys(): + if '__name__' not in list(myDict.keys()): return None className = eval(myDict['__name__']) myObj = className() - for thisKey, thisValue in myDict.items(): + for thisKey, thisValue in list(myDict.items()): if thisKey == '__name__': continue @@ -129,7 +129,7 @@ def myMsgPackTest(): x_enc = m.encode(x) x_rec = m.decode(x_enc) - print x_rec + print(x_rec) # # x_enc = msgpack.packb(x, default=m.encoder) # x_rec = msgpack.unpackb(x_enc, object_hook=m.decoder) @@ -159,19 +159,19 @@ if __name__ == '__main__': # print myNewObj.__dict__ # sys.exit() - print myDict + print(myDict) newSerial = serializerObj.encode(myDict) # print newSerial newDict = 
serializerObj.decode(newSerial) - print newDict + print(newDict) myNewObj = dict2Obj(newDict) - print - print - print 50*'###' - print myTestObj.__dict__ - print myNewObj.__dict__ + print() + print() + print(50*'###') + print(myTestObj.__dict__) + print(myNewObj.__dict__) \ No newline at end of file diff --git a/schainpy/zerorpc/test/testServer_Voltage.py b/schainpy/zerorpc/test/testServer_Voltage.py index 4ce04bd..8b2daca 100644 --- a/schainpy/zerorpc/test/testServer_Voltage.py +++ b/schainpy/zerorpc/test/testServer_Voltage.py @@ -5,7 +5,7 @@ Created on Jul 15, 2014 ''' import sys -import cPickle +import pickle from schainpy.model.data.jrodata import Voltage # from schainpy.model.io.jrodataIO import USRPReaderMP @@ -37,10 +37,10 @@ if __name__ == "__main__": # print newValue - print '###########CPICKLE##################' - print myDict - newSerialized = cPickle.dumps(myDict, 2) + print('###########CPICKLE##################') + print(myDict) + newSerialized = pickle.dumps(myDict, 2) # print newValue - newDict = cPickle.loads(newSerialized) - print newDict \ No newline at end of file + newDict = pickle.loads(newSerialized) + print(newDict) \ No newline at end of file