Python 2to3, Spectra (all operations) working
George Yong
r1167:1f521b07c958
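The hunks below apply the standard Python 2 to 3 conversions by hand: print statements become print() calls; ConfigParser, StringIO and Queue move to their renamed modules configparser, io and queue; except E, e becomes except E as e; raise E, msg becomes raise E(msg); xrange becomes range; dict views are wrapped in list() where list behaviour is still needed; and implicit relative imports gain a leading dot. A minimal sketch of the same idioms, illustrative only and not taken from any single file in this diff:

    # Python 3 idioms used throughout this commit (illustrative sketch only)
    import configparser
    import io

    def load_conf(text):
        # configparser and io.StringIO replace the old ConfigParser/StringIO modules
        parser = configparser.ConfigParser()
        parser.read_file(io.StringIO('[schain]\n' + text))
        return parser

    def describe(params):
        # print is now a function; keys() returns a view, hence the list() wrapper
        keyList = list(params.keys())
        keyList.sort()
        for key in keyList:
            print('%s = %s' % (key, params[key]))

    def parse_float(value):
        # exceptions are raised and caught with the Python 3 syntax
        try:
            return float(value)
        except ValueError as e:
            raise ValueError('%s could not be parsed: %s' % (value, e))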
@@ -1,7 +1,7
1 '''
1 '''
2 Created on Feb 7, 2012
2 Created on Jul 3, 2018
3
3
4 @author $Author$
4 @author $Author$
5 @version $Id$
5 @version $Id$
6 '''
6 '''
7 __version__ = '2.3'
7 __version__ = '3.0'
@@ -11,8 +11,8 import sys
11 import time
11 import time
12 import traceback
12 import traceback
13 import smtplib
13 import smtplib
14 import ConfigParser
14 import configparser
15 import StringIO
15 import io
16 from threading import Thread
16 from threading import Thread
17 from multiprocessing import Process
17 from multiprocessing import Process
18 from email.mime.text import MIMEText
18 from email.mime.text import MIMEText
@@ -65,7 +65,7 class Alarm(Process):
65 @staticmethod
65 @staticmethod
66 def send_email(**kwargs):
66 def send_email(**kwargs):
67 notifier = SchainNotify()
67 notifier = SchainNotify()
68 print kwargs
68 print(kwargs)
69 notifier.notify(**kwargs)
69 notifier.notify(**kwargs)
70
70
71 @staticmethod
71 @staticmethod
@@ -144,10 +144,10 class SchainConfigure():
144 return
144 return
145
145
146 # create Parser using standard module ConfigParser
146 # create Parser using standard module ConfigParser
147 self.__parser = ConfigParser.ConfigParser()
147 self.__parser = configparser.ConfigParser()
148
148
149 # read conf file into a StringIO with "[madrigal]\n" section heading prepended
149 # read conf file into a StringIO with "[madrigal]\n" section heading prepended
150 strConfFile = StringIO.StringIO("[schain]\n" + self.__confFile.read())
150 strConfFile = io.StringIO("[schain]\n" + self.__confFile.read())
151
151
152 # parse StringIO configuration file
152 # parse StringIO configuration file
153 self.__parser.readfp(strConfFile)
153 self.__parser.readfp(strConfFile)
@@ -355,7 +355,7 class SchainNotify:
355 if not self.__emailToAddress:
355 if not self.__emailToAddress:
356 return 0
356 return 0
357
357
358 print "***** Sending alert to %s *****" %self.__emailToAddress
358 print("***** Sending alert to %s *****" %self.__emailToAddress)
359 # set up message
359 # set up message
360
360
361 sent=self.sendEmail(email_from=self.__emailFromAddress,
361 sent=self.sendEmail(email_from=self.__emailFromAddress,
@@ -500,4 +500,4 if __name__ == '__main__':
500
500
501 test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify')
501 test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify')
502
502
503 print 'Hopefully message sent - check.'
503 print('Hopefully message sent - check.')
\ No newline at end of file
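One note on the configuration code above: the commit moves to configparser and io.StringIO but keeps readfp(), which still runs under Python 3 yet has been a deprecated alias of read_file() since 3.2. A small sketch of the same section-prepending pattern with the newer name; the path and section header are placeholders only:

    import configparser
    import io

    def read_schain_conf(path='/etc/schain.conf'):
        # prepend a section header so a headerless conf file can be parsed
        with open(path) as conf_file:
            parser = configparser.ConfigParser()
            parser.read_file(io.StringIO('[schain]\n' + conf_file.read()))
        return parser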
@@ -67,7 +67,7 def MPProject(project, n=cpu_count()):
67 for process in processes:
67 for process in processes:
68 process.terminate()
68 process.terminate()
69 process.join()
69 process.join()
70 print traceback.print_tb(trace)
70 print(traceback.print_tb(trace))
71
71
72 sys.excepthook = beforeExit
72 sys.excepthook = beforeExit
73
73
@@ -114,7 +114,7 class ParameterConf():
114 return self.__formated_value
114 return self.__formated_value
115
115
116 if value == '':
116 if value == '':
117 raise ValueError, '%s: This parameter value is empty' % self.name
117 raise ValueError('%s: This parameter value is empty' % self.name)
118
118
119 if format == 'list':
119 if format == 'list':
120 strList = value.split(',')
120 strList = value.split(',')
@@ -180,16 +180,16 class ParameterConf():
180 new_value = ast.literal_eval(value)
180 new_value = ast.literal_eval(value)
181
181
182 if type(new_value) not in (tuple, list):
182 if type(new_value) not in (tuple, list):
183 raise ValueError, '%s has to be a tuple or list of pairs' % value
183 raise ValueError('%s has to be a tuple or list of pairs' % value)
184
184
185 if type(new_value[0]) not in (tuple, list):
185 if type(new_value[0]) not in (tuple, list):
186 if len(new_value) != 2:
186 if len(new_value) != 2:
187 raise ValueError, '%s has to be a tuple or list of pairs' % value
187 raise ValueError('%s has to be a tuple or list of pairs' % value)
188 new_value = [new_value]
188 new_value = [new_value]
189
189
190 for thisPair in new_value:
190 for thisPair in new_value:
191 if len(thisPair) != 2:
191 if len(thisPair) != 2:
192 raise ValueError, '%s has to be a tuple or list of pairs' % value
192 raise ValueError('%s has to be a tuple or list of pairs' % value)
193
193
194 self.__formated_value = new_value
194 self.__formated_value = new_value
195
195
@@ -265,7 +265,7 class ParameterConf():
265
265
266 def printattr(self):
266 def printattr(self):
267
267
268 print 'Parameter[%s]: name = %s, value = %s, format = %s' % (self.id, self.name, self.value, self.format)
268 print('Parameter[%s]: name = %s, value = %s, format = %s' % (self.id, self.name, self.value, self.format))
269
269
270
270
271 class OperationConf():
271 class OperationConf():
@@ -434,11 +434,11 class OperationConf():
434
434
435 def printattr(self):
435 def printattr(self):
436
436
437 print '%s[%s]: name = %s, type = %s, priority = %s' % (self.ELEMENTNAME,
437 print('%s[%s]: name = %s, type = %s, priority = %s' % (self.ELEMENTNAME,
438 self.id,
438 self.id,
439 self.name,
439 self.name,
440 self.type,
440 self.type,
441 self.priority)
441 self.priority))
442
442
443 for parmConfObj in self.parmConfObjList:
443 for parmConfObj in self.parmConfObjList:
444 parmConfObj.printattr()
444 parmConfObj.printattr()
@@ -446,11 +446,11 class OperationConf():
446 def createObject(self, plotter_queue=None):
446 def createObject(self, plotter_queue=None):
447
447
448 if self.type == 'self':
448 if self.type == 'self':
449 raise ValueError, 'This operation type cannot be created'
449 raise ValueError('This operation type cannot be created')
450
450
451 if self.type == 'plotter':
451 if self.type == 'plotter':
452 if not plotter_queue:
452 if not plotter_queue:
453 raise ValueError, 'plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)'
453 raise ValueError('plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)')
454
454
455 opObj = Plotter(self.name, plotter_queue)
455 opObj = Plotter(self.name, plotter_queue)
456
456
@@ -563,7 +563,7 class ProcUnitConf():
563
563
564 # Compatible with old signal chain version
564 # Compatible with old signal chain version
565 if datatype == None and name == None:
565 if datatype == None and name == None:
566 raise ValueError, 'datatype or name should be defined'
566 raise ValueError('datatype or name should be defined')
567
567
568 if name == None:
568 if name == None:
569 if 'Proc' in datatype:
569 if 'Proc' in datatype:
@@ -652,11 +652,11 class ProcUnitConf():
652
652
653 def printattr(self):
653 def printattr(self):
654
654
655 print '%s[%s]: name = %s, datatype = %s, inputId = %s' % (self.ELEMENTNAME,
655 print('%s[%s]: name = %s, datatype = %s, inputId = %s' % (self.ELEMENTNAME,
656 self.id,
656 self.id,
657 self.name,
657 self.name,
658 self.datatype,
658 self.datatype,
659 self.inputId)
659 self.inputId))
660
660
661 for opConfObj in self.opConfObjList:
661 for opConfObj in self.opConfObjList:
662 opConfObj.printattr()
662 opConfObj.printattr()
@@ -759,7 +759,7 class ReadUnitConf(ProcUnitConf):
759
759
760 # Compatible with old signal chain version
760 # Compatible with old signal chain version
761 if datatype == None and name == None:
761 if datatype == None and name == None:
762 raise ValueError, 'datatype or name should be defined'
762 raise ValueError('datatype or name should be defined')
763 if name == None:
763 if name == None:
764 if 'Reader' in datatype:
764 if 'Reader' in datatype:
765 name = datatype
765 name = datatype
@@ -831,7 +831,7 class ReadUnitConf(ProcUnitConf):
831 opObj.addParameter(
831 opObj.addParameter(
832 name='endTime', value=self.endTime, format='time')
832 name='endTime', value=self.endTime, format='time')
833
833
834 for key, value in kwargs.items():
834 for key, value in list(kwargs.items()):
835 opObj.addParameter(name=key, value=value,
835 opObj.addParameter(name=key, value=value,
836 format=type(value).__name__)
836 format=type(value).__name__)
837 else:
837 else:
@@ -853,7 +853,7 class ReadUnitConf(ProcUnitConf):
853 name='startTime', value=self.startTime, format='time')
853 name='startTime', value=self.startTime, format='time')
854 opObj.addParameter(name='endTime', value=self.endTime, format='time')
854 opObj.addParameter(name='endTime', value=self.endTime, format='time')
855
855
856 for key, value in kwargs.items():
856 for key, value in list(kwargs.items()):
857 opObj.addParameter(name=key, value=value,
857 opObj.addParameter(name=key, value=value,
858 format=type(value).__name__)
858 format=type(value).__name__)
859
859
@@ -914,7 +914,7 class Project(Process):
914
914
915 def __getNewId(self):
915 def __getNewId(self):
916
916
917 idList = self.procUnitConfObjDict.keys()
917 idList = list(self.procUnitConfObjDict.keys())
918
918
919 id = int(self.id) * 10
919 id = int(self.id) * 10
920
920
@@ -940,7 +940,7 class Project(Process):
940
940
941 self.id = str(new_id)
941 self.id = str(new_id)
942
942
943 keyList = self.procUnitConfObjDict.keys()
943 keyList = list(self.procUnitConfObjDict.keys())
944 keyList.sort()
944 keyList.sort()
945
945
946 n = 1
946 n = 1
@@ -958,11 +958,11 class Project(Process):
958
958
959 def setup(self, id, name='', description='', email=None, alarm=[]):
959 def setup(self, id, name='', description='', email=None, alarm=[]):
960
960
961 print
961 print()
962 print '*' * 60
962 print('*' * 60)
963 print ' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__
963 print(' Starting SIGNAL CHAIN PROCESSING v%s ' % schainpy.__version__)
964 print '*' * 60
964 print('*' * 60)
965 print
965 print()
966 self.id = str(id)
966 self.id = str(id)
967 self.description = description
967 self.description = description
968 self.email = email
968 self.email = email
@@ -970,7 +970,7 class Project(Process):
970
970
971 def update(self, **kwargs):
971 def update(self, **kwargs):
972
972
973 for key, value in kwargs.items():
973 for key, value in list(kwargs.items()):
974 setattr(self, key, value)
974 setattr(self, key, value)
975
975
976 def clone(self):
976 def clone(self):
@@ -1008,7 +1008,7 class Project(Process):
1008
1008
1009 def removeProcUnit(self, id):
1009 def removeProcUnit(self, id):
1010
1010
1011 if id in self.procUnitConfObjDict.keys():
1011 if id in list(self.procUnitConfObjDict.keys()):
1012 self.procUnitConfObjDict.pop(id)
1012 self.procUnitConfObjDict.pop(id)
1013
1013
1014 def getReadUnitId(self):
1014 def getReadUnitId(self):
@@ -1019,7 +1019,7 class Project(Process):
1019
1019
1020 def getReadUnitObj(self):
1020 def getReadUnitObj(self):
1021
1021
1022 for obj in self.procUnitConfObjDict.values():
1022 for obj in list(self.procUnitConfObjDict.values()):
1023 if obj.getElementName() == 'ReadUnit':
1023 if obj.getElementName() == 'ReadUnit':
1024 return obj
1024 return obj
1025
1025
@@ -1037,7 +1037,7 class Project(Process):
1037
1037
1038 def getProcUnitObjByName(self, name):
1038 def getProcUnitObjByName(self, name):
1039
1039
1040 for obj in self.procUnitConfObjDict.values():
1040 for obj in list(self.procUnitConfObjDict.values()):
1041 if obj.name == name:
1041 if obj.name == name:
1042 return obj
1042 return obj
1043
1043
@@ -1045,7 +1045,7 class Project(Process):
1045
1045
1046 def procUnitItems(self):
1046 def procUnitItems(self):
1047
1047
1048 return self.procUnitConfObjDict.items()
1048 return list(self.procUnitConfObjDict.items())
1049
1049
1050 def makeXml(self):
1050 def makeXml(self):
1051
1051
@@ -1054,7 +1054,7 class Project(Process):
1054 projectElement.set('name', self.name)
1054 projectElement.set('name', self.name)
1055 projectElement.set('description', self.description)
1055 projectElement.set('description', self.description)
1056
1056
1057 for procUnitConfObj in self.procUnitConfObjDict.values():
1057 for procUnitConfObj in list(self.procUnitConfObjDict.values()):
1058 procUnitConfObj.makeXml(projectElement)
1058 procUnitConfObj.makeXml(projectElement)
1059
1059
1060 self.projectElement = projectElement
1060 self.projectElement = projectElement
@@ -1068,17 +1068,17 class Project(Process):
1068 filename = 'schain.xml'
1068 filename = 'schain.xml'
1069
1069
1070 if not filename:
1070 if not filename:
1071 print 'filename has not been defined. Use setFilename(filename) for do it.'
1071 print('filename has not been defined. Use setFilename(filename) for do it.')
1072 return 0
1072 return 0
1073
1073
1074 abs_file = os.path.abspath(filename)
1074 abs_file = os.path.abspath(filename)
1075
1075
1076 if not os.access(os.path.dirname(abs_file), os.W_OK):
1076 if not os.access(os.path.dirname(abs_file), os.W_OK):
1077 print 'No write permission on %s' % os.path.dirname(abs_file)
1077 print('No write permission on %s' % os.path.dirname(abs_file))
1078 return 0
1078 return 0
1079
1079
1080 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1080 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1081 print 'File %s already exists and it could not be overwriten' % abs_file
1081 print('File %s already exists and it could not be overwriten' % abs_file)
1082 return 0
1082 return 0
1083
1083
1084 self.makeXml()
1084 self.makeXml()
@@ -1092,13 +1092,13 class Project(Process):
1092 def readXml(self, filename=None):
1092 def readXml(self, filename=None):
1093
1093
1094 if not filename:
1094 if not filename:
1095 print 'filename is not defined'
1095 print('filename is not defined')
1096 return 0
1096 return 0
1097
1097
1098 abs_file = os.path.abspath(filename)
1098 abs_file = os.path.abspath(filename)
1099
1099
1100 if not os.path.isfile(abs_file):
1100 if not os.path.isfile(abs_file):
1101 print '%s file does not exist' % abs_file
1101 print('%s file does not exist' % abs_file)
1102 return 0
1102 return 0
1103
1103
1104 self.projectElement = None
1104 self.projectElement = None
@@ -1107,7 +1107,7 class Project(Process):
1107 try:
1107 try:
1108 self.projectElement = ElementTree().parse(abs_file)
1108 self.projectElement = ElementTree().parse(abs_file)
1109 except:
1109 except:
1110 print 'Error reading %s, verify file format' % filename
1110 print('Error reading %s, verify file format' % filename)
1111 return 0
1111 return 0
1112
1112
1113 self.project = self.projectElement.tag
1113 self.project = self.projectElement.tag
@@ -1146,16 +1146,16 class Project(Process):
1146
1146
1147 def printattr(self):
1147 def printattr(self):
1148
1148
1149 print 'Project[%s]: name = %s, description = %s' % (self.id,
1149 print('Project[%s]: name = %s, description = %s' % (self.id,
1150 self.name,
1150 self.name,
1151 self.description)
1151 self.description))
1152
1152
1153 for procUnitConfObj in self.procUnitConfObjDict.values():
1153 for procUnitConfObj in list(self.procUnitConfObjDict.values()):
1154 procUnitConfObj.printattr()
1154 procUnitConfObj.printattr()
1155
1155
1156 def createObjects(self):
1156 def createObjects(self):
1157
1157
1158 for procUnitConfObj in self.procUnitConfObjDict.values():
1158 for procUnitConfObj in list(self.procUnitConfObjDict.values()):
1159 procUnitConfObj.createObjects(self.plotterQueue)
1159 procUnitConfObj.createObjects(self.plotterQueue)
1160
1160
1161 def __connect(self, objIN, thisObj):
1161 def __connect(self, objIN, thisObj):
@@ -1164,7 +1164,7 class Project(Process):
1164
1164
1165 def connectObjects(self):
1165 def connectObjects(self):
1166
1166
1167 for thisPUConfObj in self.procUnitConfObjDict.values():
1167 for thisPUConfObj in list(self.procUnitConfObjDict.values()):
1168
1168
1169 inputId = thisPUConfObj.getInputId()
1169 inputId = thisPUConfObj.getInputId()
1170
1170
@@ -1245,7 +1245,7 class Project(Process):
1245 '''
1245 '''
1246
1246
1247 if self.isPaused():
1247 if self.isPaused():
1248 print 'Process suspended'
1248 print('Process suspended')
1249
1249
1250 while True:
1250 while True:
1251 time.sleep(0.1)
1251 time.sleep(0.1)
@@ -1256,10 +1256,10 class Project(Process):
1256 if self.isStopped():
1256 if self.isStopped():
1257 break
1257 break
1258
1258
1259 print 'Process reinitialized'
1259 print('Process reinitialized')
1260
1260
1261 if self.isStopped():
1261 if self.isStopped():
1262 print 'Process stopped'
1262 print('Process stopped')
1263 return 0
1263 return 0
1264
1264
1265 return 1
1265 return 1
@@ -1270,15 +1270,15 class Project(Process):
1270
1270
1271 def setPlotterQueue(self, plotter_queue):
1271 def setPlotterQueue(self, plotter_queue):
1272
1272
1273 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1273 raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class')
1274
1274
1275 def getPlotterQueue(self):
1275 def getPlotterQueue(self):
1276
1276
1277 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1277 raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class')
1278
1278
1279 def useExternalPlotter(self):
1279 def useExternalPlotter(self):
1280
1280
1281 raise NotImplementedError, 'Use schainpy.controller_api.ControllerThread instead Project class'
1281 raise NotImplementedError('Use schainpy.controller_api.ControllerThread instead Project class')
1282
1282
1283 def run(self):
1283 def run(self):
1284
1284
@@ -1287,7 +1287,7 class Project(Process):
1287 self.createObjects()
1287 self.createObjects()
1288 self.connectObjects()
1288 self.connectObjects()
1289
1289
1290 keyList = self.procUnitConfObjDict.keys()
1290 keyList = list(self.procUnitConfObjDict.keys())
1291 keyList.sort()
1291 keyList.sort()
1292
1292
1293 err = None
1293 err = None
@@ -1310,7 +1310,7 class Project(Process):
1310 except KeyboardInterrupt:
1310 except KeyboardInterrupt:
1311 is_ok = False
1311 is_ok = False
1312 break
1312 break
1313 except ValueError, e:
1313 except ValueError as e:
1314 time.sleep(0.5)
1314 time.sleep(0.5)
1315 err = self.__handleError(procUnitConfObj)
1315 err = self.__handleError(procUnitConfObj)
1316 is_ok = False
1316 is_ok = False
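A side note on the list() wrappers introduced above: in Python 3, keys(), values() and items() return views, so list() is only needed when the result is sorted or modified, or when the dictionary itself changes during iteration. A short sketch with hypothetical ids:

    procUnits = {'1911': 'VoltageProc', '1912': 'SpectraProc'}  # hypothetical ids

    keyList = list(procUnits.keys())      # pattern used throughout this commit
    keyList.sort()

    keyList = sorted(procUnits)           # equivalent and slightly tidier

    for key, value in procUnits.items():  # plain iteration needs no list()
        print(key, value)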
@@ -1,5 +1,5
1 import threading
1 import threading
2 from Queue import Queue
2 from queue import Queue
3
3
4 from schainpy.controller import Project
4 from schainpy.controller import Project
5 from schainpy.model.graphics.jroplotter import PlotManager
5 from schainpy.model.graphics.jroplotter import PlotManager
@@ -77,7 +77,7 class ControllerThread(threading.Thread, Project):
77
77
78 plotterList = PlotManager.plotterList
78 plotterList = PlotManager.plotterList
79
79
80 for thisPUConfObj in self.procUnitConfObjDict.values():
80 for thisPUConfObj in list(self.procUnitConfObjDict.values()):
81
81
82 inputId = thisPUConfObj.getInputId()
82 inputId = thisPUConfObj.getInputId()
83
83
@@ -5,8 +5,8
5 # from schainpy.model.utils.jroutils import *
5 # from schainpy.model.utils.jroutils import *
6 # from schainpy.serializer import *
6 # from schainpy.serializer import *
7
7
8 from graphics import *
8 from .graphics import *
9 from data import *
9 from .data import *
10 from io import *
10 from .io import *
11 from proc import *
11 from .proc import *
12 from utils import *
12 from .utils import *
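The leading dots above are more than style: Python 3 resolves bare imports absolutely, and this package has a local subpackage named io, so from io import * would have pulled in the standard library instead of schainpy.model.io. A minimal sketch of the distinction, assuming a hypothetical layout pkg/__init__.py with a sibling module pkg/io.py:

    # inside pkg/__init__.py (hypothetical layout)
    from . import io as local_io    # explicit relative import: pkg.io
    import io as stdlib_io          # absolute import: the standard library io module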
@@ -7,7 +7,7 import sys
7 import numpy
7 import numpy
8 import copy
8 import copy
9 import datetime
9 import datetime
10 from __builtin__ import None
10
11
11
12 SPEED_OF_LIGHT = 299792458
12 SPEED_OF_LIGHT = 299792458
13 SPEED_OF_LIGHT = 3e8
13 SPEED_OF_LIGHT = 3e8
@@ -78,7 +78,7 class Header(object):
78 message += self.__class__.__name__.upper() + "\n"
78 message += self.__class__.__name__.upper() + "\n"
79 message += "#"*50 + "\n"
79 message += "#"*50 + "\n"
80
80
81 keyList = self.__dict__.keys()
81 keyList = list(self.__dict__.keys())
82 keyList.sort()
82 keyList.sort()
83
83
84 for key in keyList:
84 for key in keyList:
@@ -90,7 +90,7 class Header(object):
90 if attr:
90 if attr:
91 message += "%s = %s" %("size", attr) + "\n"
91 message += "%s = %s" %("size", attr) + "\n"
92
92
93 print message
93 print(message)
94
94
95 class FileHeader(Header):
95 class FileHeader(Header):
96
96
@@ -134,9 +134,9 class FileHeader(Header):
134
134
135 '''
135 '''
136
136
137 except Exception, e:
137 except Exception as e:
138 print "FileHeader: "
138 print("FileHeader: ")
139 print eBasicHeader
139 print(eBasicHeader)
140 return 0
140 return 0
141
141
142 self.FileMgcNumber= byte(header['FileMgcNumber'][0])
142 self.FileMgcNumber= byte(header['FileMgcNumber'][0])
@@ -279,8 +279,8 class RecordHeader(Header):
279
279
280 try:
280 try:
281 header = numpy.fromfile(fp,RECORD_STRUCTURE,1)
281 header = numpy.fromfile(fp,RECORD_STRUCTURE,1)
282 except Exception, e:
282 except Exception as e:
283 print "System Header: " + e
283 print("System Header: " + e)
284 return 0
284 return 0
285
285
286 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001
286 self.RecMgcNumber = header['RecMgcNumber'][0] #0x23030001
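Two small leftovers in the header-reading code above, noted in passing: print(eBasicHeader) refers to a name that does not appear to be defined in this scope (the caught exception was probably meant), and "System Header: " + e will still raise TypeError under Python 3 because an exception cannot be concatenated to a str. Later hunks in this same commit use the safer form, sketched here with a stand-in error:

    # str() is needed before concatenating an exception to a message in Python 3
    try:
        raise IOError('truncated record header')   # stand-in for the real failure
    except Exception as e:
        print("System Header: " + str(e))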
@@ -1,3 +1,3
1 from jrodata import *
1 from .jrodata import *
2 from jroheaderIO import *
2 from .jroheaderIO import *
3 from jroamisr import *
\ No newline at end of file
3 from .jroamisr import *
\ No newline at end of file
@@ -68,7 +68,7 class AMISR:
68 if inputObj is None:
68 if inputObj is None:
69 return copy.deepcopy(self)
69 return copy.deepcopy(self)
70
70
71 for key in inputObj.__dict__.keys():
71 for key in list(inputObj.__dict__.keys()):
72 self.__dict__[key] = inputObj.__dict__[key]
72 self.__dict__[key] = inputObj.__dict__[key]
73
73
74 def getNHeights(self):
74 def getNHeights(self):
@@ -8,8 +8,8 import copy
8 import numpy
8 import numpy
9 import datetime
9 import datetime
10
10
11 from jroheaderIO import SystemHeader, RadarControllerHeader
11 from .jroheaderIO import SystemHeader, RadarControllerHeader
12 from schainpy import cSchain
12 # from schainpy import cSchain
13
13
14
14
15 def getNumpyDtype(dataTypeCode):
15 def getNumpyDtype(dataTypeCode):
@@ -27,7 +27,7 def getNumpyDtype(dataTypeCode):
27 elif dataTypeCode == 5:
27 elif dataTypeCode == 5:
28 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
28 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
29 else:
29 else:
30 raise ValueError, 'dataTypeCode was not defined'
30 raise ValueError('dataTypeCode was not defined')
31
31
32 return numpyDtype
32 return numpyDtype
33
33
@@ -68,41 +68,41 def hildebrand_sekhon(data, navg):
68 """
68 """
69
69
70 sortdata = numpy.sort(data, axis=None)
70 sortdata = numpy.sort(data, axis=None)
71 # lenOfData = len(sortdata)
71 lenOfData = len(sortdata)
72 # nums_min = lenOfData*0.2
72 nums_min = lenOfData*0.2
73 #
73
74 # if nums_min <= 5:
74 if nums_min <= 5:
75 # nums_min = 5
75 nums_min = 5
76 #
76
77 # sump = 0.
77 sump = 0.
78 #
78
79 # sumq = 0.
79 sumq = 0.
80 #
80
81 # j = 0
81 j = 0
82 #
82
83 # cont = 1
83 cont = 1
84 #
84
85 # while((cont==1)and(j<lenOfData)):
85 while((cont==1)and(j<lenOfData)):
86 #
86
87 # sump += sortdata[j]
87 sump += sortdata[j]
88 #
88
89 # sumq += sortdata[j]**2
89 sumq += sortdata[j]**2
90 #
90
91 # if j > nums_min:
91 if j > nums_min:
92 # rtest = float(j)/(j-1) + 1.0/navg
92 rtest = float(j)/(j-1) + 1.0/navg
93 # if ((sumq*j) > (rtest*sump**2)):
93 if ((sumq*j) > (rtest*sump**2)):
94 # j = j - 1
94 j = j - 1
95 # sump = sump - sortdata[j]
95 sump = sump - sortdata[j]
96 # sumq = sumq - sortdata[j]**2
96 sumq = sumq - sortdata[j]**2
97 # cont = 0
97 cont = 0
98 #
98
99 # j += 1
99 j += 1
100 #
100
101 # lnoise = sump /j
101 lnoise = sump /j
102 #
102
103 # return lnoise
103 return lnoise
104
104
105 return cSchain.hildebrand_sekhon(sortdata, navg)
105 # return cSchain.hildebrand_sekhon(sortdata, navg)
106
106
107
107
108 class Beam:
108 class Beam:
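The hunk above re-enables the pure-Python Hildebrand and Sekhon (1974) noise estimate and comments out the call into the cSchain C extension. For reference, a cleaned-up sketch of the same loop; the function name is hypothetical and the behaviour mirrors the uncommented code above:

    import numpy

    def hildebrand_sekhon_py(data, navg):
        # sort the spectral points and accumulate until the noise criterion fails
        sortdata = numpy.sort(data, axis=None)
        lenOfData = len(sortdata)
        nums_min = lenOfData * 0.2
        if nums_min <= 5:
            nums_min = 5
        sump = 0.
        sumq = 0.
        j = 0
        cont = 1
        while (cont == 1) and (j < lenOfData):
            sump += sortdata[j]
            sumq += sortdata[j] ** 2
            if j > nums_min:
                rtest = float(j) / (j - 1) + 1.0 / navg
                if (sumq * j) > (rtest * sump ** 2):
                    j = j - 1
                    sump -= sortdata[j]
                    sumq -= sortdata[j] ** 2
                    cont = 0
            j += 1
        return sump / j    # mean power of the points accepted as noise

    # example: noise = hildebrand_sekhon_py(numpy.random.rayleigh(size=1024), navg=8)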
@@ -122,7 +122,7 class GenericData(object):
122 if inputObj == None:
122 if inputObj == None:
123 return copy.deepcopy(self)
123 return copy.deepcopy(self)
124
124
125 for key in inputObj.__dict__.keys():
125 for key in list(inputObj.__dict__.keys()):
126
126
127 attribute = inputObj.__dict__[key]
127 attribute = inputObj.__dict__[key]
128
128
@@ -241,7 +241,7 class JROData(GenericData):
241
241
242 def getChannelIndexList(self):
242 def getChannelIndexList(self):
243
243
244 return range(self.nChannels)
244 return list(range(self.nChannels))
245
245
246 def getNHeights(self):
246 def getNHeights(self):
247
247
@@ -662,7 +662,7 class Spectra(JROData):
662
662
663 def getPairsIndexList(self):
663 def getPairsIndexList(self):
664
664
665 return range(self.nPairs)
665 return list(range(self.nPairs))
666
666
667 def getNormFactor(self):
667 def getNormFactor(self):
668
668
@@ -714,8 +714,8 class Spectra(JROData):
714 pairsIndexList = []
714 pairsIndexList = []
715 for pair in pairsList:
715 for pair in pairsList:
716 if pair not in self.pairsList:
716 if pair not in self.pairsList:
717 raise ValueError, "Pair %s is not in dataOut.pairsList" % (
717 raise ValueError("Pair %s is not in dataOut.pairsList" % (
718 pair)
718 pair))
719 pairsIndexList.append(self.pairsList.index(pair))
719 pairsIndexList.append(self.pairsList.index(pair))
720 for i in range(len(pairsIndexList)):
720 for i in range(len(pairsIndexList)):
721 pair = self.pairsList[pairsIndexList[i]]
721 pair = self.pairsList[pairsIndexList[i]]
@@ -736,7 +736,7 class Spectra(JROData):
736
736
737 def setValue(self, value):
737 def setValue(self, value):
738
738
739 print "This property should not be initialized"
739 print("This property should not be initialized")
740
740
741 return
741 return
742
742
@@ -941,7 +941,7 class Fits(JROData):
941
941
942 def getChannelIndexList(self):
942 def getChannelIndexList(self):
943
943
944 return range(self.nChannels)
944 return list(range(self.nChannels))
945
945
946 def getNoise(self, type=1):
946 def getNoise(self, type=1):
947
947
@@ -1068,7 +1068,7 class Correlation(JROData):
1068 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
1068 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
1069
1069
1070 if ind_vel[0] < 0:
1070 if ind_vel[0] < 0:
1071 ind_vel[range(0, 1)] = ind_vel[range(0, 1)] + self.num_prof
1071 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
1072
1072
1073 if mode == 1:
1073 if mode == 1:
1074 jspectra[:, freq_dc, :] = (
1074 jspectra[:, freq_dc, :] = (
@@ -1080,7 +1080,7 class Correlation(JROData):
1080 xx = numpy.zeros([4, 4])
1080 xx = numpy.zeros([4, 4])
1081
1081
1082 for fil in range(4):
1082 for fil in range(4):
1083 xx[fil, :] = vel[fil]**numpy.asarray(range(4))
1083 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
1084
1084
1085 xx_inv = numpy.linalg.inv(xx)
1085 xx_inv = numpy.linalg.inv(xx)
1086 xx_aux = xx_inv[0, :]
1086 xx_aux = xx_inv[0, :]
@@ -1239,7 +1239,7 class Parameters(Spectra):
1239
1239
1240 def setValue(self, value):
1240 def setValue(self, value):
1241
1241
1242 print "This property should not be initialized"
1242 print("This property should not be initialized")
1243
1243
1244 return
1244 return
1245
1245
@@ -8,6 +8,7 import numpy
8 import copy
8 import copy
9 import datetime
9 import datetime
10 import inspect
10 import inspect
11 from schainpy.utils import log
11
12
12 SPEED_OF_LIGHT = 299792458
13 SPEED_OF_LIGHT = 299792458
13 SPEED_OF_LIGHT = 3e8
14 SPEED_OF_LIGHT = 3e8
@@ -110,7 +111,7 class Header(object):
110 message += self.__class__.__name__.upper() + "\n"
111 message += self.__class__.__name__.upper() + "\n"
111 message += "#" * 50 + "\n"
112 message += "#" * 50 + "\n"
112
113
113 keyList = self.__dict__.keys()
114 keyList = list(self.__dict__.keys())
114 keyList.sort()
115 keyList.sort()
115
116
116 for key in keyList:
117 for key in keyList:
@@ -122,7 +123,7 class Header(object):
122 if attr:
123 if attr:
123 message += "%s = %s" % ("size", attr) + "\n"
124 message += "%s = %s" % ("size", attr) + "\n"
124
125
125 print message
126 print(message)
126
127
127
128
128 class BasicHeader(Header):
129 class BasicHeader(Header):
@@ -161,9 +162,9 class BasicHeader(Header):
161 header = numpy.fromfile(fp, BASIC_STRUCTURE, 1)
162 header = numpy.fromfile(fp, BASIC_STRUCTURE, 1)
162 else:
163 else:
163 header = numpy.fromstring(fp, BASIC_STRUCTURE, 1)
164 header = numpy.fromstring(fp, BASIC_STRUCTURE, 1)
164 except Exception, e:
165 except Exception as e:
165 print "BasicHeader: "
166 print("BasicHeader: ")
166 print e
167 print(e)
167 return 0
168 return 0
168
169
169 self.size = int(header['nSize'][0])
170 self.size = int(header['nSize'][0])
@@ -229,7 +230,7 class SystemHeader(Header):
229 self.length = 0
230 self.length = 0
230 try:
231 try:
231 startFp = fp.tell()
232 startFp = fp.tell()
232 except Exception, e:
233 except Exception as e:
233 startFp = None
234 startFp = None
234 pass
235 pass
235
236
@@ -238,8 +239,8 class SystemHeader(Header):
238 header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
239 header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
239 else:
240 else:
240 header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1)
241 header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1)
241 except Exception, e:
242 except Exception as e:
242 print "System Header: " + str(e)
243 print("System Header: " + str(e))
243 return 0
244 return 0
244
245
245 self.size = header['nSize'][0]
246 self.size = header['nSize'][0]
@@ -344,7 +345,7 class RadarControllerHeader(Header):
344 self.length = 0
345 self.length = 0
345 try:
346 try:
346 startFp = fp.tell()
347 startFp = fp.tell()
347 except Exception, e:
348 except Exception as e:
348 startFp = None
349 startFp = None
349 pass
350 pass
350
351
@@ -354,8 +355,8 class RadarControllerHeader(Header):
354 else:
355 else:
355 header = numpy.fromstring(fp, RADAR_STRUCTURE, 1)
356 header = numpy.fromstring(fp, RADAR_STRUCTURE, 1)
356 self.length += header.nbytes
357 self.length += header.nbytes
357 except Exception, e:
358 except Exception as e:
358 print "RadarControllerHeader: " + str(e)
359 print("RadarControllerHeader: " + str(e))
359 return 0
360 return 0
360
361
361 size = int(header['nSize'][0])
362 size = int(header['nSize'][0])
@@ -384,8 +385,8 class RadarControllerHeader(Header):
384 samplingWindow = numpy.fromstring(
385 samplingWindow = numpy.fromstring(
385 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
386 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
386 self.length += samplingWindow.nbytes
387 self.length += samplingWindow.nbytes
387 except Exception, e:
388 except Exception as e:
388 print "RadarControllerHeader: " + str(e)
389 print("RadarControllerHeader: " + str(e))
389 return 0
390 return 0
390 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
391 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
391 self.firstHeight = samplingWindow['h0']
392 self.firstHeight = samplingWindow['h0']
@@ -399,8 +400,8 class RadarControllerHeader(Header):
399 self.Taus = numpy.fromstring(
400 self.Taus = numpy.fromstring(
400 fp[self.length:], '<f4', self.numTaus)
401 fp[self.length:], '<f4', self.numTaus)
401 self.length += self.Taus.nbytes
402 self.length += self.Taus.nbytes
402 except Exception, e:
403 except Exception as e:
403 print "RadarControllerHeader: " + str(e)
404 print("RadarControllerHeader: " + str(e))
404 return 0
405 return 0
405
406
406 self.code_size = 0
407 self.code_size = 0
@@ -419,8 +420,8 class RadarControllerHeader(Header):
419 self.nBaud = numpy.fromstring(
420 self.nBaud = numpy.fromstring(
420 fp[self.length:], '<u4', 1)[0]
421 fp[self.length:], '<u4', 1)[0]
421 self.length += self.nBaud.nbytes
422 self.length += self.nBaud.nbytes
422 except Exception, e:
423 except Exception as e:
423 print "RadarControllerHeader: " + str(e)
424 print("RadarControllerHeader: " + str(e))
424 return 0
425 return 0
425 code = numpy.empty([self.nCode, self.nBaud], dtype='i1')
426 code = numpy.empty([self.nCode, self.nBaud], dtype='i1')
426
427
@@ -433,13 +434,14 class RadarControllerHeader(Header):
433 temp = numpy.fromstring(
434 temp = numpy.fromstring(
434 fp, 'u4', int(numpy.ceil(self.nBaud / 32.)))
435 fp, 'u4', int(numpy.ceil(self.nBaud / 32.)))
435 self.length += temp.nbytes
436 self.length += temp.nbytes
436 except Exception, e:
437 except Exception as e:
437 print "RadarControllerHeader: " + str(e)
438 print("RadarControllerHeader: " + str(e))
438 return 0
439 return 0
439
440
440 for ib in range(self.nBaud - 1, -1, -1):
441 for ib in range(self.nBaud - 1, -1, -1):
441 code[ic, ib] = temp[ib / 32] % 2
442 log.error(ib / 32)
442 temp[ib / 32] = temp[ib / 32] / 2
443 code[ic, ib] = temp[int(ib / 32)] % 2
444 temp[int(ib / 32)] = temp[int(ib / 32)] / 2
443
445
444 self.code = 2.0 * code - 1.0
446 self.code = 2.0 * code - 1.0
445 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
447 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
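The int() casts above are the one semantic fix in this block: in Python 2, ib / 32 was an integer, while in Python 3 it is a float and can no longer index temp (the added log.error(ib / 32) looks like leftover debugging output). Floor division expresses the same thing directly; a small sketch with a hypothetical helper name:

    import numpy

    def unpack_code_words(temp, nBaud):
        # recover nBaud code bits packed into 32-bit words (mirrors the loop above)
        bits = numpy.empty(nBaud, dtype='i1')
        for ib in range(nBaud - 1, -1, -1):
            word = ib // 32                # integer index, no float division
            bits[ib] = temp[word] % 2
            temp[word] = temp[word] // 2
        return bits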
@@ -454,7 +456,7 class RadarControllerHeader(Header):
454
456
455 if fp.tell() != endFp:
457 if fp.tell() != endFp:
456 # fp.seek(endFp)
458 # fp.seek(endFp)
457 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" % (fp.name, fp.tell() - startFp, size)
459 print("%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" % (fp.name, fp.tell() - startFp, size))
458 # return 0
460 # return 0
459
461
460 if fp.tell() > endFp:
462 if fp.tell() > endFp:
@@ -557,7 +559,7 class RadarControllerHeader(Header):
557
559
558 def set_size(self, value):
560 def set_size(self, value):
559
561
560 raise IOError, "size is a property and it cannot be set, just read"
562 raise IOError("size is a property and it cannot be set, just read")
561
563
562 return
564 return
563
565
@@ -617,7 +619,7 class ProcessingHeader(Header):
617 self.length = 0
619 self.length = 0
618 try:
620 try:
619 startFp = fp.tell()
621 startFp = fp.tell()
620 except Exception, e:
622 except Exception as e:
621 startFp = None
623 startFp = None
622 pass
624 pass
623
625
@@ -627,8 +629,8 class ProcessingHeader(Header):
627 else:
629 else:
628 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
630 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
629 self.length += header.nbytes
631 self.length += header.nbytes
630 except Exception, e:
632 except Exception as e:
631 print "ProcessingHeader: " + str(e)
633 print("ProcessingHeader: " + str(e))
632 return 0
634 return 0
633
635
634 size = int(header['nSize'][0])
636 size = int(header['nSize'][0])
@@ -650,8 +652,8 class ProcessingHeader(Header):
650 samplingWindow = numpy.fromstring(
652 samplingWindow = numpy.fromstring(
651 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
653 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
652 self.length += samplingWindow.nbytes
654 self.length += samplingWindow.nbytes
653 except Exception, e:
655 except Exception as e:
654 print "ProcessingHeader: " + str(e)
656 print("ProcessingHeader: " + str(e))
655 return 0
657 return 0
656
658
657 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
659 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
@@ -667,8 +669,8 class ProcessingHeader(Header):
667 self.spectraComb = numpy.fromstring(
669 self.spectraComb = numpy.fromstring(
668 fp[self.length:], 'u1', 2 * self.totalSpectra)
670 fp[self.length:], 'u1', 2 * self.totalSpectra)
669 self.length += self.spectraComb.nbytes
671 self.length += self.spectraComb.nbytes
670 except Exception, e:
672 except Exception as e:
671 print "ProcessingHeader: " + str(e)
673 print("ProcessingHeader: " + str(e))
672 return 0
674 return 0
673
675
674 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
676 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
@@ -783,7 +785,7 class ProcessingHeader(Header):
783
785
784 def set_size(self, value):
786 def set_size(self, value):
785
787
786 raise IOError, "size is a property and it cannot be set, just read"
788 raise IOError("size is a property and it cannot be set, just read")
787
789
788 return
790 return
789
791
@@ -1,7 +1,7
1 from jroplot_voltage import *
1 from .jroplot_voltage import *
2 from jroplot_spectra import *
2 from .jroplot_spectra import *
3 from jroplot_heispectra import *
3 from .jroplot_heispectra import *
4 from jroplot_correlation import *
4 from .jroplot_correlation import *
5 from jroplot_parameters import *
5 from .jroplot_parameters import *
6 from jroplot_data import *
6 from .jroplot_data import *
7 from jroplotter import *
7 from .jroplotter import *
@@ -1,7 +1,7
1 import os
1 import os
2 import numpy
2 import numpy
3 import time, datetime
3 import time, datetime
4 import mpldriver
4 from schainpy.model.graphics import mpldriver
5
5
6 from schainpy.model.proc.jroproc_base import Operation
6 from schainpy.model.proc.jroproc_base import Operation
7
7
@@ -130,7 +130,7 class Figure(Operation):
130
130
131 def init(self, id, nplots, wintitle):
131 def init(self, id, nplots, wintitle):
132
132
133 raise NotImplementedError, "This method has been replaced by createFigure"
133 raise NotImplementedError("This method has been replaced by createFigure")
134
134
135 def createFigure(self, id, wintitle, widthplot=None, heightplot=None, show=True):
135 def createFigure(self, id, wintitle, widthplot=None, heightplot=None, show=True):
136
136
@@ -188,11 +188,11 class Figure(Operation):
188
188
189 def setTextFromAxes(self, text):
189 def setTextFromAxes(self, text):
190
190
191 raise NotImplementedError, "This method has been replaced with Axes.setText"
191 raise NotImplementedError("This method has been replaced with Axes.setText")
192
192
193 def makeAxes(self, nrow, ncol, xpos, ypos, colspan, rowspan):
193 def makeAxes(self, nrow, ncol, xpos, ypos, colspan, rowspan):
194
194
195 raise NotImplementedError, "This method has been replaced with Axes.addAxes"
195 raise NotImplementedError("This method has been replaced with Axes.addAxes")
196
196
197 def addAxes(self, *args):
197 def addAxes(self, *args):
198 """
198 """
@@ -234,7 +234,7 class Figure(Operation):
234 if not figfile:
234 if not figfile:
235
235
236 if not thisDatetime:
236 if not thisDatetime:
237 raise ValueError, "Saving figure: figfile or thisDatetime should be defined"
237 raise ValueError("Saving figure: figfile or thisDatetime should be defined")
238 return
238 return
239
239
240 str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S")
240 str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S")
@@ -3,7 +3,7 import datetime
3 import numpy
3 import numpy
4 import copy
4 import copy
5 from schainpy.model import *
5 from schainpy.model import *
6 from figure import Figure, isRealtime
6 from .figure import Figure, isRealtime
7
7
8 class CorrelationPlot(Figure):
8 class CorrelationPlot(Figure):
9 isConfig = None
9 isConfig = None
@@ -99,7 +99,7 class CorrelationPlot(Figure):
99
99
100 if realtime:
100 if realtime:
101 if not(isRealtime(utcdatatime = dataOut.utctime)):
101 if not(isRealtime(utcdatatime = dataOut.utctime)):
102 print 'Skipping this plot function'
102 print('Skipping this plot function')
103 return
103 return
104
104
105 if channelList == None:
105 if channelList == None:
@@ -108,7 +108,7 class CorrelationPlot(Figure):
108 channelIndexList = []
108 channelIndexList = []
109 for channel in channelList:
109 for channel in channelList:
110 if channel not in dataOut.channelList:
110 if channel not in dataOut.channelList:
111 raise ValueError, "Channel %d is not in dataOut.channelList"
111 raise ValueError("Channel %d is not in dataOut.channelList")
112 channelIndexList.append(dataOut.channelList.index(channel))
112 channelIndexList.append(dataOut.channelList.index(channel))
113
113
114 factor = dataOut.normFactor
114 factor = dataOut.normFactor
@@ -339,7 +339,7 class PlotData(Operation, Process):
339 self.titles: list of axes title
339 self.titles: list of axes title
340
340
341 '''
341 '''
342 raise(NotImplementedError, 'Implement this method in child class')
342 raise NotImplementedError
343
343
344 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
344 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
345 '''
345 '''
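Minor observation on the raise above (and the identical one in plot() further down): the Python 2 form raise(NotImplementedError, 'Implement this method in child class') already discarded its message, since Python 2 raised only the first element of the tuple, and the converted raise NotImplementedError keeps that behaviour. Python 3 accepts the message directly if it is worth restoring:

    def setup(self):
        # message preserved, raised the Python 3 way
        raise NotImplementedError('Implement this method in child class')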
@@ -490,7 +490,7 class PlotData(Operation, Process):
490 if self.save_labels:
490 if self.save_labels:
491 labels = self.save_labels
491 labels = self.save_labels
492 else:
492 else:
493 labels = range(self.nrows)
493 labels = list(range(self.nrows))
494
494
495 if self.oneFigure:
495 if self.oneFigure:
496 label = ''
496 label = ''
@@ -514,7 +514,7 class PlotData(Operation, Process):
514 def plot(self):
514 def plot(self):
515 '''
515 '''
516 '''
516 '''
517 raise(NotImplementedError, 'Implement this method in child class')
517 raise NotImplementedError
518
518
519 def run(self):
519 def run(self):
520
520
@@ -961,7 +961,7 class PlotParamData(PlotRTIData):
961 self.ylabel = 'Height [km]'
961 self.ylabel = 'Height [km]'
962 if not self.titles:
962 if not self.titles:
963 self.titles = self.data.parameters \
963 self.titles = self.data.parameters \
964 if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)]
964 if self.data.parameters else ['Param {}'.format(x) for x in range(self.nrows)]
965 if self.showSNR:
965 if self.showSNR:
966 self.titles.append('SNR')
966 self.titles.append('SNR')
967
967
@@ -1041,7 +1041,7 class PlotPolarMapData(PlotData):
1041 else:
1041 else:
1042 self.nplots = self.data.shape(self.CODE)[0]
1042 self.nplots = self.data.shape(self.CODE)[0]
1043 self.nrows = self.nplots
1043 self.nrows = self.nplots
1044 self.channels = range(self.nplots)
1044 self.channels = list(range(self.nplots))
1045 if self.mode == 'E':
1045 if self.mode == 'E':
1046 self.xlabel = 'Longitude'
1046 self.xlabel = 'Longitude'
1047 self.ylabel = 'Latitude'
1047 self.ylabel = 'Latitude'
@@ -7,8 +7,8 import os
7 import datetime
7 import datetime
8 import numpy
8 import numpy
9
9
10 from figure import Figure, isRealtime
10 from .figure import Figure, isRealtime
11 from plotting_codes import *
11 from .plotting_codes import *
12
12
13 class SpectraHeisScope(Figure):
13 class SpectraHeisScope(Figure):
14
14
@@ -98,7 +98,7 class SpectraHeisScope(Figure):
98
98
99 if dataOut.realtime:
99 if dataOut.realtime:
100 if not(isRealtime(utcdatatime = dataOut.utctime)):
100 if not(isRealtime(utcdatatime = dataOut.utctime)):
101 print 'Skipping this plot function'
101 print('Skipping this plot function')
102 return
102 return
103
103
104 if channelList == None:
104 if channelList == None:
@@ -107,7 +107,7 class SpectraHeisScope(Figure):
107 channelIndexList = []
107 channelIndexList = []
108 for channel in channelList:
108 for channel in channelList:
109 if channel not in dataOut.channelList:
109 if channel not in dataOut.channelList:
110 raise ValueError, "Channel %d is not in dataOut.channelList"
110 raise ValueError("Channel %d is not in dataOut.channelList")
111 channelIndexList.append(dataOut.channelList.index(channel))
111 channelIndexList.append(dataOut.channelList.index(channel))
112
112
113 # x = dataOut.heightList
113 # x = dataOut.heightList
@@ -238,7 +238,7 class RTIfromSpectraHeis(Figure):
238 channelIndexList = []
238 channelIndexList = []
239 for channel in channelList:
239 for channel in channelList:
240 if channel not in dataOut.channelList:
240 if channel not in dataOut.channelList:
241 raise ValueError, "Channel %d is not in dataOut.channelList"
241 raise ValueError("Channel %d is not in dataOut.channelList")
242 channelIndexList.append(dataOut.channelList.index(channel))
242 channelIndexList.append(dataOut.channelList.index(channel))
243
243
244 if timerange != None:
244 if timerange != None:
@@ -2,8 +2,8 import os
2 import datetime
2 import datetime
3 import numpy
3 import numpy
4 import inspect
4 import inspect
5 from figure import Figure, isRealtime, isTimeInHourRange
5 from .figure import Figure, isRealtime, isTimeInHourRange
6 from plotting_codes import *
6 from .plotting_codes import *
7
7
8
8
9 class FitGauPlot(Figure):
9 class FitGauPlot(Figure):
@@ -101,7 +101,7 class FitGauPlot(Figure):
101 """
101 """
102 if realtime:
102 if realtime:
103 if not(isRealtime(utcdatatime = dataOut.utctime)):
103 if not(isRealtime(utcdatatime = dataOut.utctime)):
104 print 'Skipping this plot function'
104 print('Skipping this plot function')
105 return
105 return
106
106
107 if channelList == None:
107 if channelList == None:
@@ -110,7 +110,7 class FitGauPlot(Figure):
110 channelIndexList = []
110 channelIndexList = []
111 for channel in channelList:
111 for channel in channelList:
112 if channel not in dataOut.channelList:
112 if channel not in dataOut.channelList:
113 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
113 raise ValueError("Channel %d is not in dataOut.channelList" %channel)
114 channelIndexList.append(dataOut.channelList.index(channel))
114 channelIndexList.append(dataOut.channelList.index(channel))
115
115
116 # if normFactor is None:
116 # if normFactor is None:
@@ -134,7 +134,7 class FitGauPlot(Figure):
134 y = dataOut.getHeiRange()
134 y = dataOut.getHeiRange()
135
135
136 z = dataOut.GauSPC[:,GauSelector,:,:] #GauSelector] #dataOut.data_spc/factor
136 z = dataOut.GauSPC[:,GauSelector,:,:] #GauSelector] #dataOut.data_spc/factor
137 print 'GausSPC', z[0,32,10:40]
137 print('GausSPC', z[0,32,10:40])
138 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
138 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
139 zdB = 10*numpy.log10(z)
139 zdB = 10*numpy.log10(z)
140
140
@@ -311,7 +311,7 class MomentsPlot(Figure):
311
311
312 if realtime:
312 if realtime:
313 if not(isRealtime(utcdatatime = dataOut.utctime)):
313 if not(isRealtime(utcdatatime = dataOut.utctime)):
314 print 'Skipping this plot function'
314 print('Skipping this plot function')
315 return
315 return
316
316
317 if channelList == None:
317 if channelList == None:
@@ -320,7 +320,7 class MomentsPlot(Figure):
320 channelIndexList = []
320 channelIndexList = []
321 for channel in channelList:
321 for channel in channelList:
322 if channel not in dataOut.channelList:
322 if channel not in dataOut.channelList:
323 raise ValueError, "Channel %d is not in dataOut.channelList"
323 raise ValueError("Channel %d is not in dataOut.channelList")
324 channelIndexList.append(dataOut.channelList.index(channel))
324 channelIndexList.append(dataOut.channelList.index(channel))
325
325
326 factor = dataOut.normFactor
326 factor = dataOut.normFactor
@@ -701,7 +701,7 class WindProfilerPlot(Figure):
701 if ymin == None: ymin = numpy.nanmin(y)
701 if ymin == None: ymin = numpy.nanmin(y)
702 if ymax == None: ymax = numpy.nanmax(y)
702 if ymax == None: ymax = numpy.nanmax(y)
703
703
704 if zmax == None: zmax = numpy.nanmax(abs(z[range(2),:]))
704 if zmax == None: zmax = numpy.nanmax(abs(z[list(range(2)),:]))
705 #if numpy.isnan(zmax): zmax = 50
705 #if numpy.isnan(zmax): zmax = 50
706 if zmin == None: zmin = -zmax
706 if zmin == None: zmin = -zmax
707
707
@@ -875,12 +875,12 class ParametersPlot(Figure):
875 return
875 return
876
876
877 if channelList == None:
877 if channelList == None:
878 channelIndexList = range(dataOut.data_param.shape[0])
878 channelIndexList = list(range(dataOut.data_param.shape[0]))
879 else:
879 else:
880 channelIndexList = []
880 channelIndexList = []
881 for channel in channelList:
881 for channel in channelList:
882 if channel not in dataOut.channelList:
882 if channel not in dataOut.channelList:
883 raise ValueError, "Channel %d is not in dataOut.channelList"
883 raise ValueError("Channel %d is not in dataOut.channelList")
884 channelIndexList.append(dataOut.channelList.index(channel))
884 channelIndexList.append(dataOut.channelList.index(channel))
885
885
886 x = dataOut.getTimeRange1(dataOut.paramInterval)
886 x = dataOut.getTimeRange1(dataOut.paramInterval)
@@ -7,8 +7,8 import os
7 import datetime
7 import datetime
8 import numpy
8 import numpy
9
9
10 from figure import Figure, isRealtime, isTimeInHourRange
10 from .figure import Figure, isRealtime, isTimeInHourRange
11 from plotting_codes import *
11 from .plotting_codes import *
12
12
13
13
14 class SpectraPlot(Figure):
14 class SpectraPlot(Figure):
@@ -106,7 +106,7 class SpectraPlot(Figure):
106 """
106 """
107 if realtime:
107 if realtime:
108 if not(isRealtime(utcdatatime = dataOut.utctime)):
108 if not(isRealtime(utcdatatime = dataOut.utctime)):
109 print 'Skipping this plot function'
109 print('Skipping this plot function')
110 return
110 return
111
111
112 if channelList == None:
112 if channelList == None:
@@ -115,7 +115,7 class SpectraPlot(Figure):
115 channelIndexList = []
115 channelIndexList = []
116 for channel in channelList:
116 for channel in channelList:
117 if channel not in dataOut.channelList:
117 if channel not in dataOut.channelList:
118 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
118 raise ValueError("Channel %d is not in dataOut.channelList" %channel)
119 channelIndexList.append(dataOut.channelList.index(channel))
119 channelIndexList.append(dataOut.channelList.index(channel))
120
120
121 if normFactor is None:
121 if normFactor is None:
@@ -307,7 +307,7 class CrossSpectraPlot(Figure):
307 pairsIndexList = []
307 pairsIndexList = []
308 for pair in pairsList:
308 for pair in pairsList:
309 if pair not in dataOut.pairsList:
309 if pair not in dataOut.pairsList:
310 raise ValueError, "Pair %s is not in dataOut.pairsList" %str(pair)
310 raise ValueError("Pair %s is not in dataOut.pairsList" %str(pair))
311 pairsIndexList.append(dataOut.pairsList.index(pair))
311 pairsIndexList.append(dataOut.pairsList.index(pair))
312
312
313 if not pairsIndexList:
313 if not pairsIndexList:
@@ -554,7 +554,7 class RTIPlot(Figure):
554 channelIndexList = []
554 channelIndexList = []
555 for channel in channelList:
555 for channel in channelList:
556 if channel not in dataOut.channelList:
556 if channel not in dataOut.channelList:
557 raise ValueError, "Channel %d is not in dataOut.channelList"
557 raise ValueError("Channel %d is not in dataOut.channelList")
558 channelIndexList.append(dataOut.channelList.index(channel))
558 channelIndexList.append(dataOut.channelList.index(channel))
559
559
560 if normFactor is None:
560 if normFactor is None:
@@ -581,7 +581,7 class RTIPlot(Figure):
581
581
582 update_figfile = False
582 update_figfile = False
583
583
584 if dataOut.ltctime >= self.xmax:
584 if self.xmax is not None and dataOut.ltctime >= self.xmax: #yong
585 self.counter_imagwr = wr_period
585 self.counter_imagwr = wr_period
586 self.isConfig = False
586 self.isConfig = False
587 update_figfile = True
587 update_figfile = True
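The self.xmax is not None guard above (tagged #yong) is another behavioural fix for Python 3: Python 2 allowed ordering comparisons against None (None sorts below every number), while Python 3 raises TypeError instead, so the case where xmax is still unset needs an explicit check. A minimal sketch of the guarded test:

    def should_restart_plot(ltctime, xmax):
        # xmax may still be None before the first plot window is configured
        return xmax is not None and ltctime >= xmax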
@@ -732,7 +732,7 class CoherenceMap(Figure):
732 pairsIndexList = []
732 pairsIndexList = []
733 for pair in pairsList:
733 for pair in pairsList:
734 if pair not in dataOut.pairsList:
734 if pair not in dataOut.pairsList:
735 raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair)
735 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
736 pairsIndexList.append(dataOut.pairsList.index(pair))
736 pairsIndexList.append(dataOut.pairsList.index(pair))
737
737
738 if pairsIndexList == []:
738 if pairsIndexList == []:
@@ -915,7 +915,7 class PowerProfilePlot(Figure):
915 channelIndexList = []
915 channelIndexList = []
916 for channel in channelList:
916 for channel in channelList:
917 if channel not in dataOut.channelList:
917 if channel not in dataOut.channelList:
918 raise ValueError, "Channel %d is not in dataOut.channelList"
918 raise ValueError("Channel %d is not in dataOut.channelList")
919 channelIndexList.append(dataOut.channelList.index(channel))
919 channelIndexList.append(dataOut.channelList.index(channel))
920
920
921 factor = dataOut.normFactor
921 factor = dataOut.normFactor
@@ -1040,7 +1040,7 class SpectraCutPlot(Figure):
1040 channelIndexList = []
1040 channelIndexList = []
1041 for channel in channelList:
1041 for channel in channelList:
1042 if channel not in dataOut.channelList:
1042 if channel not in dataOut.channelList:
1043 raise ValueError, "Channel %d is not in dataOut.channelList"
1043 raise ValueError("Channel %d is not in dataOut.channelList")
1044 channelIndexList.append(dataOut.channelList.index(channel))
1044 channelIndexList.append(dataOut.channelList.index(channel))
1045
1045
1046 factor = dataOut.normFactor
1046 factor = dataOut.normFactor
@@ -1219,7 +1219,7 class Noise(Figure):
1219 channelIndexList = []
1219 channelIndexList = []
1220 for channel in channelList:
1220 for channel in channelList:
1221 if channel not in dataOut.channelList:
1221 if channel not in dataOut.channelList:
1222 raise ValueError, "Channel %d is not in dataOut.channelList"
1222 raise ValueError("Channel %d is not in dataOut.channelList")
1223 channelIndexList.append(dataOut.channelList.index(channel))
1223 channelIndexList.append(dataOut.channelList.index(channel))
1224
1224
1225 x = dataOut.getTimeRange()
1225 x = dataOut.getTimeRange()
@@ -1408,7 +1408,7 class BeaconPhase(Figure):
1408 pairsIndexList = []
1408 pairsIndexList = []
1409 for pair in pairsList:
1409 for pair in pairsList:
1410 if pair not in dataOut.pairsList:
1410 if pair not in dataOut.pairsList:
1411 raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair)
1411 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
1412 pairsIndexList.append(dataOut.pairsList.index(pair))
1412 pairsIndexList.append(dataOut.pairsList.index(pair))
1413
1413
1414 if pairsIndexList == []:
1414 if pairsIndexList == []:
@@ -7,7 +7,7 import os
7 import datetime
7 import datetime
8 import numpy
8 import numpy
9
9
10 from figure import Figure
10 from .figure import Figure
11
11
12 class Scope(Figure):
12 class Scope(Figure):
13
13
@@ -134,7 +134,7 class Scope(Figure):
134 channelIndexList = []
134 channelIndexList = []
135 for channel in channelList:
135 for channel in channelList:
136 if channel not in dataOut.channelList:
136 if channel not in dataOut.channelList:
137 raise ValueError, "Channel %d is not in dataOut.channelList"
137 raise ValueError("Channel %d is not in dataOut.channelList")
138 channelIndexList.append(dataOut.channelList.index(channel))
138 channelIndexList.append(dataOut.channelList.index(channel))
139
139
140 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
140 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
@@ -17,11 +17,11 import schainpy.admin
17
17
18 from schainpy.model.proc.jroproc_base import Operation
18 from schainpy.model.proc.jroproc_base import Operation
19 from schainpy.model.serializer.data import obj2Dict, dict2Obj
19 from schainpy.model.serializer.data import obj2Dict, dict2Obj
20 from jroplot_correlation import *
20 from .jroplot_correlation import *
21 from jroplot_heispectra import *
21 from .jroplot_heispectra import *
22 from jroplot_parameters import *
22 from .jroplot_parameters import *
23 from jroplot_spectra import *
23 from .jroplot_spectra import *
24 from jroplot_voltage import *
24 from .jroplot_voltage import *
25
25
26
26
27 class Plotter(Operation):
27 class Plotter(Operation):
@@ -46,7 +46,7 class Plotter(Operation):
46
46
47 def setup(self, **kwargs):
47 def setup(self, **kwargs):
48
48
49 print "Initializing ..."
49 print("Initializing ...")
50
50
51
51
52 def run(self, dataOut, id=None, **kwargs):
52 def run(self, dataOut, id=None, **kwargs):
@@ -106,8 +106,8 class PlotManager():
106 sys.exc_info()[1],
106 sys.exc_info()[1],
107 sys.exc_info()[2])
107 sys.exc_info()[2])
108
108
109 print "***** Error occurred in PlotManager *****"
109 print("***** Error occurred in PlotManager *****")
110 print "***** [%s]: %s" %(name, err[-1])
110 print("***** [%s]: %s" %(name, err[-1]))
111
111
112 message = "\nError ocurred in %s:\n" %name
112 message = "\nError ocurred in %s:\n" %name
113 message += "".join(err)
113 message += "".join(err)
@@ -168,7 +168,7 class PlotManager():
168
168
169 dataPlot = serial_data['data']
169 dataPlot = serial_data['data']
170
170
171 if plot_id not in self.plotInstanceDict.keys():
171 if plot_id not in list(self.plotInstanceDict.keys()):
172 className = eval(plot_name)
172 className = eval(plot_name)
173 self.plotInstanceDict[plot_id] = className(**kwargs)
173 self.plotInstanceDict[plot_id] = className(**kwargs)
174
174
@@ -198,7 +198,7 class PlotManager():
198
198
199 self.__lock.acquire()
199 self.__lock.acquire()
200
200
201 for plot_id in self.plotInstanceDict.keys():
201 for plot_id in list(self.plotInstanceDict.keys()):
202 plotter = self.plotInstanceDict[plot_id]
202 plotter = self.plotInstanceDict[plot_id]
203 plotter.close()
203 plotter.close()
204
204
@@ -211,7 +211,7 class PlotManager():
211 def start(self):
211 def start(self):
212
212
213 if not self.controllerThreadObj.isRunning():
213 if not self.controllerThreadObj.isRunning():
214 raise RuntimeError, "controllerThreadObj has not been initialized. Use controllerThreadObj.start() before call this method"
214 raise RuntimeError("controllerThreadObj has not been initialized. Use controllerThreadObj.start() before call this method")
215
215
216 self.join()
216 self.join()
217
217
@@ -171,11 +171,11 def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title=''
171
171
172 ######################################################
172 ######################################################
173 if '0.' in matplotlib.__version__[0:2]:
173 if '0.' in matplotlib.__version__[0:2]:
174 print "The matplotlib version has to be updated to 1.1 or newer"
174 print("The matplotlib version has to be updated to 1.1 or newer")
175 return iplot
175 return iplot
176
176
177 if '1.0.' in matplotlib.__version__[0:4]:
177 if '1.0.' in matplotlib.__version__[0:4]:
178 print "The matplotlib version has to be updated to 1.1 or newer"
178 print("The matplotlib version has to be updated to 1.1 or newer")
179 return iplot
179 return iplot
180
180
181 if grid != None:
181 if grid != None:
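The string-prefix tests on matplotlib.__version__ survive the migration unchanged, but they only recognize 0.x and 1.0.x releases. A hedged alternative, not part of this commit, compares a parsed version tuple against the 1.1 threshold stated in the message:

import matplotlib

# sketch only: take the first two numeric components of the version string
version = tuple(int(p) for p in matplotlib.__version__.split('.')[:2] if p.isdigit())

if version < (1, 1):
    print("The matplotlib version has to be updated to 1.1 or newer")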
@@ -246,11 +246,11 def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax,
246 ax_cb.yaxis.tick_right()
246 ax_cb.yaxis.tick_right()
247
247
248 if '0.' in matplotlib.__version__[0:2]:
248 if '0.' in matplotlib.__version__[0:2]:
249 print "The matplotlib version has to be updated to 1.1 or newer"
249 print("The matplotlib version has to be updated to 1.1 or newer")
250 return imesh
250 return imesh
251
251
252 if '1.0.' in matplotlib.__version__[0:4]:
252 if '1.0.' in matplotlib.__version__[0:4]:
253 print "The matplotlib version has to be updated to 1.1 or newer"
253 print("The matplotlib version has to be updated to 1.1 or newer")
254 return imesh
254 return imesh
255
255
256 matplotlib.pyplot.tight_layout()
256 matplotlib.pyplot.tight_layout()
@@ -334,11 +334,11 def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit
334 iplot = ax.lines[-1]
334 iplot = ax.lines[-1]
335
335
336 if '0.' in matplotlib.__version__[0:2]:
336 if '0.' in matplotlib.__version__[0:2]:
337 print "The matplotlib version has to be updated to 1.1 or newer"
337 print("The matplotlib version has to be updated to 1.1 or newer")
338 return iplot
338 return iplot
339
339
340 if '1.0.' in matplotlib.__version__[0:4]:
340 if '1.0.' in matplotlib.__version__[0:4]:
341 print "The matplotlib version has to be updated to 1.1 or newer"
341 print("The matplotlib version has to be updated to 1.1 or newer")
342 return iplot
342 return iplot
343
343
344 if grid != None:
344 if grid != None:
@@ -407,11 +407,11 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel=''
407 iplot = ax.lines[-1]
407 iplot = ax.lines[-1]
408
408
409 if '0.' in matplotlib.__version__[0:2]:
409 if '0.' in matplotlib.__version__[0:2]:
410 print "The matplotlib version has to be updated to 1.1 or newer"
410 print("The matplotlib version has to be updated to 1.1 or newer")
411 return iplot
411 return iplot
412
412
413 if '1.0.' in matplotlib.__version__[0:4]:
413 if '1.0.' in matplotlib.__version__[0:4]:
414 print "The matplotlib version has to be updated to 1.1 or newer"
414 print("The matplotlib version has to be updated to 1.1 or newer")
415 return iplot
415 return iplot
416
416
417 if grid != None:
417 if grid != None:
@@ -461,11 +461,11 def createPolar(ax, x, y,
461 iplot = ax.lines[-1]
461 iplot = ax.lines[-1]
462
462
463 if '0.' in matplotlib.__version__[0:2]:
463 if '0.' in matplotlib.__version__[0:2]:
464 print "The matplotlib version has to be updated to 1.1 or newer"
464 print("The matplotlib version has to be updated to 1.1 or newer")
465 return iplot
465 return iplot
466
466
467 if '1.0.' in matplotlib.__version__[0:4]:
467 if '1.0.' in matplotlib.__version__[0:4]:
468 print "The matplotlib version has to be updated to 1.1 or newer"
468 print("The matplotlib version has to be updated to 1.1 or newer")
469 return iplot
469 return iplot
470
470
471 # if grid != None:
471 # if grid != None:
@@ -491,7 +491,7 def polar(iplot, x, y, xlabel='', ylabel='', title=''):
491 def draw(fig):
491 def draw(fig):
492
492
493 if type(fig) == 'int':
493     if isinstance(fig, int):
494 raise ValueError, "Error drawing: Fig parameter should be a matplotlib figure object figure"
494 raise ValueError("Error drawing: Fig parameter should be a matplotlib figure object figure")
495
495
496 fig.canvas.draw()
496 fig.canvas.draw()
497
497
@@ -291,8 +291,8 RadarConst5 = RadarConst
291 # print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec
291 # print 'OffsetStartHeader ',self.OffsetStartHeader,'RecCounter ', self.RecCounter, 'Off2StartNxtRec ' , self.Off2StartNxtRec
292 #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
292 #OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
293 #startFp.seek(OffRHeader, os.SEEK_SET)
293 #startFp.seek(OffRHeader, os.SEEK_SET)
294 print 'debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec
294 print('debe ser 48, RecCounter*811248', self.OffsetStartHeader, self.RecCounter, self.Off2StartNxtRec)
295 print 'Posicion del bloque: ', OffRHeader
295 print('Posicion del bloque: ', OffRHeader)
296
296
297 header = numpy.fromfile(startFp, SRVI_STRUCTURE, 1)
297 header = numpy.fromfile(startFp, SRVI_STRUCTURE, 1)
298
298
@@ -326,6 +326,6 self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
326 # print 'Datasize',self.Datasize
326 # print 'Datasize',self.Datasize
327 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
327 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
328
328
329 print '=============================================='
329 print('==============================================')
330
330
331 print '=============================================='
331 print('==============================================') No newline at end of file
@@ -4,20 +4,20 $Author: murco $
4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 '''
5 '''
6
6
7 from jroIO_voltage import *
7 from .jroIO_voltage import *
8 from jroIO_spectra import *
8 from .jroIO_spectra import *
9 from jroIO_heispectra import *
9 from .jroIO_heispectra import *
10 from jroIO_usrp import *
10 from .jroIO_usrp import *
11 from jroIO_digitalRF import *
11 from .jroIO_digitalRF import *
12 from jroIO_kamisr import *
12 from .jroIO_kamisr import *
13 from jroIO_param import *
13 from .jroIO_param import *
14 from jroIO_hf import *
14 from .jroIO_hf import *
15
15
16 from jroIO_madrigal import *
16 from .jroIO_madrigal import *
17
17
18 from bltrIO_param import *
18 from .bltrIO_param import *
19 from jroIO_bltr import *
19 from .jroIO_bltr import *
20 from jroIO_mira35c import *
20 from .jroIO_mira35c import *
21 from julIO_param import *
21 from .julIO_param import *
22
22
23 from pxIO_param import * No newline at end of file
23 from .pxIO_param import * No newline at end of file
@@ -121,7 +121,7 class BLTRParamReader(JRODataReader, ProcessingUnit):
121 self.datatime = datetime.datetime(1900,1,1)
121 self.datatime = datetime.datetime(1900,1,1)
122
122
123 if self.path is None:
123 if self.path is None:
124 raise ValueError, "The path is not valid"
124 raise ValueError("The path is not valid")
125
125
126 if ext is None:
126 if ext is None:
127 ext = self.ext
127 ext = self.ext
@@ -131,8 +131,8 class BLTRParamReader(JRODataReader, ProcessingUnit):
131 self.fileIndex = 0
131 self.fileIndex = 0
132
132
133 if not self.fileList:
133 if not self.fileList:
134 raise Warning, "There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % (
134 raise Warning("There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' " % (
135 path)
135 path))
136
136
137 self.setNextFile()
137 self.setNextFile()
138
138
@@ -340,7 +340,7 class BLTRParamReader(JRODataReader, ProcessingUnit):
340 self.dataOut.sizeOfFile = self.sizeOfFile
340 self.dataOut.sizeOfFile = self.sizeOfFile
341 self.dataOut.lat = self.lat
341 self.dataOut.lat = self.lat
342 self.dataOut.lon = self.lon
342 self.dataOut.lon = self.lon
343 self.dataOut.channelList = range(self.nchannels)
343 self.dataOut.channelList = list(range(self.nchannels))
344 self.dataOut.kchan = self.kchan
344 self.dataOut.kchan = self.kchan
345 self.dataOut.delta = self.delta
345 self.dataOut.delta = self.delta
346 self.dataOut.correction = self.correction
346 self.dataOut.correction = self.correction
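range() is lazy in Python 3, so wrapping it in list(...) as this hunk does keeps channelList a real list that can be indexed, extended, and serialized as before. A runnable sketch (nchannels is illustrative):

nchannels = 3

lazy_channels = range(nchannels)            # a range object, not a list
channel_list = list(range(nchannels))       # materialized, as the reader stores it

print(lazy_channels)           # -> range(0, 3)
print(channel_list)            # -> [0, 1, 2]
print(channel_list + [99])     # list concatenation works; range objects cannot be added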
@@ -144,7 +144,7 class AMISRReader(ProcessingUnit):
144 self.status = 1
144 self.status = 1
145 else:
145 else:
146 self.status = 0
146 self.status = 0
147 print 'Path:%s does not exists'%self.path
147 print('Path:%s does not exists'%self.path)
148
148
149 return
149 return
150
150
@@ -169,11 +169,11 class AMISRReader(ProcessingUnit):
169
169
170 pat = '\d+.\d+'
170         pat = r'\d+.\d+'
171 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
171 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
172 dirnameList = filter(lambda x:x!=None,dirnameList)
172 dirnameList = [x for x in dirnameList if x!=None]
173 dirnameList = [x.string for x in dirnameList]
173 dirnameList = [x.string for x in dirnameList]
174 if not(online):
174 if not(online):
175 dirnameList = [self.__selDates(x) for x in dirnameList]
175 dirnameList = [self.__selDates(x) for x in dirnameList]
176 dirnameList = filter(lambda x:x!=None,dirnameList)
176 dirnameList = [x for x in dirnameList if x!=None]
177 if len(dirnameList)>0:
177 if len(dirnameList)>0:
178 self.status = 1
178 self.status = 1
179 self.dirnameList = dirnameList
179 self.dirnameList = dirnameList
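filter() returns a lazy iterator in Python 3, so this hunk replaces it with list comprehensions whose results can be reused and indexed. A runnable sketch of the same filtering idea (the listing and pattern are illustrative):

import re

listing = ['20180703.001', 'README.txt', '20180704.002']
pat = r'\d+\.\d+'

matches = [re.search(pat, name) for name in listing]      # None where there is no match
dirnames = [m.string for m in matches if m is not None]   # keep only real matches

print(dirnames)   # -> ['20180703.001', '20180704.002']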
@@ -186,8 +186,8 class AMISRReader(ProcessingUnit):
186 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
186 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
187 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
188
188
189 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
189 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
190 print '........................................'
190 print('........................................')
191 filter_filenameList = []
191 filter_filenameList = []
192 self.filenameList.sort()
192 self.filenameList.sort()
193 for i in range(len(self.filenameList)-1):
193 for i in range(len(self.filenameList)-1):
@@ -226,7 +226,7 class AMISRReader(ProcessingUnit):
226
226
227 def __getFilenameList(self, fileListInKeys, dirList):
227 def __getFilenameList(self, fileListInKeys, dirList):
228 for value in fileListInKeys:
228 for value in fileListInKeys:
229 dirName = value.keys()[0]
229 dirName = list(value.keys())[0]
230 for file in value[dirName]:
230 for file in value[dirName]:
231 filename = os.path.join(dirName, file)
231 filename = os.path.join(dirName, file)
232 self.filenameList.append(filename)
232 self.filenameList.append(filename)
@@ -304,7 +304,7 class AMISRReader(ProcessingUnit):
304 self.__selectDataForTimes()
304 self.__selectDataForTimes()
305
305
306 for i in range(len(self.filenameList)):
306 for i in range(len(self.filenameList)):
307 print "%s" %(self.filenameList[i])
307 print("%s" %(self.filenameList[i]))
308
308
309 return
309 return
310
310
@@ -315,7 +315,7 class AMISRReader(ProcessingUnit):
315 idFile += 1
315 idFile += 1
316 if not(idFile < len(self.filenameList)):
316 if not(idFile < len(self.filenameList)):
317 self.flagNoMoreFiles = 1
317 self.flagNoMoreFiles = 1
318 print "No more Files"
318 print("No more Files")
319 return 0
319 return 0
320
320
321 filename = self.filenameList[idFile]
321 filename = self.filenameList[idFile]
@@ -330,7 +330,7 class AMISRReader(ProcessingUnit):
330
330
331 self.amisrFilePointer = amisrFilePointer
331 self.amisrFilePointer = amisrFilePointer
332
332
333 print "Setting the file: %s"%self.filename
333 print("Setting the file: %s"%self.filename)
334
334
335 return 1
335 return 1
336
336
@@ -341,7 +341,7 class AMISRReader(ProcessingUnit):
341 self.__selectDataForTimes(online=True)
341 self.__selectDataForTimes(online=True)
342 filename = self.filenameList[0]
342 filename = self.filenameList[0]
343 while self.__filename_online == filename:
343 while self.__filename_online == filename:
344 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
344 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
345 sleep(self.__waitForNewFile)
345 sleep(self.__waitForNewFile)
346 self.__selectDataForTimes(online=True)
346 self.__selectDataForTimes(online=True)
347 filename = self.filenameList[0]
347 filename = self.filenameList[0]
@@ -351,7 +351,7 class AMISRReader(ProcessingUnit):
351 self.amisrFilePointer = h5py.File(filename,'r')
351 self.amisrFilePointer = h5py.File(filename,'r')
352 self.flagIsNewFile = 1
352 self.flagIsNewFile = 1
353 self.filename = filename
353 self.filename = filename
354 print "Setting the file: %s"%self.filename
354 print("Setting the file: %s"%self.filename)
355 return 1
355 return 1
356
356
357
357
@@ -368,12 +368,12 class AMISRReader(ProcessingUnit):
368 #looking index list for data
368 #looking index list for data
369 start_index = self.radacHeaderObj.pulseCount[0,:][0]
369 start_index = self.radacHeaderObj.pulseCount[0,:][0]
370 end_index = self.radacHeaderObj.npulses
370 end_index = self.radacHeaderObj.npulses
371 range4data = range(start_index, end_index)
371 range4data = list(range(start_index, end_index))
372 self.index4_schain_datablock = numpy.array(range4data)
372 self.index4_schain_datablock = numpy.array(range4data)
373
373
374 buffer_start_index = 0
374 buffer_start_index = 0
375 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
375 buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
376 range4buffer = range(buffer_start_index, buffer_end_index)
376 range4buffer = list(range(buffer_start_index, buffer_end_index))
377 self.index4_buffer = numpy.array(range4buffer)
377 self.index4_buffer = numpy.array(range4buffer)
378
378
379 self.linear_pulseCount = numpy.array(range4data + range4buffer)
379 self.linear_pulseCount = numpy.array(range4data + range4buffer)
@@ -403,8 +403,8 class AMISRReader(ProcessingUnit):
403
403
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
404 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
405
405
406 for i in range(len(self.beamCodeDict.values())):
406 for i in range(len(list(self.beamCodeDict.values()))):
407 xx = numpy.where(just4record0==self.beamCodeDict.values()[i][0])
407 xx = numpy.where(just4record0==list(self.beamCodeDict.values())[i][0])
408 indexPulseByBeam = self.linear_pulseCount[xx[0]]
408 indexPulseByBeam = self.linear_pulseCount[xx[0]]
409 self.beamRangeDict[i] = indexPulseByBeam
409 self.beamRangeDict[i] = indexPulseByBeam
410
410
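dict.values() is also a view in Python 3 and cannot be indexed, hence the list(...) wrappers in this hunk. Iterating the items directly avoids both the copy and the positional indexing; a runnable sketch (beam_code_dict and its contents are made up for illustration):

beam_code_dict = {0: [32771], 1: [32772], 2: [32773]}

first_codes = list(beam_code_dict.values())[0]   # what the converted code does
print(first_codes)                               # -> [32771]

for beam, codes in beam_code_dict.items():       # equivalent without the copy
    print(beam, codes[0])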
@@ -499,7 +499,7 class AMISRReader(ProcessingUnit):
499 self.searchFilesOnLine(path, walk)
499 self.searchFilesOnLine(path, walk)
500
500
501 if not(self.filenameList):
501 if not(self.filenameList):
502 print "There is no files into the folder: %s"%(path)
502 print("There is no files into the folder: %s"%(path))
503
503
504 sys.exit(-1)
504 sys.exit(-1)
505
505
@@ -632,8 +632,8 class AMISRReader(ProcessingUnit):
632 return 0
632 return 0
633
633
634 def printUTC(self):
634 def printUTC(self):
635 print self.dataOut.utctime
635 print(self.dataOut.utctime)
636 print ''
636 print('')
637
637
638 def setObjProperties(self):
638 def setObjProperties(self):
639
639
@@ -661,7 +661,7 class AMISRReader(ProcessingUnit):
661
661
662 if self.flagNoMoreFiles:
662 if self.flagNoMoreFiles:
663 self.dataOut.flagNoData = True
663 self.dataOut.flagNoData = True
664 print 'Process finished'
664 print('Process finished')
665 return 0
665 return 0
666
666
667 if self.__hasNotDataInBuffer():
667 if self.__hasNotDataInBuffer():
@@ -75,14 +75,14 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
75 try:
75 try:
76 fp = open(filename, 'rb')
76 fp = open(filename, 'rb')
77 except IOError:
77 except IOError:
78 print "The file %s can't be opened" % (filename)
78 print("The file %s can't be opened" % (filename))
79 return 0
79 return 0
80
80
81 sts = basicHeaderObj.read(fp)
81 sts = basicHeaderObj.read(fp)
82 fp.close()
82 fp.close()
83
83
84 if not(sts):
84 if not(sts):
85 print "Skipping the file %s because it has not a valid header" % (filename)
85 print("Skipping the file %s because it has not a valid header" % (filename))
86 return 0
86 return 0
87
87
88 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
88 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
@@ -130,7 +130,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
130 try:
130 try:
131 fp = open(filename, 'rb')
131 fp = open(filename, 'rb')
132 except IOError:
132 except IOError:
133 print "The file %s can't be opened" % (filename)
133 print("The file %s can't be opened" % (filename))
134 return None
134 return None
135
135
136 firstBasicHeaderObj = BasicHeader(LOCALTIME)
136 firstBasicHeaderObj = BasicHeader(LOCALTIME)
@@ -143,7 +143,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
143 sts = firstBasicHeaderObj.read(fp)
143 sts = firstBasicHeaderObj.read(fp)
144
144
145 if not(sts):
145 if not(sts):
146 print "[Reading] Skipping the file %s because it has not a valid header" % (filename)
146 print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
147 return None
147 return None
148
148
149 if not systemHeaderObj.read(fp):
149 if not systemHeaderObj.read(fp):
@@ -160,7 +160,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
160 offset = processingHeaderObj.blockSize + 24 # header size
160 offset = processingHeaderObj.blockSize + 24 # header size
161
161
162 if filesize <= offset:
162 if filesize <= offset:
163 print "[Reading] %s: This file has not enough data" % filename
163 print("[Reading] %s: This file has not enough data" % filename)
164 return None
164 return None
165
165
166 fp.seek(-offset, 2)
166 fp.seek(-offset, 2)
@@ -231,7 +231,7 def isFolderInDateRange(folder, startDate=None, endDate=None):
231 basename = os.path.basename(folder)
231 basename = os.path.basename(folder)
232
232
233 if not isRadarFolder(basename):
233 if not isRadarFolder(basename):
234 print "The folder %s has not the rigth format" % folder
234 print("The folder %s has not the rigth format" % folder)
235 return 0
235 return 0
236
236
237 if startDate and endDate:
237 if startDate and endDate:
@@ -274,7 +274,7 def isFileInDateRange(filename, startDate=None, endDate=None):
274 basename = os.path.basename(filename)
274 basename = os.path.basename(filename)
275
275
276 if not isRadarFile(basename):
276 if not isRadarFile(basename):
277 print "The filename %s has not the rigth format" % filename
277 print("The filename %s has not the rigth format" % filename)
278 return 0
278 return 0
279
279
280 if startDate and endDate:
280 if startDate and endDate:
@@ -315,8 +315,8 def getFileFromSet(path, ext, set):
315 return myfile[0]
315 return myfile[0]
316 else:
316 else:
317 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
317 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
318 print 'the filename %s does not exist' % filename
318 print('the filename %s does not exist' % filename)
319 print '...going to the last file: '
319 print('...going to the last file: ')
320
320
321 if validFilelist:
321 if validFilelist:
322 validFilelist = sorted(validFilelist, key=str.lower)
322 validFilelist = sorted(validFilelist, key=str.lower)
@@ -646,9 +646,9 class JRODataReader(JRODataIO):
646 return [], []
646 return [], []
647
647
648 if len(dateList) > 1:
648 if len(dateList) > 1:
649 print "[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList))
649 print("[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList)))
650 else:
650 else:
651 print "[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0])
651 print("[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0]))
652
652
653 filenameList = []
653 filenameList = []
654 datetimeList = []
654 datetimeList = []
@@ -679,10 +679,10 class JRODataReader(JRODataIO):
679 datetimeList = datetimeList[cursor * skip:cursor * skip + skip]
679 datetimeList = datetimeList[cursor * skip:cursor * skip + skip]
680
680
681 if not(filenameList):
681 if not(filenameList):
682 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path)
682 print("[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path))
683 return [], []
683 return [], []
684
684
685 print "[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime)
685 print("[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime))
686
686
687 # for i in range(len(filenameList)):
687 # for i in range(len(filenameList)):
688 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
688 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
@@ -743,7 +743,7 class JRODataReader(JRODataIO):
743 doypath.split('_')) > 1 else 0
743 doypath.split('_')) > 1 else 0
744 fullpath = os.path.join(path, doypath, expLabel)
744 fullpath = os.path.join(path, doypath, expLabel)
745
745
746 print "[Reading] %s folder was found: " % (fullpath)
746 print("[Reading] %s folder was found: " % (fullpath))
747
747
748 if set == None:
748 if set == None:
749 filename = getlastFileFromPath(fullpath, ext)
749 filename = getlastFileFromPath(fullpath, ext)
@@ -753,7 +753,7 class JRODataReader(JRODataIO):
753 if not(filename):
753 if not(filename):
754 return None, None, None, None, None, None
754 return None, None, None, None, None, None
755
755
756 print "[Reading] %s file was found" % (filename)
756 print("[Reading] %s file was found" % (filename))
757
757
758 if not(self.__verifyFile(os.path.join(fullpath, filename))):
758 if not(self.__verifyFile(os.path.join(fullpath, filename))):
759 return None, None, None, None, None, None
759 return None, None, None, None, None, None
@@ -844,10 +844,10 class JRODataReader(JRODataIO):
844
844
845 for nTries in range(tries):
845 for nTries in range(tries):
846 if firstTime_flag:
846 if firstTime_flag:
847 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1)
847 print("\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1))
848 sleep(self.delay)
848 sleep(self.delay)
849 else:
849 else:
850 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
850 print("\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext))
851
851
852 fullfilename, filename = checkForRealPath(
852 fullfilename, filename = checkForRealPath(
853 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
853 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
@@ -902,7 +902,7 class JRODataReader(JRODataIO):
902 return 0
902 return 0
903
903
904 if self.verbose:
904 if self.verbose:
905 print '[Reading] Setting the file: %s' % self.filename
905 print('[Reading] Setting the file: %s' % self.filename)
906
906
907 self.__readFirstHeader()
907 self.__readFirstHeader()
908 self.nReadBlocks = 0
908 self.nReadBlocks = 0
@@ -941,7 +941,7 class JRODataReader(JRODataIO):
941 # self.flagEoF = True
941 # self.flagEoF = True
942 return 0
942 return 0
943
943
944 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
944 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
945 sleep(self.delay)
945 sleep(self.delay)
946
946
947 return 0
947 return 0
@@ -963,7 +963,7 class JRODataReader(JRODataIO):
963 if (currentSize >= neededSize):
963 if (currentSize >= neededSize):
964 return 1
964 return 1
965
965
966 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
966 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
967 sleep(self.delay)
967 sleep(self.delay)
968
968
969 return 0
969 return 0
@@ -1052,7 +1052,7 class JRODataReader(JRODataIO):
1052 # Skip block out of startTime and endTime
1052 # Skip block out of startTime and endTime
1053 while True:
1053 while True:
1054 if not(self.__setNewBlock()):
1054 if not(self.__setNewBlock()):
1055 raise(schainpy.admin.SchainWarning('No more files'))
1055                 raise schainpy.admin.SchainWarning('No more files')
1056 return 0
1056 return 0
1057
1057
1058 if not(self.readBlock()):
1058 if not(self.readBlock()):
@@ -1060,17 +1060,17 class JRODataReader(JRODataIO):
1060
1060
1061 self.getBasicHeader()
1061 self.getBasicHeader()
1062 if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
1062 if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
1063 print "[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
1063 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
1064 self.processingHeaderObj.dataBlocksPerFile,
1064 self.processingHeaderObj.dataBlocksPerFile,
1065 self.dataOut.datatime.ctime())
1065 self.dataOut.datatime.ctime()))
1066 continue
1066 continue
1067
1067
1068 break
1068 break
1069
1069
1070 if self.verbose:
1070 if self.verbose:
1071 print "[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
1071 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
1072 self.processingHeaderObj.dataBlocksPerFile,
1072 self.processingHeaderObj.dataBlocksPerFile,
1073 self.dataOut.datatime.ctime())
1073 self.dataOut.datatime.ctime()))
1074 return 1
1074 return 1
1075
1075
1076 def __readFirstHeader(self):
1076 def __readFirstHeader(self):
@@ -1097,7 +1097,7 class JRODataReader(JRODataIO):
1097 elif datatype == 5:
1097 elif datatype == 5:
1098 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
1098 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
1099 else:
1099 else:
1100 raise ValueError, 'Data type was not defined'
1100 raise ValueError('Data type was not defined')
1101
1101
1102 self.dtype = datatype_str
1102 self.dtype = datatype_str
1103 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1103 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
@@ -1117,7 +1117,7 class JRODataReader(JRODataIO):
1117 except IOError:
1117 except IOError:
1118
1118
1119 if msgFlag:
1119 if msgFlag:
1120 print "[Reading] File %s can't be opened" % (filename)
1120 print("[Reading] File %s can't be opened" % (filename))
1121
1121
1122 return False
1122 return False
1123
1123
@@ -1157,7 +1157,7 class JRODataReader(JRODataIO):
1157
1157
1158 if currentSize < neededSize:
1158 if currentSize < neededSize:
1159 if msgFlag and (msg != None):
1159 if msgFlag and (msg != None):
1160 print msg
1160 print(msg)
1161 return False
1161 return False
1162
1162
1163 return True
1163 return True
@@ -1255,10 +1255,10 class JRODataReader(JRODataIO):
1255 pattern_path = multi_path[0]
1255 pattern_path = multi_path[0]
1256
1256
1257 if path_empty:
1257 if path_empty:
1258 print "[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate)
1258 print("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1259 else:
1259 else:
1260 if not dateList:
1260 if not dateList:
1261 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path)
1261 print("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1262
1262
1263 if include_path:
1263 if include_path:
1264 return dateList, pathList
1264 return dateList, pathList
@@ -1301,17 +1301,17 class JRODataReader(JRODataIO):
1301 self.receiver = self.context.socket(zmq.PULL)
1301 self.receiver = self.context.socket(zmq.PULL)
1302 self.receiver.connect(self.server)
1302 self.receiver.connect(self.server)
1303 time.sleep(0.5)
1303 time.sleep(0.5)
1304 print '[Starting] ReceiverData from {}'.format(self.server)
1304 print('[Starting] ReceiverData from {}'.format(self.server))
1305 else:
1305 else:
1306 self.server = None
1306 self.server = None
1307 if path == None:
1307 if path == None:
1308 raise ValueError, "[Reading] The path is not valid"
1308 raise ValueError("[Reading] The path is not valid")
1309
1309
1310 if ext == None:
1310 if ext == None:
1311 ext = self.ext
1311 ext = self.ext
1312
1312
1313 if online:
1313 if online:
1314 print "[Reading] Searching files in online mode..."
1314 print("[Reading] Searching files in online mode...")
1315
1315
1316 for nTries in range(self.nTries):
1316 for nTries in range(self.nTries):
1317 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(
1317 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(
@@ -1320,7 +1320,7 class JRODataReader(JRODataIO):
1320 if fullpath:
1320 if fullpath:
1321 break
1321 break
1322
1322
1323 print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (delay, path, nTries + 1)
1323                 print('[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (delay, path, nTries + 1))
1324 sleep(delay)
1324 sleep(delay)
1325
1325
1326 if not(fullpath):
1326 if not(fullpath):
@@ -1334,7 +1334,7 class JRODataReader(JRODataIO):
1334 self.foldercounter = foldercounter
1334 self.foldercounter = foldercounter
1335 last_set = None
1335 last_set = None
1336 else:
1336 else:
1337 print "[Reading] Searching files in offline mode ..."
1337 print("[Reading] Searching files in offline mode ...")
1338 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1338 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1339 startTime=startTime, endTime=endTime,
1339 startTime=startTime, endTime=endTime,
1340 set=set, expLabel=expLabel, ext=ext,
1340 set=set, expLabel=expLabel, ext=ext,
@@ -1375,11 +1375,11 class JRODataReader(JRODataIO):
1375
1375
1376 if not(self.setNextFile()):
1376 if not(self.setNextFile()):
1377 if (startDate != None) and (endDate != None):
1377 if (startDate != None) and (endDate != None):
1378 print "[Reading] No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime())
1378 print("[Reading] No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
1379 elif startDate != None:
1379 elif startDate != None:
1380 print "[Reading] No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime())
1380 print("[Reading] No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
1381 else:
1381 else:
1382 print "[Reading] No files"
1382 print("[Reading] No files")
1383
1383
1384 self.fileIndex = -1
1384 self.fileIndex = -1
1385 self.pathList = []
1385 self.pathList = []
@@ -1434,11 +1434,11 class JRODataReader(JRODataIO):
1434
1434
1435 def printReadBlocks(self):
1435 def printReadBlocks(self):
1436
1436
1437 print "[Reading] Number of read blocks per file %04d" % self.nReadBlocks
1437 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1438
1438
1439 def printTotalBlocks(self):
1439 def printTotalBlocks(self):
1440
1440
1441 print "[Reading] Number of read blocks %04d" % self.nTotalBlocks
1441 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1442
1442
1443 def printNumberOfBlock(self):
1443 def printNumberOfBlock(self):
1444 'SPAM!'
1444 'SPAM!'
@@ -1679,8 +1679,8 class JRODataWriter(JRODataIO):
1679
1679
1680 self.writeBlock()
1680 self.writeBlock()
1681
1681
1682 print "[Writing] Block No. %d/%d" % (self.blockIndex,
1682 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1683 self.processingHeaderObj.dataBlocksPerFile)
1683 self.processingHeaderObj.dataBlocksPerFile))
1684
1684
1685 return 1
1685 return 1
1686
1686
@@ -1756,7 +1756,7 class JRODataWriter(JRODataIO):
1756
1756
1757 self.setFirstHeader()
1757 self.setFirstHeader()
1758
1758
1759 print '[Writing] Opening file: %s' % self.filename
1759 print('[Writing] Opening file: %s' % self.filename)
1760
1760
1761 self.__writeFirstHeader()
1761 self.__writeFirstHeader()
1762
1762
@@ -1808,7 +1808,7 class JRODataWriter(JRODataIO):
1808 self.dtype = get_numpy_dtype(datatype)
1808 self.dtype = get_numpy_dtype(datatype)
1809
1809
1810 if not(self.setNextFile()):
1810 if not(self.setNextFile()):
1811 print "[Writing] There isn't a next file"
1811 print("[Writing] There isn't a next file")
1812 return 0
1812 return 0
1813
1813
1814 self.setBlockDimension()
1814 self.setBlockDimension()
@@ -29,7 +29,7 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
29 #from schainpy.model.io.jroIO_bltr import BLTRReader
30 from numpy import imag, shape, NaN
30 from numpy import imag, shape, NaN
31
31
32 from jroIO_base import JRODataReader
32 from .jroIO_base import JRODataReader
33
33
34
34
35 class Header(object):
35 class Header(object):
@@ -51,7 +51,7 class Header(object):
51 message += self.__class__.__name__.upper() + "\n"
51 message += self.__class__.__name__.upper() + "\n"
52 message += "#" * 50 + "\n"
52 message += "#" * 50 + "\n"
53
53
54 keyList = self.__dict__.keys()
54 keyList = list(self.__dict__.keys())
55 keyList.sort()
55 keyList.sort()
56
56
57 for key in keyList:
57 for key in keyList:
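dict_keys has no .sort() method in Python 3, so the converted code copies the view into a list before sorting; sorted() does the same in one step. A runnable sketch (header_fields is illustrative):

header_fields = {'nChannels': 3, 'nHeights': 100, 'PRFhz': 500}

key_list = list(header_fields.keys())   # the pattern used above
key_list.sort()

for key in sorted(header_fields):       # equivalent one-liner
    print("%s = %s" % (key, header_fields[key]))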
@@ -94,9 +94,9 class FileHeaderBLTR(Header):
94
94
95 header = numpy.fromfile(startFp, FILE_STRUCTURE, 1)
95 header = numpy.fromfile(startFp, FILE_STRUCTURE, 1)
96
96
97 print ' '
97 print(' ')
98 print 'puntero file header', startFp.tell()
98 print('puntero file header', startFp.tell())
99 print ' '
99 print(' ')
100
100
101 ''' numpy.fromfile(file, dtype, count, sep='')
101 ''' numpy.fromfile(file, dtype, count, sep='')
102 file : file or str
102 file : file or str
@@ -323,28 +323,28 class RecordHeaderBLTR(Header):
323 # RecCounter=0
323 # RecCounter=0
324 # Off2StartNxtRec=811248
324 # Off2StartNxtRec=811248
325 OffRHeader = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
325 OffRHeader = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
326 print ' '
326 print(' ')
327 print 'puntero Record Header', startFp.tell()
327 print('puntero Record Header', startFp.tell())
328 print ' '
328 print(' ')
329
329
330 startFp.seek(OffRHeader, os.SEEK_SET)
330 startFp.seek(OffRHeader, os.SEEK_SET)
331
331
332 print ' '
332 print(' ')
333 print 'puntero Record Header con seek', startFp.tell()
333 print('puntero Record Header con seek', startFp.tell())
334 print ' '
334 print(' ')
335
335
336 # print 'Posicion del bloque: ',OffRHeader
336 # print 'Posicion del bloque: ',OffRHeader
337
337
338 header = numpy.fromfile(startFp, RECORD_STRUCTURE, 1)
338 header = numpy.fromfile(startFp, RECORD_STRUCTURE, 1)
339
339
340 print ' '
340 print(' ')
341 print 'puntero Record Header con seek', startFp.tell()
341 print('puntero Record Header con seek', startFp.tell())
342 print ' '
342 print(' ')
343
343
344 print ' '
344 print(' ')
345 #
345 #
346 # print 'puntero Record Header despues de seek', header.tell()
346 # print 'puntero Record Header despues de seek', header.tell()
347 print ' '
347 print(' ')
348
348
349 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
349 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
350 self.RecCounter = int(header['RecCounter'][0])
350 self.RecCounter = int(header['RecCounter'][0])
@@ -400,21 +400,21 class RecordHeaderBLTR(Header):
400 # print 'Datasize',self.Datasize
400 # print 'Datasize',self.Datasize
401 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
401 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
402
402
403 print '=============================================='
403 print('==============================================')
404 print 'RecMgcNumber ', self.RecMgcNumber
404 print('RecMgcNumber ', self.RecMgcNumber)
405 print 'RecCounter ', self.RecCounter
405 print('RecCounter ', self.RecCounter)
406 print 'Off2StartNxtRec ', self.Off2StartNxtRec
406 print('Off2StartNxtRec ', self.Off2StartNxtRec)
407 print 'Off2StartData ', self.Off2StartData
407 print('Off2StartData ', self.Off2StartData)
408 print 'Range Resolution ', self.SampResolution
408 print('Range Resolution ', self.SampResolution)
409 print 'First Height ', self.StartRangeSamp
409 print('First Height ', self.StartRangeSamp)
410 print 'PRF (Hz) ', self.PRFhz
410 print('PRF (Hz) ', self.PRFhz)
411 print 'Heights (K) ', self.nHeights
411 print('Heights (K) ', self.nHeights)
412 print 'Channels (N) ', self.nChannels
412 print('Channels (N) ', self.nChannels)
413 print 'Profiles (J) ', self.nProfiles
413 print('Profiles (J) ', self.nProfiles)
414 print 'iCoh ', self.nCohInt
414 print('iCoh ', self.nCohInt)
415 print 'iInCoh ', self.nIncohInt
415 print('iInCoh ', self.nIncohInt)
416 print 'BeamAngleAzim ', self.BeamAngleAzim
416 print('BeamAngleAzim ', self.BeamAngleAzim)
417 print 'BeamAngleZen ', self.BeamAngleZen
417 print('BeamAngleZen ', self.BeamAngleZen)
418
418
419 # print 'ModoEnUso ',self.DualModeIndex
419 # print 'ModoEnUso ',self.DualModeIndex
420 # print 'UtcTime ',self.nUtime
420 # print 'UtcTime ',self.nUtime
@@ -423,25 +423,25 class RecordHeaderBLTR(Header):
423 # print 'Exp Comment ',self.ExpComment
423 # print 'Exp Comment ',self.ExpComment
424 # print 'FFT Window Index ',self.FFTwindowingInd
424 # print 'FFT Window Index ',self.FFTwindowingInd
425 # print 'N Dig. Channels ',self.nDigChannels
425 # print 'N Dig. Channels ',self.nDigChannels
426 print 'Size de bloque ', self.RHsize
426 print('Size de bloque ', self.RHsize)
427 print 'DataSize ', self.Datasize
427 print('DataSize ', self.Datasize)
428 print 'BeamAngleAzim ', self.BeamAngleAzim
428 print('BeamAngleAzim ', self.BeamAngleAzim)
429 # print 'AntennaCoord0 ',self.AntennaCoord0
429 # print 'AntennaCoord0 ',self.AntennaCoord0
430 # print 'AntennaAngl0 ',self.AntennaAngl0
430 # print 'AntennaAngl0 ',self.AntennaAngl0
431 # print 'AntennaCoord1 ',self.AntennaCoord1
431 # print 'AntennaCoord1 ',self.AntennaCoord1
432 # print 'AntennaAngl1 ',self.AntennaAngl1
432 # print 'AntennaAngl1 ',self.AntennaAngl1
433 # print 'AntennaCoord2 ',self.AntennaCoord2
433 # print 'AntennaCoord2 ',self.AntennaCoord2
434 # print 'AntennaAngl2 ',self.AntennaAngl2
434 # print 'AntennaAngl2 ',self.AntennaAngl2
435 print 'RecPhaseCalibr0 ', self.RecPhaseCalibr0
435 print('RecPhaseCalibr0 ', self.RecPhaseCalibr0)
436 print 'RecPhaseCalibr1 ', self.RecPhaseCalibr1
436 print('RecPhaseCalibr1 ', self.RecPhaseCalibr1)
437 print 'RecPhaseCalibr2 ', self.RecPhaseCalibr2
437 print('RecPhaseCalibr2 ', self.RecPhaseCalibr2)
438 print 'RecAmpCalibr0 ', self.RecAmpCalibr0
438 print('RecAmpCalibr0 ', self.RecAmpCalibr0)
439 print 'RecAmpCalibr1 ', self.RecAmpCalibr1
439 print('RecAmpCalibr1 ', self.RecAmpCalibr1)
440 print 'RecAmpCalibr2 ', self.RecAmpCalibr2
440 print('RecAmpCalibr2 ', self.RecAmpCalibr2)
441 print 'ReceiverGaindB0 ', self.ReceiverGaindB0
441 print('ReceiverGaindB0 ', self.ReceiverGaindB0)
442 print 'ReceiverGaindB1 ', self.ReceiverGaindB1
442 print('ReceiverGaindB1 ', self.ReceiverGaindB1)
443 print 'ReceiverGaindB2 ', self.ReceiverGaindB2
443 print('ReceiverGaindB2 ', self.ReceiverGaindB2)
444 print '=============================================='
444 print('==============================================')
445
445
446 if OffRHeader > endFp:
446 if OffRHeader > endFp:
447 sys.stderr.write(
447 sys.stderr.write(
@@ -590,7 +590,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
590
590
591 if self.flagNoMoreFiles:
591 if self.flagNoMoreFiles:
592 self.dataOut.flagNoData = True
592 self.dataOut.flagNoData = True
593 print 'NoData se vuelve true'
593 print('NoData se vuelve true')
594 return 0
594 return 0
595
595
596 self.fp = self.path
596 self.fp = self.path
@@ -600,7 +600,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
600 self.dataOut.data_cspc = self.data_cspc
600 self.dataOut.data_cspc = self.data_cspc
601 self.dataOut.data_output = self.data_output
601 self.dataOut.data_output = self.data_output
602
602
603 print 'self.dataOut.data_output', shape(self.dataOut.data_output)
603 print('self.dataOut.data_output', shape(self.dataOut.data_output))
604
604
605 # self.removeDC()
605 # self.removeDC()
606 return self.dataOut.data_spc
606 return self.dataOut.data_spc
@@ -617,7 +617,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
617 '''
617 '''
618
618
619 # The address of the folder is generated the name of the .fdt file that will be read
619 # The address of the folder is generated the name of the .fdt file that will be read
620 print "File: ", self.fileSelector + 1
620 print("File: ", self.fileSelector + 1)
621
621
622 if self.fileSelector < len(self.filenameList):
622 if self.fileSelector < len(self.filenameList):
623
623
@@ -630,7 +630,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
630
630
631 self.readBlock() # Block reading
631 self.readBlock() # Block reading
632 else:
632 else:
633 print 'readFile FlagNoData becomes true'
633 print('readFile FlagNoData becomes true')
634 self.flagNoMoreFiles = True
634 self.flagNoMoreFiles = True
635 self.dataOut.flagNoData = True
635 self.dataOut.flagNoData = True
636 return 0
636 return 0
@@ -660,7 +660,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
660 '''
660 '''
661
661
662 if self.BlockCounter < self.nFDTdataRecors - 2:
662 if self.BlockCounter < self.nFDTdataRecors - 2:
663 print self.nFDTdataRecors, 'CONDICION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
663 print(self.nFDTdataRecors, 'CONDICION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
664 if self.ReadMode == 1:
664 if self.ReadMode == 1:
665 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter + 1)
665 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter + 1)
666 elif self.ReadMode == 0:
666 elif self.ReadMode == 0:
@@ -687,8 +687,8 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
687 self.__firstHeigth = rheader.StartRangeSamp
687 self.__firstHeigth = rheader.StartRangeSamp
688 self.__deltaHeigth = rheader.SampResolution
688 self.__deltaHeigth = rheader.SampResolution
689 self.dataOut.heightList = self.__firstHeigth + \
689 self.dataOut.heightList = self.__firstHeigth + \
690 numpy.array(range(self.nHeights)) * self.__deltaHeigth
690 numpy.array(list(range(self.nHeights))) * self.__deltaHeigth
691 self.dataOut.channelList = range(self.nChannels)
691 self.dataOut.channelList = list(range(self.nChannels))
692 self.dataOut.nProfiles = rheader.nProfiles
692 self.dataOut.nProfiles = rheader.nProfiles
693 self.dataOut.nIncohInt = rheader.nIncohInt
693 self.dataOut.nIncohInt = rheader.nIncohInt
694 self.dataOut.nCohInt = rheader.nCohInt
694 self.dataOut.nCohInt = rheader.nCohInt
@@ -703,7 +703,7 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
703 self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
703 self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
704
704
705 self.data_output = numpy.ones([3, rheader.nHeights]) * numpy.NaN
705 self.data_output = numpy.ones([3, rheader.nHeights]) * numpy.NaN
706 print 'self.data_output', shape(self.data_output)
706 print('self.data_output', shape(self.data_output))
707 self.dataOut.velocityX = []
707 self.dataOut.velocityX = []
708 self.dataOut.velocityY = []
708 self.dataOut.velocityY = []
709 self.dataOut.velocityV = []
709 self.dataOut.velocityV = []
@@ -757,11 +757,11 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
757 z = self.data_spc.copy() # /factor
757 z = self.data_spc.copy() # /factor
758 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
758 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
759 #zdB = 10*numpy.log10(z)
759 #zdB = 10*numpy.log10(z)
760 print ' '
760 print(' ')
761 print 'Z: '
761 print('Z: ')
762 print shape(z)
762 print(shape(z))
763 print ' '
763 print(' ')
764 print ' '
764 print(' ')
765
765
766 self.dataOut.data_spc = self.data_spc
766 self.dataOut.data_spc = self.data_spc
767
767
@@ -1177,4 +1177,4 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
1177 else:
1177 else:
1178 self.fileSelector += 1
1178 self.fileSelector += 1
1179 self.BlockCounter = 0
1179 self.BlockCounter = 0
1180 print "Next File"
1180 print("Next File") No newline at end of file
@@ -27,11 +27,11 from schainpy.model.data.jrodata import Voltage
27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 from time import time
28 from time import time
29
29
30 import cPickle
30 import pickle
31 try:
31 try:
32 import digital_rf
32 import digital_rf
33 except:
33 except:
34 print 'You should install "digital_rf" module if you want to read Digital RF data'
34 print('You should install "digital_rf" module if you want to read Digital RF data')
35
35
36
36
37 class DigitalRFReader(ProcessingUnit):
37 class DigitalRFReader(ProcessingUnit):
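cPickle was folded into pickle in Python 3; pickle picks the C implementation automatically and dumps/loads keep the same signatures. A runnable sketch of the dtype round-trip the reader performs (the dtype shown is illustrative):

import pickle
import numpy

dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])

blob = pickle.dumps(dtype)     # bytes, as stored in the metadata dict
restored = pickle.loads(blob)

print(restored == dtype)       # -> True

Note that pickles written under Python 2 may need pickle.loads(blob, encoding='latin1') when read back on Python 3.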
@@ -59,7 +59,7 class DigitalRFReader(ProcessingUnit):
59 self.oldAverage = None
59 self.oldAverage = None
60
60
61 def close(self):
61 def close(self):
62 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
62 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
63 return
63 return
64
64
65 def __getCurrentSecond(self):
65 def __getCurrentSecond(self):
@@ -115,7 +115,7 class DigitalRFReader(ProcessingUnit):
115 numpy.arange(self.__nSamples, dtype=numpy.float) * \
115 numpy.arange(self.__nSamples, dtype=numpy.float) * \
116 self.__deltaHeigth
116 self.__deltaHeigth
117
117
118 self.dataOut.channelList = range(self.__num_subchannels)
118 self.dataOut.channelList = list(range(self.__num_subchannels))
119
119
120 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
120 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
121
121
@@ -256,7 +256,7 class DigitalRFReader(ProcessingUnit):
256 self.flagDecodeData = flagDecodeData
256 self.flagDecodeData = flagDecodeData
257 self.i = 0
257 self.i = 0
258 if not os.path.isdir(path):
258 if not os.path.isdir(path):
259 raise ValueError, "[Reading] Directory %s does not exist" % path
259 raise ValueError("[Reading] Directory %s does not exist" % path)
260
260
261 try:
261 try:
262 self.digitalReadObj = digital_rf.DigitalRFReader(
262 self.digitalReadObj = digital_rf.DigitalRFReader(
@@ -267,10 +267,10 class DigitalRFReader(ProcessingUnit):
267 channelNameList = self.digitalReadObj.get_channels()
267 channelNameList = self.digitalReadObj.get_channels()
268
268
269 if not channelNameList:
269 if not channelNameList:
270 raise ValueError, "[Reading] Directory %s does not have any files" % path
270 raise ValueError("[Reading] Directory %s does not have any files" % path)
271
271
272 if not channelList:
272 if not channelList:
273 channelList = range(len(channelNameList))
273 channelList = list(range(len(channelNameList)))
274
274
275 ########## Reading metadata ######################
275 ########## Reading metadata ######################
276
276
@@ -294,7 +294,7 class DigitalRFReader(ProcessingUnit):
294 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
295 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
296 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
297 self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype'])
297 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
298 except:
298 except:
299 pass
299 pass
300
300
@@ -361,7 +361,7 class DigitalRFReader(ProcessingUnit):
361 endUTCSecond = end_index / self.__sample_rate
361 endUTCSecond = end_index / self.__sample_rate
362 if not nSamples:
362 if not nSamples:
363 if not ippKm:
363 if not ippKm:
364 raise ValueError, "[Reading] nSamples or ippKm should be defined"
364 raise ValueError("[Reading] nSamples or ippKm should be defined")
365 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
366 channelBoundList = []
366 channelBoundList = []
367 channelNameListFiltered = []
367 channelNameListFiltered = []
@@ -388,7 +388,7 class DigitalRFReader(ProcessingUnit):
388 self.__channelNameList = channelNameListFiltered
388 self.__channelNameList = channelNameListFiltered
389 self.__channelBoundList = channelBoundList
389 self.__channelBoundList = channelBoundList
390 self.__nSamples = nSamples
390 self.__nSamples = nSamples
391 self.__samples_to_read = long(nSamples) # FIJO: AHORA 40
391 self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
392 self.__nChannels = len(self.__channelList)
392 self.__nChannels = len(self.__channelList)
393
393
394 self.__startUTCSecond = startUTCSecond
394 self.__startUTCSecond = startUTCSecond
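Python 3 has a single, unbounded int type, so every long(...) call becomes int(...). A runnable sketch (the sample rate and timestamp are illustrative):

sample_rate = 1.0e6
n_samples = 40

samples_to_read = int(n_samples)
this_unix_sample = int(1531180800 * sample_rate) - samples_to_read

print(samples_to_read, this_unix_sample)
print(2 ** 80)   # still a plain int; there is no separate long type to overflow into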
@@ -402,7 +402,7 class DigitalRFReader(ProcessingUnit):
402 startUTCSecond = numpy.floor(endUTCSecond)
402 startUTCSecond = numpy.floor(endUTCSecond)
403
403
404 # por que en el otro metodo lo primero q se hace es sumar samplestoread
404 # por que en el otro metodo lo primero q se hace es sumar samplestoread
405 self.__thisUnixSample = long(
405 self.__thisUnixSample = int(
406 startUTCSecond * self.__sample_rate) - self.__samples_to_read
406 startUTCSecond * self.__sample_rate) - self.__samples_to_read
407
407
408 self.__data_buffer = numpy.zeros(
408 self.__data_buffer = numpy.zeros(
@@ -411,17 +411,17 class DigitalRFReader(ProcessingUnit):
411 self.__setFileHeader()
411 self.__setFileHeader()
412 self.isConfig = True
412 self.isConfig = True
413
413
414 print "[Reading] Digital RF Data was found from %s to %s " % (
414 print("[Reading] Digital RF Data was found from %s to %s " % (
415 datetime.datetime.utcfromtimestamp(
415 datetime.datetime.utcfromtimestamp(
416 self.__startUTCSecond - self.__timezone),
416 self.__startUTCSecond - self.__timezone),
417 datetime.datetime.utcfromtimestamp(
417 datetime.datetime.utcfromtimestamp(
418 self.__endUTCSecond - self.__timezone)
418 self.__endUTCSecond - self.__timezone)
419 )
419 ))
420
420
421 print "[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
421 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
422 datetime.datetime.utcfromtimestamp(
422 datetime.datetime.utcfromtimestamp(
423 endUTCSecond - self.__timezone)
423 endUTCSecond - self.__timezone)
424 )
424 ))
425 self.oldAverage = None
425 self.oldAverage = None
426 self.count = 0
426 self.count = 0
427 self.executionTime = 0
427 self.executionTime = 0
@@ -433,7 +433,7 class DigitalRFReader(ProcessingUnit):
433 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
433 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
434 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
434 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
435 # )
435 # )
436 print "[Reading] reloading metadata ..."
436 print("[Reading] reloading metadata ...")
437
437
438 try:
438 try:
439 self.digitalReadObj.reload(complete_update=True)
439 self.digitalReadObj.reload(complete_update=True)
@@ -448,13 +448,13 class DigitalRFReader(ProcessingUnit):
448
448
449 if end_index > self.__endUTCSecond * self.__sample_rate:
449 if end_index > self.__endUTCSecond * self.__sample_rate:
450 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
450 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
451 print
451 print()
452 print "[Reading] New timerange found [%s, %s] " % (
452 print("[Reading] New timerange found [%s, %s] " % (
453 datetime.datetime.utcfromtimestamp(
453 datetime.datetime.utcfromtimestamp(
454 self.__startUTCSecond - self.__timezone),
454 self.__startUTCSecond - self.__timezone),
455 datetime.datetime.utcfromtimestamp(
455 datetime.datetime.utcfromtimestamp(
456 self.__endUTCSecond - self.__timezone)
456 self.__endUTCSecond - self.__timezone)
457 )
457 ))
458
458
459 return True
459 return True
460
460
@@ -480,7 +480,7 class DigitalRFReader(ProcessingUnit):
480 self.__thisUnixSample += self.__samples_to_read
480 self.__thisUnixSample += self.__samples_to_read
481
481
482 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
482 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
483 print "[Reading] There are no more data into selected time-range"
483 print("[Reading] There are no more data into selected time-range")
484 if self.__online:
484 if self.__online:
485 self.__reload()
485 self.__reload()
486 else:
486 else:
@@ -507,17 +507,17 class DigitalRFReader(ProcessingUnit):
507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 self.count = self.count + 1.0
508 self.count = self.count + 1.0
509
509
510 except IOError, e:
510 except IOError as e:
511 # read next profile
511 # read next profile
512 self.__flagDiscontinuousBlock = True
512 self.__flagDiscontinuousBlock = True
513 print "[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
513 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
514 break
514 break
515
515
516 if result.shape[0] != self.__samples_to_read:
516 if result.shape[0] != self.__samples_to_read:
517 self.__flagDiscontinuousBlock = True
517 self.__flagDiscontinuousBlock = True
518 print "[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
518 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
519 result.shape[0],
519 result.shape[0],
520 self.__samples_to_read)
520 self.__samples_to_read))
521 break
521 break
522
522
523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
@@ -531,9 +531,9 class DigitalRFReader(ProcessingUnit):
531 if not dataOk:
531 if not dataOk:
532 return False
532 return False
533
533
534 print "[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
534 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
535 self.__samples_to_read,
535 self.__samples_to_read,
536 self.__timeInterval)
536 self.__timeInterval))
537
537
538 self.__bufferIndex = 0
538 self.__bufferIndex = 0
539
539
@@ -572,7 +572,7 class DigitalRFReader(ProcessingUnit):
572 return False
572 return False
573
573
574 if self.__flagDiscontinuousBlock:
574 if self.__flagDiscontinuousBlock:
575 print '[Reading] discontinuous block found ... continue with the next block'
575 print('[Reading] discontinuous block found ... continue with the next block')
576 continue
576 continue
577
577
578 if not self.__online:
578 if not self.__online:
@@ -582,7 +582,7 class DigitalRFReader(ProcessingUnit):
582 if err_counter > nTries:
582 if err_counter > nTries:
583 return False
583 return False
584
584
585 print '[Reading] waiting %d seconds to read a new block' % seconds
585 print('[Reading] waiting %d seconds to read a new block' % seconds)
586 sleep(seconds)
586 sleep(seconds)
587
587
588 self.dataOut.data = self.__data_buffer[:,
588 self.dataOut.data = self.__data_buffer[:,
@@ -650,7 +650,7 class DigitalRFWriter(Operation):
650
650
651 self.metadata_dict['frequency'] = self.dataOut.frequency
651 self.metadata_dict['frequency'] = self.dataOut.frequency
652 self.metadata_dict['timezone'] = self.dataOut.timeZone
652 self.metadata_dict['timezone'] = self.dataOut.timeZone
653 self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype)
653 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
654 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
654 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
655 self.metadata_dict['heightList'] = self.dataOut.heightList
655 self.metadata_dict['heightList'] = self.dataOut.heightList
656 self.metadata_dict['channelList'] = self.dataOut.channelList
656 self.metadata_dict['channelList'] = self.dataOut.channelList
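
Note: cPickle does not exist on Python 3; plain pickle picks up the C accelerator automatically. One behavioural difference worth keeping in mind is that pickle.dumps() returns bytes on Python 3, so whatever later serializes metadata_dict must accept bytes values. A minimal sketch (the dtype is illustrative):

import pickle            # Python 3: no separate cPickle module
import numpy

metadata = {}
metadata['dtype'] = pickle.dumps(numpy.dtype('complex64'))   # bytes on Python 3
print(type(metadata['dtype']))                               # <class 'bytes'>
print(pickle.loads(metadata['dtype']))                       # complex64
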
@@ -690,8 +690,8 class DigitalRFWriter(Operation):
690 file_cadence_millisecs = 1000
690 file_cadence_millisecs = 1000
691
691
692 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
692 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
693 sample_rate_numerator = long(sample_rate_fraction.numerator)
693 sample_rate_numerator = int(sample_rate_fraction.numerator)
694 sample_rate_denominator = long(sample_rate_fraction.denominator)
694 sample_rate_denominator = int(sample_rate_fraction.denominator)
695 start_global_index = dataOut.utctime * self.__sample_rate
695 start_global_index = dataOut.utctime * self.__sample_rate
696
696
697 uuid = 'prueba'
697 uuid = 'prueba'
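
Note: Python 3 has a single arbitrary-precision int type, so the long() calls around the sample-rate fraction simply become int(). A sketch with a made-up sample rate:

from fractions import Fraction

sample_rate = 12.5e6                                   # illustrative value
fraction = Fraction(sample_rate).limit_denominator()
numerator = int(fraction.numerator)                    # was long(...) on Python 2
denominator = int(fraction.denominator)
print(numerator, denominator)                          # 12500000 1
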
@@ -781,8 +781,8 class DigitalRFWriter(Operation):
781 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
781 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
782
782
783 def close(self):
783 def close(self):
784 print '[Writing] - Closing files '
784 print('[Writing] - Closing files ')
785 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
785 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
786 try:
786 try:
787 self.digitalWriteObj.close()
787 self.digitalWriteObj.close()
788 except:
788 except:
@@ -13,12 +13,12 from time import sleep
13
13
14 try:
14 try:
15 import pyfits
15 import pyfits
16 except ImportError, e:
16 except ImportError as e:
17 print "Fits data cannot be used. Install pyfits module"
17 print("Fits data cannot be used. Install pyfits module")
18
18
19 from xml.etree.ElementTree import ElementTree
19 from xml.etree.ElementTree import ElementTree
20
20
21 from jroIO_base import isRadarFolder, isNumber
21 from .jroIO_base import isRadarFolder, isNumber
22 from schainpy.model.data.jrodata import Fits
22 from schainpy.model.data.jrodata import Fits
23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
24
24
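
Note: implicit relative imports were removed in Python 3, which is why the bare jroIO_base import becomes either an explicit relative import or a fully qualified absolute one; both forms appear in this changeset. Illustration only, since these lines resolve only inside the schainpy package:

# Assumed layout: schainpy/model/io/ contains this module and jroIO_base.py side by side.
# Python 2 resolved the bare form implicitly:
#     from jroIO_base import isRadarFolder, isNumber
# Python 3 raises ImportError for it; the replacements are:
#     from .jroIO_base import isRadarFolder, isNumber                    # explicit relative
#     from schainpy.model.io.jroIO_base import isRadarFolder, isNumber   # absolute
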
@@ -240,7 +240,7 class FitsWriter(Operation):
240 self.setFile = setFile
240 self.setFile = setFile
241 self.flagIsNewFile = 1
241 self.flagIsNewFile = 1
242
242
243 print 'Writing the file: %s'%self.filename
243 print('Writing the file: %s'%self.filename)
244
244
245 self.setFitsHeader(self.dataOut, self.metadatafile)
245 self.setFitsHeader(self.dataOut, self.metadatafile)
246
246
@@ -327,7 +327,7 class FitsReader(ProcessingUnit):
327 try:
327 try:
328 fitsObj = pyfits.open(filename,'readonly')
328 fitsObj = pyfits.open(filename,'readonly')
329 except:
329 except:
330 print "File %s can't be opened" %(filename)
330 print("File %s can't be opened" %(filename))
331 return None
331 return None
332
332
333 header = fitsObj[0].header
333 header = fitsObj[0].header
@@ -355,7 +355,7 class FitsReader(ProcessingUnit):
355 idFile += 1
355 idFile += 1
356 if not(idFile < len(self.filenameList)):
356 if not(idFile < len(self.filenameList)):
357 self.flagNoMoreFiles = 1
357 self.flagNoMoreFiles = 1
358 print "No more Files"
358 print("No more Files")
359 return 0
359 return 0
360
360
361 filename = self.filenameList[idFile]
361 filename = self.filenameList[idFile]
@@ -373,7 +373,7 class FitsReader(ProcessingUnit):
373 self.fileSize = fileSize
373 self.fileSize = fileSize
374 self.fitsObj = fitsObj
374 self.fitsObj = fitsObj
375 self.blockIndex = 0
375 self.blockIndex = 0
376 print "Setting the file: %s"%self.filename
376 print("Setting the file: %s"%self.filename)
377
377
378 return 1
378 return 1
379
379
@@ -398,10 +398,10 class FitsReader(ProcessingUnit):
398 headerObj = self.fitsObj[0]
398 headerObj = self.fitsObj[0]
399
399
400 self.header_dict = headerObj.header
400 self.header_dict = headerObj.header
401 if 'EXPNAME' in headerObj.header.keys():
401 if 'EXPNAME' in list(headerObj.header.keys()):
402 self.expName = headerObj.header['EXPNAME']
402 self.expName = headerObj.header['EXPNAME']
403
403
404 if 'DATATYPE' in headerObj.header.keys():
404 if 'DATATYPE' in list(headerObj.header.keys()):
405 self.dataType = headerObj.header['DATATYPE']
405 self.dataType = headerObj.header['DATATYPE']
406
406
407 self.datetimestr = headerObj.header['DATETIME']
407 self.datetimestr = headerObj.header['DATETIME']
@@ -421,7 +421,7 class FitsReader(ProcessingUnit):
421
421
422 # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
422 # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
423
423
424 if 'COMMENT' in headerObj.header.keys():
424 if 'COMMENT' in list(headerObj.header.keys()):
425 self.comments = headerObj.header['COMMENT']
425 self.comments = headerObj.header['COMMENT']
426
426
427 self.readHeightList()
427 self.readHeightList()
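
Note: on Python 2, header.keys() returned a list, so membership tests against it were cheap; 2to3 conservatively wraps the call in list(...). The wrapper is correct but unnecessary, because the in operator on a mapping (or on its keys view) already performs the membership test. A sketch with a plain dict standing in for the FITS header:

header = {'EXPNAME': 'MST ISR', 'DATATYPE': 'float32'}   # stands in for fitsObj[0].header

# 2to3 output: correct, but builds a throwaway list just for the test
if 'EXPNAME' in list(header.keys()):
    expName = header['EXPNAME']

# Equivalent and idiomatic on both Python 2 and 3
if 'EXPNAME' in header:
    expName = header['EXPNAME']
print(expName)
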
@@ -498,10 +498,10 class FitsReader(ProcessingUnit):
498 thisDate += datetime.timedelta(1)
498 thisDate += datetime.timedelta(1)
499
499
500 if pathList == []:
500 if pathList == []:
501 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
501 print("Any folder was found for the date range: %s-%s" %(startDate, endDate))
502 return None, None
502 return None, None
503
503
504 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
504 print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate))
505
505
506 filenameList = []
506 filenameList = []
507 datetimeList = []
507 datetimeList = []
@@ -525,14 +525,14 class FitsReader(ProcessingUnit):
525 datetimeList.append(thisDatetime)
525 datetimeList.append(thisDatetime)
526
526
527 if not(filenameList):
527 if not(filenameList):
528 print "Any file was found for the time range %s - %s" %(startTime, endTime)
528 print("Any file was found for the time range %s - %s" %(startTime, endTime))
529 return None, None
529 return None, None
530
530
531 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
531 print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime))
532 print
532 print()
533
533
534 for i in range(len(filenameList)):
534 for i in range(len(filenameList)):
535 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
535 print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()))
536
536
537 self.filenameList = filenameList
537 self.filenameList = filenameList
538 self.datetimeList = datetimeList
538 self.datetimeList = datetimeList
@@ -552,22 +552,22 class FitsReader(ProcessingUnit):
552 walk = True):
552 walk = True):
553
553
554 if path == None:
554 if path == None:
555 raise ValueError, "The path is not valid"
555 raise ValueError("The path is not valid")
556
556
557 if ext == None:
557 if ext == None:
558 ext = self.ext
558 ext = self.ext
559
559
560 if not(online):
560 if not(online):
561 print "Searching files in offline mode ..."
561 print("Searching files in offline mode ...")
562 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
562 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
563 startTime=startTime, endTime=endTime,
563 startTime=startTime, endTime=endTime,
564 set=set, expLabel=expLabel, ext=ext,
564 set=set, expLabel=expLabel, ext=ext,
565 walk=walk)
565 walk=walk)
566
566
567 if not(pathList):
567 if not(pathList):
568 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
568 print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
569 datetime.datetime.combine(startDate,startTime).ctime(),
569 datetime.datetime.combine(startDate,startTime).ctime(),
570 datetime.datetime.combine(endDate,endTime).ctime())
570 datetime.datetime.combine(endDate,endTime).ctime()))
571
571
572 sys.exit(-1)
572 sys.exit(-1)
573
573
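
Note: the comma form "raise ValueError, msg" is a syntax error on Python 3; exceptions are raised by calling the exception class, as in the converted lines above. Sketch:

def check_path(path):
    # Python 2 only:  raise ValueError, "The path is not valid"
    if path is None:
        raise ValueError("The path is not valid")    # valid on Python 2 and 3
    return path

print(check_path('/data/fits'))
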
@@ -582,11 +582,11 class FitsReader(ProcessingUnit):
582
582
583 if not(self.setNextFile()):
583 if not(self.setNextFile()):
584 if (startDate!=None) and (endDate!=None):
584 if (startDate!=None) and (endDate!=None):
585 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
585 print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
586 elif startDate != None:
586 elif startDate != None:
587 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
587 print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()))
588 else:
588 else:
589 print "No files"
589 print("No files")
590
590
591 sys.exit(-1)
591 sys.exit(-1)
592
592
@@ -638,7 +638,7 class FitsReader(ProcessingUnit):
638 self.__rdBasicHeader()
638 self.__rdBasicHeader()
639 return 1
639 return 1
640
640
641 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
641 print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1))
642 sleep( self.delay )
642 sleep( self.delay )
643
643
644
644
@@ -691,7 +691,7 class FitsReader(ProcessingUnit):
691
691
692 if self.flagNoMoreFiles:
692 if self.flagNoMoreFiles:
693 self.dataOut.flagNoData = True
693 self.dataOut.flagNoData = True
694 print 'Process finished'
694 print('Process finished')
695 return 0
695 return 0
696
696
697 self.flagDiscontinuousBlock = 0
697 self.flagDiscontinuousBlock = 0
@@ -68,10 +68,10 def getFileFromSet(path, ext, set=None):
68 if set == None:
68 if set == None:
69 return validFilelist[-1]
69 return validFilelist[-1]
70
70
71 print "set =" ,set
71 print("set =" ,set)
72 for thisFile in validFilelist:
72 for thisFile in validFilelist:
73 if set <= int(thisFile[6:16]):
73 if set <= int(thisFile[6:16]):
74 print thisFile,int(thisFile[6:16])
74 print(thisFile,int(thisFile[6:16]))
75 return thisFile
75 return thisFile
76
76
77 return validFilelist[-1]
77 return validFilelist[-1]
@@ -83,8 +83,8 def getFileFromSet(path, ext, set=None):
83 return myfile[0]
83 return myfile[0]
84 else:
84 else:
85 filename = '*%10.10d%s'%(set,ext.lower())
85 filename = '*%10.10d%s'%(set,ext.lower())
86 print 'the filename %s does not exist'%filename
86 print('the filename %s does not exist'%filename)
87 print '...going to the last file: '
87 print('...going to the last file: ')
88
88
89 if validFilelist:
89 if validFilelist:
90 validFilelist = sorted( validFilelist, key=str.lower )
90 validFilelist = sorted( validFilelist, key=str.lower )
@@ -115,7 +115,7 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
115 try:
115 try:
116 number= int(thisFile[6:16])
116 number= int(thisFile[6:16])
117 except:
117 except:
118 print "There is a file or folder with different format"
118 print("There is a file or folder with different format")
119 if not isNumber(number):
119 if not isNumber(number):
120 continue
120 continue
121
121
@@ -256,7 +256,7 class HFReader(ProcessingUnit):
256 self.status=1
256 self.status=1
257 else:
257 else:
258 self.status=0
258 self.status=0
259 print 'Path %s does not exits'%self.path
259 print('Path %s does not exist'%self.path)
260 return
260 return
261 return
261 return
262
262
@@ -282,12 +282,12 class HFReader(ProcessingUnit):
282
282
283 pat = '\d+.\d+'
283 pat = '\d+.\d+'
284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
285 dirnameList = filter(lambda x:x!=None,dirnameList)
285 dirnameList = [x for x in dirnameList if x!=None]
286 dirnameList = [x.string for x in dirnameList]
286 dirnameList = [x.string for x in dirnameList]
287 if not(online):
287 if not(online):
288
288
289 dirnameList = [self.__selDates(x) for x in dirnameList]
289 dirnameList = [self.__selDates(x) for x in dirnameList]
290 dirnameList = filter(lambda x:x!=None,dirnameList)
290 dirnameList = [x for x in dirnameList if x!=None]
291
291
292 if len(dirnameList)>0:
292 if len(dirnameList)>0:
293 self.status = 1
293 self.status = 1
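
Note: filter() returns a lazy iterator on Python 3, so results that are indexed or iterated more than once are rewritten as list comprehensions (list(filter(...)) would work as well). The comprehension also allows the more idiomatic "is not None" test. A sketch of the directory filtering with made-up names:

import re

names = ['d2018.120', 'README', 'd2018.121']            # illustrative directory listing
matches = [re.search(r'\d+\.\d+', x) for x in names]
matches = [m for m in matches if m is not None]         # replaces filter(lambda x: x != None, ...)
dirnames = [m.string for m in matches]
print(dirnames)                                         # ['d2018.120', 'd2018.121']
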
@@ -301,8 +301,8 class HFReader(ProcessingUnit):
301 def __getTimeFromData(self):
301 def __getTimeFromData(self):
302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
304 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
304 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
305 print '........................................'
305 print('........................................')
306 filter_filenameList=[]
306 filter_filenameList=[]
307 self.filenameList.sort()
307 self.filenameList.sort()
308 for i in range(len(self.filenameList)-1):
308 for i in range(len(self.filenameList)-1):
@@ -363,24 +363,24 class HFReader(ProcessingUnit):
363
363
364 self.flag_nextfile=False
364 self.flag_nextfile=False
365 else:
365 else:
366 print filename
366 print(filename)
367 print "PRIMERA CONDICION"
367 print("PRIMERA CONDICION")
368 #if self.filename_next_set== int(filename[6:16]):
368 #if self.filename_next_set== int(filename[6:16]):
369 print "TODO BIEN"
369 print("TODO BIEN")
370
370
371 if filename == None:
371 if filename == None:
372 raise ValueError, "corregir"
372 raise ValueError("corregir")
373
373
374 self.dirnameList=[filename]
374 self.dirnameList=[filename]
375 fullfilename=self.path+"/"+filename
375 fullfilename=self.path+"/"+filename
376 self.filenameList=[fullfilename]
376 self.filenameList=[fullfilename]
377 self.filename_next_set=int(filename[6:16])+10
377 self.filename_next_set=int(filename[6:16])+10
378 print "Setting next file",self.filename_next_set
378 print("Setting next file",self.filename_next_set)
379 self.set=int(filename[6:16])
379 self.set=int(filename[6:16])
380 if True:
380 if True:
381 pass
381 pass
382 else:
382 else:
383 print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO"
383 print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO")
384
384
385 else:
385 else:
386 filename =getlastFileFromPath(self.path,self.ext)
386 filename =getlastFileFromPath(self.path,self.ext)
@@ -394,24 +394,24 class HFReader(ProcessingUnit):
394 self.flag_nextfile=False
394 self.flag_nextfile=False
395 else:
395 else:
396 filename=getFileFromSet(self.path,self.ext,self.set)
396 filename=getFileFromSet(self.path,self.ext,self.set)
397 print filename
397 print(filename)
398 print "PRIMERA CONDICION"
398 print("PRIMERA CONDICION")
399 #if self.filename_next_set== int(filename[6:16]):
399 #if self.filename_next_set== int(filename[6:16]):
400 print "TODO BIEN"
400 print("TODO BIEN")
401
401
402 if filename == None:
402 if filename == None:
403 raise ValueError, "corregir"
403 raise ValueError("corregir")
404
404
405 self.dirnameList=[filename]
405 self.dirnameList=[filename]
406 fullfilename=self.path+"/"+filename
406 fullfilename=self.path+"/"+filename
407 self.filenameList=[fullfilename]
407 self.filenameList=[fullfilename]
408 self.filename_next_set=int(filename[6:16])+10
408 self.filename_next_set=int(filename[6:16])+10
409 print "Setting next file",self.filename_next_set
409 print("Setting next file",self.filename_next_set)
410 self.set=int(filename[6:16])
410 self.set=int(filename[6:16])
411 if True:
411 if True:
412 pass
412 pass
413 else:
413 else:
414 print "ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO"
414 print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO")
415
415
416
416
417
417
@@ -434,7 +434,7 class HFReader(ProcessingUnit):
434 self.__selectDataForTimes()
434 self.__selectDataForTimes()
435
435
436 for i in range(len(self.filenameList)):
436 for i in range(len(self.filenameList)):
437 print "%s"% (self.filenameList[i])
437 print("%s"% (self.filenameList[i]))
438
438
439 return
439 return
440
440
@@ -456,7 +456,7 class HFReader(ProcessingUnit):
456 self.__checkPath()
456 self.__checkPath()
457
457
458 fullpath=path
458 fullpath=path
459 print "%s folder was found: " %(fullpath )
459 print("%s folder was found: " %(fullpath ))
460
460
461 if set == None:
461 if set == None:
462 self.set=None
462 self.set=None
@@ -518,7 +518,7 class HFReader(ProcessingUnit):
518 idFile += 1
518 idFile += 1
519 if not (idFile < len(self.filenameList)):
519 if not (idFile < len(self.filenameList)):
520 self.flagNoMoreFiles = 1
520 self.flagNoMoreFiles = 1
521 print "No more Files"
521 print("No more Files")
522 return 0
522 return 0
523 filename = self.filenameList[idFile]
523 filename = self.filenameList[idFile]
524 hfFilePointer =h5py.File(filename,'r')
524 hfFilePointer =h5py.File(filename,'r')
@@ -534,14 +534,14 class HFReader(ProcessingUnit):
534 self.hfFilePointer = hfFilePointer
534 self.hfFilePointer = hfFilePointer
535 hfFilePointer.close()
535 hfFilePointer.close()
536 self.__t0=epoc
536 self.__t0=epoc
537 print "Setting the file: %s"%self.filename
537 print("Setting the file: %s"%self.filename)
538
538
539 return 1
539 return 1
540
540
541 def __setNextFileOnline(self):
541 def __setNextFileOnline(self):
542 """
542 """
543 """
543 """
544 print "SOY NONE",self.set
544 print("SOY NONE",self.set)
545 if self.set==None:
545 if self.set==None:
546 pass
546 pass
547 else:
547 else:
@@ -552,7 +552,7 class HFReader(ProcessingUnit):
552 self.__selectDataForTimes(online=True)
552 self.__selectDataForTimes(online=True)
553 filename = self.filenameList[0]
553 filename = self.filenameList[0]
554 while self.filename_online == filename:
554 while self.filename_online == filename:
555 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
555 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
556 time.sleep(self.__waitForNewFile)
556 time.sleep(self.__waitForNewFile)
557 #self.__findDataForDates(online=True)
557 #self.__findDataForDates(online=True)
558 self.set=self.filename_next_set
558 self.set=self.filename_next_set
@@ -563,27 +563,27 class HFReader(ProcessingUnit):
563 #print filename
563 #print filename
564 sizeoffile=os.path.getsize(filename)
564 sizeoffile=os.path.getsize(filename)
565 if sizeoffile<1670240:
565 if sizeoffile<1670240:
566 print "%s is not the rigth size"%filename
566 print("%s is not the rigth size"%filename)
567 delay=50
567 delay=50
568 print 'waiting %d seconds for delay...'%(delay)
568 print('waiting %d seconds for delay...'%(delay))
569 time.sleep(delay)
569 time.sleep(delay)
570 sizeoffile=os.path.getsize(filename)
570 sizeoffile=os.path.getsize(filename)
571 if sizeoffile<1670240:
571 if sizeoffile<1670240:
572 delay=50
572 delay=50
573 print 'waiting %d more seconds for delay...'%(delay)
573 print('waiting %d more seconds for delay...'%(delay))
574 time.sleep(delay)
574 time.sleep(delay)
575
575
576 sizeoffile=os.path.getsize(filename)
576 sizeoffile=os.path.getsize(filename)
577 if sizeoffile<1670240:
577 if sizeoffile<1670240:
578 delay=50
578 delay=50
579 print 'waiting %d more seconds for delay...'%(delay)
579 print('waiting %d more seconds for delay...'%(delay))
580 time.sleep(delay)
580 time.sleep(delay)
581
581
582 try:
582 try:
583 hfFilePointer=h5py.File(filename,'r')
583 hfFilePointer=h5py.File(filename,'r')
584
584
585 except:
585 except:
586 print "Error reading file %s"%filename
586 print("Error reading file %s"%filename)
587
587
588 self.filename_online=filename
588 self.filename_online=filename
589 epoc=hfFilePointer['t'].value
589 epoc=hfFilePointer['t'].value
@@ -596,7 +596,7 class HFReader(ProcessingUnit):
596 self.flagIsNewFile = 1
596 self.flagIsNewFile = 1
597 self.filename = filename
597 self.filename = filename
598
598
599 print "Setting the file: %s"%self.filename
599 print("Setting the file: %s"%self.filename)
600 return 1
600 return 1
601
601
602 def __getExpParameters(self):
602 def __getExpParameters(self):
@@ -622,7 +622,7 class HFReader(ProcessingUnit):
622
622
623 '''
623 '''
624 if path==None:
624 if path==None:
625 raise ValueError,"The path is not valid"
625 raise ValueError("The path is not valid")
626
626
627 if ext==None:
627 if ext==None:
628 ext = self.ext
628 ext = self.ext
@@ -634,11 +634,11 class HFReader(ProcessingUnit):
634
634
635 #print set
635 #print set
636 if not(online):
636 if not(online):
637 print "Searching files in offline mode..."
637 print("Searching files in offline mode...")
638
638
639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
640 else:
640 else:
641 print "Searching files in online mode..."
641 print("Searching files in online mode...")
642 self.searchFilesOnLine(path, walk,ext,set=set)
642 self.searchFilesOnLine(path, walk,ext,set=set)
643 if set==None:
643 if set==None:
644 pass
644 pass
@@ -659,7 +659,7 class HFReader(ProcessingUnit):
659
659
660
660
661 if not(self.filenameList):
661 if not(self.filenameList):
662 print "There is no files into the folder: %s"%(path)
662 print("There is no files into the folder: %s"%(path))
663 sys.exit(-1)
663 sys.exit(-1)
664
664
665 self.__getExpParameters()
665 self.__getExpParameters()
@@ -745,7 +745,7 class HFReader(ProcessingUnit):
745
745
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
747
747
748 self.dataOut.channelList = range(self.nChannels)
748 self.dataOut.channelList = list(range(self.nChannels))
749
749
750 #self.dataOut.channelIndexList = None
750 #self.dataOut.channelIndexList = None
751
751
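
Note: range() is a lazy sequence on Python 3, so attributes that downstream code treats as real lists, such as channelList, are wrapped in list(...). Sketch:

nChannels = 4                            # illustrative
channelList = list(range(nChannels))     # a real list: [0, 1, 2, 3]
channelList += [4]                       # would raise TypeError on a bare range object
print(channelList)                       # [0, 1, 2, 3, 4]
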
@@ -833,7 +833,7 class HFReader(ProcessingUnit):
833 def getData(self):
833 def getData(self):
834 if self.flagNoMoreFiles:
834 if self.flagNoMoreFiles:
835 self.dataOut.flagNoData = True
835 self.dataOut.flagNoData = True
836 print 'Process finished'
836 print('Process finished')
837 return 0
837 return 0
838
838
839 if self.__hasNotDataInBuffer():
839 if self.__hasNotDataInBuffer():
@@ -111,7 +111,7 class AMISRReader(ProcessingUnit):
111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
112
112
113 if not(self.filenameList):
113 if not(self.filenameList):
114 print "There is no files into the folder: %s"%(path)
114 print("There is no files into the folder: %s"%(path))
115
115
116 sys.exit(-1)
116 sys.exit(-1)
117
117
@@ -177,7 +177,7 class AMISRReader(ProcessingUnit):
177 #filling system header parameters
177 #filling system header parameters
178 self.__nSamples = self.nsa
178 self.__nSamples = self.nsa
179 self.newProfiles = self.nprofiles/self.nchannels
179 self.newProfiles = self.nprofiles/self.nchannels
180 self.__channelList = range(self.nchannels)
180 self.__channelList = list(range(self.nchannels))
181
181
182 self.__frequency = self.frequency[0][0]
182 self.__frequency = self.frequency[0][0]
183
183
@@ -200,7 +200,7 class AMISRReader(ProcessingUnit):
200 self.status = 1
200 self.status = 1
201 else:
201 else:
202 self.status = 0
202 self.status = 0
203 print 'Path:%s does not exists'%self.path
203 print('Path:%s does not exist'%self.path)
204
204
205 return
205 return
206
206
@@ -225,11 +225,11 class AMISRReader(ProcessingUnit):
225
225
226 pat = '\d+.\d+'
226 pat = '\d+.\d+'
227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 dirnameList = filter(lambda x:x!=None,dirnameList)
228 dirnameList = [x for x in dirnameList if x!=None]
229 dirnameList = [x.string for x in dirnameList]
229 dirnameList = [x.string for x in dirnameList]
230 if not(online):
230 if not(online):
231 dirnameList = [self.__selDates(x) for x in dirnameList]
231 dirnameList = [self.__selDates(x) for x in dirnameList]
232 dirnameList = filter(lambda x:x!=None,dirnameList)
232 dirnameList = [x for x in dirnameList if x!=None]
233 if len(dirnameList)>0:
233 if len(dirnameList)>0:
234 self.status = 1
234 self.status = 1
235 self.dirnameList = dirnameList
235 self.dirnameList = dirnameList
@@ -242,8 +242,8 class AMISRReader(ProcessingUnit):
242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
244
244
245 print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
245 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
246 print '........................................'
246 print('........................................')
247 filter_filenameList = []
247 filter_filenameList = []
248 self.filenameList.sort()
248 self.filenameList.sort()
249 #for i in range(len(self.filenameList)-1):
249 #for i in range(len(self.filenameList)-1):
@@ -288,7 +288,7 class AMISRReader(ProcessingUnit):
288
288
289 def __getFilenameList(self, fileListInKeys, dirList):
289 def __getFilenameList(self, fileListInKeys, dirList):
290 for value in fileListInKeys:
290 for value in fileListInKeys:
291 dirName = value.keys()[0]
291 dirName = list(value.keys())[0]
292 for file in value[dirName]:
292 for file in value[dirName]:
293 filename = os.path.join(dirName, file)
293 filename = os.path.join(dirName, file)
294 self.filenameList.append(filename)
294 self.filenameList.append(filename)
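
Note: dict views cannot be indexed on Python 3, hence list(value.keys())[0]. next(iter(value)) is an equivalent that avoids the temporary list; either way, taking "the first key" assumes the dict preserves insertion order (guaranteed only from Python 3.7 on). Sketch with a made-up directory-to-files mapping:

value = {'/data/amisr/20180703': ['file1.h5', 'file2.h5']}   # illustrative

dirName = list(value.keys())[0]   # 2to3 rewrite of value.keys()[0]
dirName = next(iter(value))       # equivalent, no temporary list
print(dirName)
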
@@ -366,7 +366,7 class AMISRReader(ProcessingUnit):
366 self.__selectDataForTimes()
366 self.__selectDataForTimes()
367
367
368 for i in range(len(self.filenameList)):
368 for i in range(len(self.filenameList)):
369 print "%s" %(self.filenameList[i])
369 print("%s" %(self.filenameList[i]))
370
370
371 return
371 return
372
372
@@ -377,7 +377,7 class AMISRReader(ProcessingUnit):
377 idFile += 1
377 idFile += 1
378 if not(idFile < len(self.filenameList)):
378 if not(idFile < len(self.filenameList)):
379 self.flagNoMoreFiles = 1
379 self.flagNoMoreFiles = 1
380 print "No more Files"
380 print("No more Files")
381 return 0
381 return 0
382
382
383 filename = self.filenameList[idFile]
383 filename = self.filenameList[idFile]
@@ -392,7 +392,7 class AMISRReader(ProcessingUnit):
392
392
393 self.amisrFilePointer = amisrFilePointer
393 self.amisrFilePointer = amisrFilePointer
394
394
395 print "Setting the file: %s"%self.filename
395 print("Setting the file: %s"%self.filename)
396
396
397 return 1
397 return 1
398
398
@@ -404,7 +404,7 class AMISRReader(ProcessingUnit):
404 filename = self.filenameList[0]
404 filename = self.filenameList[0]
405 wait = 0
405 wait = 0
406 while self.__filename_online == filename:
406 while self.__filename_online == filename:
407 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
407 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
408 if wait == 5:
408 if wait == 5:
409 return 0
409 return 0
410 sleep(self.__waitForNewFile)
410 sleep(self.__waitForNewFile)
@@ -417,7 +417,7 class AMISRReader(ProcessingUnit):
417 self.amisrFilePointer = h5py.File(filename,'r')
417 self.amisrFilePointer = h5py.File(filename,'r')
418 self.flagIsNewFile = 1
418 self.flagIsNewFile = 1
419 self.filename = filename
419 self.filename = filename
420 print "Setting the file: %s"%self.filename
420 print("Setting the file: %s"%self.filename)
421 return 1
421 return 1
422
422
423
423
@@ -585,7 +585,7 class AMISRReader(ProcessingUnit):
585
585
586 if self.flagNoMoreFiles:
586 if self.flagNoMoreFiles:
587 self.dataOut.flagNoData = True
587 self.dataOut.flagNoData = True
588 print 'Process finished'
588 print('Process finished')
589 return 0
589 return 0
590
590
591 if self.__hasNotDataInBuffer():
591 if self.__hasNotDataInBuffer():
@@ -63,10 +63,10 def load_json(obj):
63 iterable = obj
63 iterable = obj
64
64
65 if isinstance(iterable, dict):
65 if isinstance(iterable, dict):
66 return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, unicode) else v
66 return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, str) else v
67 for k, v in iterable.items()}
67 for k, v in list(iterable.items())}
68 elif isinstance(iterable, (list, tuple)):
68 elif isinstance(iterable, (list, tuple)):
69 return [str(v) if isinstance(v, unicode) else v for v in iterable]
69 return [str(v) if isinstance(v, str) else v for v in iterable]
70
70
71 return iterable
71 return iterable
72
72
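
Note: Python 3 has no separate unicode type, so the isinstance(v, unicode) checks in load_json become isinstance(v, str); the subsequent str(v) is then effectively a no-op, but it keeps the shape of the function unchanged. A cut-down sketch of the same idea (normalize and the sample JSON are illustrative, not the real function):

import json

def normalize(obj):
    # flattens JSON keys/values to plain str; nested dicts and lists are omitted here
    data = json.loads(obj) if isinstance(obj, str) else obj
    return {str(k): (str(v) if isinstance(v, str) else v) for k, v in data.items()}

print(normalize('{"GDALT": "heightList", "count": 3}'))   # {'GDALT': 'heightList', 'count': 3}
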
@@ -107,10 +107,10 class MADReader(JRODataReader, ProcessingUnit):
107 self.ind2DList = load_json(kwargs.get('ind2DList',
107 self.ind2DList = load_json(kwargs.get('ind2DList',
108 "[\"GDALT\"]"))
108 "[\"GDALT\"]"))
109 if self.path is None:
109 if self.path is None:
110 raise ValueError, 'The path is not valid'
110 raise ValueError('The path is not valid')
111
111
112 if format is None:
112 if format is None:
113 raise ValueError, 'The format is not valid choose simple or hdf5'
113 raise ValueError('The format is not valid choose simple or hdf5')
114 elif format.lower() in ('simple', 'txt'):
114 elif format.lower() in ('simple', 'txt'):
115 self.ext = '.txt'
115 self.ext = '.txt'
116 elif format.lower() in ('cedar',):
116 elif format.lower() in ('cedar',):
@@ -122,7 +122,7 class MADReader(JRODataReader, ProcessingUnit):
122 self.fileId = 0
122 self.fileId = 0
123
123
124 if not self.fileList:
124 if not self.fileList:
125 raise Warning, 'There is no files matching these date in the folder: {}. \n Check startDate and endDate'.format(path)
125 raise Warning('There is no files matching these date in the folder: {}. \n Check startDate and endDate'.format(path))
126
126
127 self.setNextFile()
127 self.setNextFile()
128
128
@@ -198,7 +198,7 class MADReader(JRODataReader, ProcessingUnit):
198 log.success('Spatial parameters: {}'.format(','.join(s_parameters)),
198 log.success('Spatial parameters: {}'.format(','.join(s_parameters)),
199 'MADReader')
199 'MADReader')
200
200
201 for param in self.oneDDict.keys():
201 for param in list(self.oneDDict.keys()):
202 if param.lower() not in self.parameters:
202 if param.lower() not in self.parameters:
203 log.warning(
203 log.warning(
204 'Parameter {} not found will be ignored'.format(
204 'Parameter {} not found will be ignored'.format(
@@ -206,7 +206,7 class MADReader(JRODataReader, ProcessingUnit):
206 'MADReader')
206 'MADReader')
207 self.oneDDict.pop(param, None)
207 self.oneDDict.pop(param, None)
208
208
209 for param, value in self.twoDDict.items():
209 for param, value in list(self.twoDDict.items()):
210 if param.lower() not in self.parameters:
210 if param.lower() not in self.parameters:
211 log.warning(
211 log.warning(
212 'Parameter {} not found, it will be ignored'.format(
212 'Parameter {} not found, it will be ignored'.format(
@@ -352,11 +352,11 class MADReader(JRODataReader, ProcessingUnit):
352
352
353 parameters = [None for __ in self.parameters]
353 parameters = [None for __ in self.parameters]
354
354
355 for param, attr in self.oneDDict.items():
355 for param, attr in list(self.oneDDict.items()):
356 x = self.parameters.index(param.lower())
356 x = self.parameters.index(param.lower())
357 setattr(self.dataOut, attr, self.buffer[0][x])
357 setattr(self.dataOut, attr, self.buffer[0][x])
358
358
359 for param, value in self.twoDDict.items():
359 for param, value in list(self.twoDDict.items()):
360 x = self.parameters.index(param.lower())
360 x = self.parameters.index(param.lower())
361 if self.ext == '.txt':
361 if self.ext == '.txt':
362 y = self.parameters.index(self.ind2DList[0].lower())
362 y = self.parameters.index(self.ind2DList[0].lower())
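
Note: keys() and items() return views on Python 3, and 2to3 wraps every loop over them in list(...). The copy only matters when the dictionary is modified during the loop, as in the earlier hunk that pops unknown parameters; read-only loops can iterate the view directly. Sketch:

params = {'gdalt': 'heightList', 'bogus': 'x'}   # illustrative parameter mapping
known = {'gdalt'}

# Mutating while iterating: snapshot the keys first (this is what 2to3's list(...) buys you)
for name in list(params.keys()):
    if name not in known:
        params.pop(name)

# Read-only loop: no list() needed
for name, attr in params.items():
    print(name, attr)
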
@@ -376,7 +376,7 class MADReader(JRODataReader, ProcessingUnit):
376 self.output[value[0]][value[1]] = dummy
376 self.output[value[0]][value[1]] = dummy
377 parameters[value[1]] = param
377 parameters[value[1]] = param
378
378
379 for key, value in self.output.items():
379 for key, value in list(self.output.items()):
380 setattr(self.dataOut, key, numpy.array(value))
380 setattr(self.dataOut, key, numpy.array(value))
381
381
382 self.dataOut.parameters = [s for s in parameters if s]
382 self.dataOut.parameters = [s for s in parameters if s]
@@ -508,7 +508,7 class MADWriter(Operation):
508 'Creating file: {}'.format(self.fullname),
508 'Creating file: {}'.format(self.fullname),
509 'MADWriter')
509 'MADWriter')
510 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
510 self.fp = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
511 except ValueError, e:
511 except ValueError as e:
512 log.error(
512 log.error(
513 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
513 'Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile"',
514 'MADWriter')
514 'MADWriter')
@@ -528,7 +528,7 class MADWriter(Operation):
528 heights = self.dataOut.heightList
528 heights = self.dataOut.heightList
529
529
530 if self.ext == '.dat':
530 if self.ext == '.dat':
531 for key, value in self.twoDDict.items():
531 for key, value in list(self.twoDDict.items()):
532 if isinstance(value, str):
532 if isinstance(value, str):
533 data = getattr(self.dataOut, value)
533 data = getattr(self.dataOut, value)
534 invalid = numpy.isnan(data)
534 invalid = numpy.isnan(data)
@@ -540,7 +540,7 class MADWriter(Operation):
540 data[invalid] = self.missing
540 data[invalid] = self.missing
541
541
542 out = {}
542 out = {}
543 for key, value in self.twoDDict.items():
543 for key, value in list(self.twoDDict.items()):
544 key = key.lower()
544 key = key.lower()
545 if isinstance(value, str):
545 if isinstance(value, str):
546 if 'db' in value.lower():
546 if 'db' in value.lower():
@@ -576,8 +576,8 class MADWriter(Operation):
576 endTime.minute,
576 endTime.minute,
577 endTime.second,
577 endTime.second,
578 endTime.microsecond/10000,
578 endTime.microsecond/10000,
579 self.oneDDict.keys(),
579 list(self.oneDDict.keys()),
580 self.twoDDict.keys(),
580 list(self.twoDDict.keys()),
581 len(index),
581 len(index),
582 **self.extra_args
582 **self.extra_args
583 )
583 )
@@ -29,7 +29,7 class matoffReader(ProcessingUnit):
29 def __setHeader(self, datastuff):
29 def __setHeader(self, datastuff):
30
30
31 self.dataOut.pairsList=[(0,1)]
31 self.dataOut.pairsList=[(0,1)]
32 self.dataOut.channelList = range(np.array(datastuff.get('power')).shape[1])
32 self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this!
33 self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this!
34 self.dataOut.nIncohInt = 20
34 self.dataOut.nIncohInt = 20
35 self.dataOut.nCohInt = 1 #this!
35 self.dataOut.nCohInt = 1 #this!
@@ -39,7 +39,7 class matoffReader(ProcessingUnit):
39 self.dataOut.heightList = np.array(datastuff.get('hts')).flatten()
39 self.dataOut.heightList = np.array(datastuff.get('hts')).flatten()
40
40
41 def __readFile(self, currentfile):
41 def __readFile(self, currentfile):
42 print "Reading from this file:" + currentfile
42 print("Reading from this file:" + currentfile)
43
43
44 #filesplit=currentfile.split("\\")
44 #filesplit=currentfile.split("\\")
45 filesplit=currentfile.split("/")
45 filesplit=currentfile.split("/")
@@ -64,7 +64,7 class matoffReader(ProcessingUnit):
64 # self.utcmatcounter=0
64 # self.utcmatcounter=0
65
65
66 # print self.utcmatcounter
66 # print self.utcmatcounter
67 print self.utcfirst
67 print(self.utcfirst)
68 try:
68 try:
69 datastuff=sio.loadmat(currentfile)
69 datastuff=sio.loadmat(currentfile)
70 except:
70 except:
@@ -115,7 +115,7 class matoffReader(ProcessingUnit):
115 utclist=[]
115 utclist=[]
116
116
117 if not dirList:
117 if not dirList:
118 print "No directories found"
118 print("No directories found")
119 return []
119 return []
120
120
121 #if self.online:
121 #if self.online:
@@ -146,7 +146,7 class matoffReader(ProcessingUnit):
146 utclist.append(utctime)
146 utclist.append(utctime)
147
147
148 if not dirListFiltered:
148 if not dirListFiltered:
149 print "filtro"
149 print("filtro")
150 return []
150 return []
151
151
152 for thisDir in dirListFiltered:
152 for thisDir in dirListFiltered:
@@ -188,7 +188,7 class matoffReader(ProcessingUnit):
188 if nTries > 3:
188 if nTries > 3:
189 break
189 break
190
190
191 print "Waiting %d seconds ..." %seconds
191 print("Waiting %d seconds ..." %seconds)
192 time.sleep(40)
192 time.sleep(40)
193
193
194 if not (len(filelist) > ncurrentfiles):
194 if not (len(filelist) > ncurrentfiles):
@@ -227,7 +227,7 class matoffReader(ProcessingUnit):
227
227
228 self.fileList = fileList
228 self.fileList = fileList
229
229
230 print "fin setup"
230 print("fin setup")
231
231
232 def run(self,path=None,startDate=None, endDate=None,
232 def run(self,path=None,startDate=None, endDate=None,
233 startTime=datetime.time(0,0,0),
233 startTime=datetime.time(0,0,0),
@@ -251,7 +251,7 class matoffReader(ProcessingUnit):
251
251
252 if not self.fileList:
252 if not self.fileList:
253 self.dataOut.flagNoData = True
253 self.dataOut.flagNoData = True
254 print "lista vacia"
254 print("lista vacia")
255 return
255 return
256
256
257 currentfile = self.__getNextFile()
257 currentfile = self.__getNextFile()
@@ -48,7 +48,7 class Header(object):
48 message += self.__class__.__name__.upper() + "\n"
48 message += self.__class__.__name__.upper() + "\n"
49 message += "#" * 50 + "\n"
49 message += "#" * 50 + "\n"
50
50
51 keyList = self.__dict__.keys()
51 keyList = list(self.__dict__.keys())
52 keyList.sort()
52 keyList.sort()
53
53
54 for key in keyList:
54 for key in keyList:
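
Note: a keys view has no .sort() method, hence list(self.__dict__.keys()) before keyList.sort(). sorted() expresses the same thing in one step. A cut-down stand-in for the header dump (only two attributes, for illustration):

class HeaderSketch(object):
    def __init__(self):
        self.size = 48
        self.version = 3

    def dump(self):
        # 2to3 form: keyList = list(self.__dict__.keys()); keyList.sort()
        for key in sorted(self.__dict__):          # equivalent, works with the view
            print("%s = %s" % (key, getattr(self, key)))

HeaderSketch().dump()
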
@@ -333,7 +333,7 class SRVIHeader(Header):
333 self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0])
333 self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0])
334 self.SizeOfSRVI1 = header['SizeOfSRVI1'][0]
334 self.SizeOfSRVI1 = header['SizeOfSRVI1'][0]
335 # 16
335 # 16
336 print 'Pointer fp SRVIheader', fp.tell()
336 print('Pointer fp SRVIheader', fp.tell())
337
337
338
338
339 SRVI_STRUCTURE = numpy.dtype([
339 SRVI_STRUCTURE = numpy.dtype([
@@ -435,9 +435,9 class RecordHeader(Header):
435 # print 'Datasize',self.Datasize
435 # print 'Datasize',self.Datasize
436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
437
437
438 print '=============================================='
438 print('==============================================')
439
439
440 print '=============================================='
440 print('==============================================')
441
441
442 return 1
442 return 1
443
443
@@ -572,7 +572,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
572
572
573 if self.flagNoMoreFiles:
573 if self.flagNoMoreFiles:
574 self.dataOut.flagNoData = True
574 self.dataOut.flagNoData = True
575 print 'NoData se vuelve true'
575 print('NoData se vuelve true')
576 return 0
576 return 0
577
577
578 self.fp = self.path
578 self.fp = self.path
@@ -602,7 +602,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
602 '''
602 '''
603
603
604 # The address of the folder is generated the name of the .fdt file that will be read
604 # The address of the folder is generated the name of the .fdt file that will be read
605 print "File: ", self.fileSelector + 1
605 print("File: ", self.fileSelector + 1)
606
606
607 if self.fileSelector < len(self.filenameList):
607 if self.fileSelector < len(self.filenameList):
608
608
@@ -642,7 +642,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
642 self.readBlock() # Block reading
642 self.readBlock() # Block reading
643
643
644 else:
644 else:
645 print 'readFile FlagNoData becomes true'
645 print('readFile FlagNoData becomes true')
646 self.flagNoMoreFiles = True
646 self.flagNoMoreFiles = True
647 self.dataOut.flagNoData = True
647 self.dataOut.flagNoData = True
648 self.FileHeaderFlag == True
648 self.FileHeaderFlag == True
@@ -673,7 +673,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
673 self.blocksize = self.srviHeader.SizeOfDataBlock1 # Se obtiene el tamaño del bloque
673 self.blocksize = self.srviHeader.SizeOfDataBlock1 # Se obtiene el tamaño del bloque
674
674
675 if self.blocksize == 148:
675 if self.blocksize == 148:
676 print 'blocksize == 148 bug'
676 print('blocksize == 148 bug')
677 jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1)
677 jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1)
678
678
679 # Se obtiene la cabecera del SRVI
679 # Se obtiene la cabecera del SRVI
@@ -691,7 +691,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
691 npw1 = self.recordheader.npw1
691 npw1 = self.recordheader.npw1
692 npw2 = self.recordheader.npw2
692 npw2 = self.recordheader.npw2
693
693
694 self.dataOut.channelList = range(1)
694 self.dataOut.channelList = list(range(1))
695 self.dataOut.nIncohInt = self.Num_inCoh
695 self.dataOut.nIncohInt = self.Num_inCoh
696 self.dataOut.nProfiles = self.Num_Bins
696 self.dataOut.nProfiles = self.Num_Bins
697 self.dataOut.nCohInt = 1
697 self.dataOut.nCohInt = 1
@@ -701,7 +701,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
701
701
702 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
702 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
703 self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \
703 self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \
704 numpy.array(range(self.Num_Hei)) * self.__deltaHeigth
704 numpy.array(list(range(self.Num_Hei))) * self.__deltaHeigth
705
705
706 self.HSDVsign = numpy.fromfile(self.fp, [('HSDV', numpy.str_, 4)], 1)
706 self.HSDVsign = numpy.fromfile(self.fp, [('HSDV', numpy.str_, 4)], 1)
707 self.SizeHSDV = numpy.fromfile(self.fp, [('SizeHSDV', '<i4')], 1)
707 self.SizeHSDV = numpy.fromfile(self.fp, [('SizeHSDV', '<i4')], 1)
@@ -766,8 +766,8 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
766
766
767 self.dataOut.COFA = numpy.array([self.COFA_Co, self.COFA_Cx])
767 self.dataOut.COFA = numpy.array([self.COFA_Co, self.COFA_Cx])
768
768
769 print ' '
769 print(' ')
770 print 'SPC', numpy.shape(self.dataOut.data_spc)
770 print('SPC', numpy.shape(self.dataOut.data_spc))
771 # print 'SPC',self.dataOut.data_spc
771 # print 'SPC',self.dataOut.data_spc
772
772
773 noinor1 = 713031680
773 noinor1 = 713031680
@@ -777,7 +777,7 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader
777 npw2 = 1 # 0**(npw2/10) * noinor1 * noinor2
777 npw2 = 1 # 0**(npw2/10) * noinor1 * noinor2
778 self.dataOut.NPW = numpy.array([npw1, npw2])
778 self.dataOut.NPW = numpy.array([npw1, npw2])
779
779
780 print ' '
780 print(' ')
781
781
782 self.data_spc = numpy.transpose(self.data_spc, (2, 1, 0))
782 self.data_spc = numpy.transpose(self.data_spc, (2, 1, 0))
783 self.data_spc = numpy.fft.fftshift(self.data_spc, axes=1)
783 self.data_spc = numpy.fft.fftshift(self.data_spc, axes=1)
@@ -7,7 +7,7 import datetime
7
7
8 from schainpy.model.data.jrodata import *
8 from schainpy.model.data.jrodata import *
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 # from jroIO_base import *
10 # from .jroIO_base import *
11 from schainpy.model.io.jroIO_base import *
11 from schainpy.model.io.jroIO_base import *
12 import schainpy
12 import schainpy
13
13
@@ -87,22 +87,22 class ParamReader(ProcessingUnit):
87 startTime = kwargs['startTime']
87 startTime = kwargs['startTime']
88 endTime = kwargs['endTime']
88 endTime = kwargs['endTime']
89 walk = kwargs['walk']
89 walk = kwargs['walk']
90 if kwargs.has_key('ext'):
90 if 'ext' in kwargs:
91 ext = kwargs['ext']
91 ext = kwargs['ext']
92 else:
92 else:
93 ext = '.hdf5'
93 ext = '.hdf5'
94 if kwargs.has_key('timezone'):
94 if 'timezone' in kwargs:
95 self.timezone = kwargs['timezone']
95 self.timezone = kwargs['timezone']
96 else:
96 else:
97 self.timezone = 'lt'
97 self.timezone = 'lt'
98
98
99 print "[Reading] Searching files in offline mode ..."
99 print("[Reading] Searching files in offline mode ...")
100 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
100 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
101 startTime=startTime, endTime=endTime,
101 startTime=startTime, endTime=endTime,
102 ext=ext, walk=walk)
102 ext=ext, walk=walk)
103
103
104 if not(filenameList):
104 if not(filenameList):
105 print "There is no files into the folder: %s"%(path)
105 print("There is no files into the folder: %s"%(path))
106 sys.exit(-1)
106 sys.exit(-1)
107
107
108 self.fileIndex = -1
108 self.fileIndex = -1
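
Note: dict.has_key() was removed in Python 3; the in operator replaces it, and kwargs.get() with a default collapses the if/else entirely. Sketch:

def setup(**kwargs):
    # Python 2 only:  if kwargs.has_key('ext'): ...
    ext = kwargs['ext'] if 'ext' in kwargs else '.hdf5'
    timezone = kwargs.get('timezone', 'lt')   # shorter equivalent
    return ext, timezone

print(setup(timezone='ut'))   # ('.hdf5', 'ut')
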
@@ -134,16 +134,16 class ParamReader(ProcessingUnit):
134 dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
134 dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
135
135
136 if dateList == []:
136 if dateList == []:
137 print "[Reading] No *%s files in %s from %s to %s)"%(ext, path,
137 print("[Reading] No *%s files in %s from %s to %s)"%(ext, path,
138 datetime.datetime.combine(startDate,startTime).ctime(),
138 datetime.datetime.combine(startDate,startTime).ctime(),
139 datetime.datetime.combine(endDate,endTime).ctime())
139 datetime.datetime.combine(endDate,endTime).ctime()))
140
140
141 return None, None
141 return None, None
142
142
143 if len(dateList) > 1:
143 if len(dateList) > 1:
144 print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)
144 print("[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate))
145 else:
145 else:
146 print "[Reading] data was found for the date %s" %(dateList[0])
146 print("[Reading] data was found for the date %s" %(dateList[0]))
147
147
148 filenameList = []
148 filenameList = []
149 datetimeList = []
149 datetimeList = []
@@ -172,11 +172,11 class ParamReader(ProcessingUnit):
172 datetimeList.append(thisDatetime)
172 datetimeList.append(thisDatetime)
173
173
174 if not(filenameList):
174 if not(filenameList):
175 print "[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
175 print("[Reading] Any file was found int time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
176 return None, None
176 return None, None
177
177
178 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
178 print("[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime))
179 print
179 print()
180
180
181 # for i in range(len(filenameList)):
181 # for i in range(len(filenameList)):
182 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
182 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
@@ -218,7 +218,7 class ParamReader(ProcessingUnit):
218
218
219 except IOError:
219 except IOError:
220 traceback.print_exc()
220 traceback.print_exc()
221 raise IOError, "The file %s can't be opened" %(filename)
221 raise IOError("The file %s can't be opened" %(filename))
222 #chino rata
222 #chino rata
223 #In case has utctime attribute
223 #In case has utctime attribute
224 grp2 = grp1['utctime']
224 grp2 = grp1['utctime']
@@ -271,7 +271,7 class ParamReader(ProcessingUnit):
271 idFile = self.fileIndex
271 idFile = self.fileIndex
272
272
273 if not(idFile < len(self.filenameList)):
273 if not(idFile < len(self.filenameList)):
274 print "No more Files"
274 print("No more Files")
275 return 0
275 return 0
276
276
277 filename = self.filenameList[idFile]
277 filename = self.filenameList[idFile]
@@ -282,7 +282,7 class ParamReader(ProcessingUnit):
282
282
283 self.fp = filePointer
283 self.fp = filePointer
284
284
285 print "Setting the file: %s"%self.filename
285 print("Setting the file: %s"%self.filename)
286
286
287 # self.__readMetadata()
287 # self.__readMetadata()
288 self.__setBlockList()
288 self.__setBlockList()
@@ -361,7 +361,7 class ParamReader(ProcessingUnit):
361
361
362 listMetaname = []
362 listMetaname = []
363 listMetadata = []
363 listMetadata = []
364 for item in gp.items():
364 for item in list(gp.items()):
365 name = item[0]
365 name = item[0]
366
366
367 if name=='array dimensions':
367 if name=='array dimensions':
@@ -389,7 +389,7 class ParamReader(ProcessingUnit):
389 listdataname = []
389 listdataname = []
390 listdata = []
390 listdata = []
391
391
392 for item in grp.items():
392 for item in list(grp.items()):
393 name = item[0]
393 name = item[0]
394 listdataname.append(name)
394 listdataname.append(name)
395
395
@@ -921,7 +921,7 class ParamWriter(Operation):
921 # self.nDims = nDims
921 # self.nDims = nDims
922 # self.nDimsForDs = nDimsForDs
922 # self.nDimsForDs = nDimsForDs
923 #Saving variables
923 #Saving variables
924 print 'Writing the file: %s'%filename
924 print('Writing the file: %s'%filename)
925 self.filename = filename
925 self.filename = filename
926 # self.fp = fp
926 # self.fp = fp
927 # self.grp = grp
927 # self.grp = grp
@@ -5,7 +5,7 Created on Jul 2, 2014
5 '''
5 '''
6 import numpy
6 import numpy
7
7
8 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 from schainpy.model.data.jrodata import Spectra
11 from schainpy.model.data.jrodata import Spectra
@@ -325,7 +325,7 class SpectraReader(JRODataReader, ProcessingUnit):
325
325
326 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
326 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
327
327
328 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
328 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
329
329
330 self.dataOut.flagShiftFFT = True #Data is always shifted
330 self.dataOut.flagShiftFFT = True #Data is always shifted
331
331
@@ -354,7 +354,7 class SpectraReader(JRODataReader, ProcessingUnit):
354
354
355 if self.flagNoMoreFiles:
355 if self.flagNoMoreFiles:
356 self.dataOut.flagNoData = True
356 self.dataOut.flagNoData = True
357 print 'Process finished'
357 print('Process finished')
358 return 0
358 return 0
359
359
360 self.flagDiscontinuousBlock = 0
360 self.flagDiscontinuousBlock = 0
@@ -19,7 +19,7 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
19 try:
19 try:
20 import digital_rf_hdf5
20 import digital_rf_hdf5
21 except:
21 except:
22 print 'You should install "digital_rf_hdf5" module if you want to read USRP data'
22 print('You should install "digital_rf_hdf5" module if you want to read USRP data')
23
23
24 class USRPReader(ProcessingUnit):
24 class USRPReader(ProcessingUnit):
25 '''
25 '''
@@ -209,7 +209,7 class USRPReader(ProcessingUnit):
209 '''
209 '''
210
210
211 if not os.path.isdir(path):
211 if not os.path.isdir(path):
212 raise ValueError, "[Reading] Directory %s does not exist" %path
212 raise ValueError("[Reading] Directory %s does not exist" %path)
213
213
214 try:
214 try:
215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
@@ -219,10 +219,10 class USRPReader(ProcessingUnit):
219 channelNameList = self.digitalReadObj.get_channels()
219 channelNameList = self.digitalReadObj.get_channels()
220
220
221 if not channelNameList:
221 if not channelNameList:
222 raise ValueError, "[Reading] Directory %s does not have any files" %path
222 raise ValueError("[Reading] Directory %s does not have any files" %path)
223
223
224 if not channelList:
224 if not channelList:
225 channelList = range(len(channelNameList))
225 channelList = list(range(len(channelNameList)))
226
226
227 ########## Reading metadata ######################
227 ########## Reading metadata ######################
228
228
@@ -241,7 +241,7 class USRPReader(ProcessingUnit):
241 self.__frequency = this_metadata_file['fc'].value
241 self.__frequency = this_metadata_file['fc'].value
242
242
243 if not self.__frequency:
243 if not self.__frequency:
244 raise ValueError, "Center Frequency is not defined in metadata file"
244 raise ValueError("Center Frequency is not defined in metadata file")
245
245
246 try:
246 try:
247 self.__timezone = this_metadata_file['timezone'].value
247 self.__timezone = this_metadata_file['timezone'].value
@@ -299,7 +299,7 class USRPReader(ProcessingUnit):
299
299
300 if not nSamples:
300 if not nSamples:
301 if not ippKm:
301 if not ippKm:
302 raise ValueError, "[Reading] nSamples or ippKm should be defined"
302 raise ValueError("[Reading] nSamples or ippKm should be defined")
303
303
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
305
305
@@ -346,14 +346,14 class USRPReader(ProcessingUnit):
346 self.__setFileHeader()
346 self.__setFileHeader()
347 self.isConfig = True
347 self.isConfig = True
348
348
349 print "[Reading] USRP Data was found from %s to %s " %(
349 print("[Reading] USRP Data was found from %s to %s " %(
350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
352 )
352 ))
353
353
354 print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
356 )
356 ))
357
357
358 def __reload(self):
358 def __reload(self):
359
359
@@ -366,7 +366,7 class USRPReader(ProcessingUnit):
366 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
366 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
367 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
367 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
368 # )
368 # )
369 print "[Reading] reloading metadata ..."
369 print("[Reading] reloading metadata ...")
370
370
371 try:
371 try:
372 self.digitalReadObj.reload(complete_update=True)
372 self.digitalReadObj.reload(complete_update=True)
@@ -380,11 +380,11 class USRPReader(ProcessingUnit):
380
380
381 if end_index > self.__endUTCSecond*self.__sample_rate:
381 if end_index > self.__endUTCSecond*self.__sample_rate:
382 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
382 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
383 print
383 print()
384 print "[Reading] New timerange found [%s, %s] " %(
384 print("[Reading] New timerange found [%s, %s] " %(
385 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
385 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
386 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
386 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
387 )
387 ))
388
388
389 return True
389 return True
390
390
@@ -399,7 +399,7 class USRPReader(ProcessingUnit):
399 self.__thisUnixSample += self.__samples_to_read
399 self.__thisUnixSample += self.__samples_to_read
400
400
401 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
401 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
402 print "[Reading] There are no more data into selected time-range"
402 print("[Reading] There are no more data into selected time-range")
403
403
404 self.__reload()
404 self.__reload()
405
405
@@ -418,17 +418,17 class USRPReader(ProcessingUnit):
418 self.__samples_to_read,
418 self.__samples_to_read,
419 thisChannelName)
419 thisChannelName)
420
420
421 except IOError, e:
421 except IOError as e:
422 #read next profile
422 #read next profile
423 self.__flagDiscontinuousBlock = True
423 self.__flagDiscontinuousBlock = True
424 print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
424 print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
425 break
425 break
426
426
427 if result.shape[0] != self.__samples_to_read:
427 if result.shape[0] != self.__samples_to_read:
428 self.__flagDiscontinuousBlock = True
428 self.__flagDiscontinuousBlock = True
429 print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
429 print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
430 result.shape[0],
430 result.shape[0],
431 self.__samples_to_read)
431 self.__samples_to_read))
432 break
432 break
433
433
434 self.__data_buffer[indexChannel,:] = result*volt_scale
434 self.__data_buffer[indexChannel,:] = result*volt_scale
@@ -442,9 +442,9 class USRPReader(ProcessingUnit):
442 if not dataOk:
442 if not dataOk:
443 return False
443 return False
444
444
445 print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
445 print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
446 self.__samples_to_read,
446 self.__samples_to_read,
447 self.__timeInterval)
447 self.__timeInterval))
448
448
449 self.__bufferIndex = 0
449 self.__bufferIndex = 0
450
450
@@ -490,7 +490,7 class USRPReader(ProcessingUnit):
490 return False
490 return False
491
491
492 if self.__flagDiscontinuousBlock:
492 if self.__flagDiscontinuousBlock:
493 print '[Reading] discontinuous block found ... continue with the next block'
493 print('[Reading] discontinuous block found ... continue with the next block')
494 continue
494 continue
495
495
496 if not self.__online:
496 if not self.__online:
@@ -500,7 +500,7 class USRPReader(ProcessingUnit):
500 if err_counter > nTries:
500 if err_counter > nTries:
501 return False
501 return False
502
502
503 print '[Reading] waiting %d seconds to read a new block' %seconds
503 print('[Reading] waiting %d seconds to read a new block' %seconds)
504 sleep(seconds)
504 sleep(seconds)
505
505
506 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
506 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
@@ -532,7 +532,7 class USRPReader(ProcessingUnit):
532 '''
532 '''
533 '''
533 '''
534
534
535 print self.profileIndex
535 print(self.profileIndex)
536
536
537 def run(self, **kwargs):
537 def run(self, **kwargs):
538 '''
538 '''
@@ -5,7 +5,7 Created on Jul 15, 2014
5 '''
5 '''
6 import time
6 import time
7 import threading
7 import threading
8 import cPickle
8 import pickle
9
9
10 # try:
10 # try:
11 # from gevent import sleep
11 # from gevent import sleep
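Note: the hunk above simply renames cPickle to pickle, which is all Python 3 needs (the C-accelerated pickler is used automatically). The guarded import sketched below is only worth keeping if the module must still run on Python 2 during the migration; the payload contents are invented:

    try:
        import cPickle as pickle   # Python 2: explicit C-accelerated pickler
    except ImportError:
        import pickle              # Python 3: the C implementation is the default

    # protocol=2 keeps the serialized data readable from both interpreter versions
    payload = pickle.dumps({'fc': 49.92e6, 'timezone': 'UTC'}, protocol=2)
    print(pickle.loads(payload))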
@@ -109,9 +109,9 class USRPReaderAPI(USRPReader, threading.Thread):
109 '''
109 '''
110
110
111 if not self.isConfig:
111 if not self.isConfig:
112 raise RuntimeError, 'setup() method has to be called before start()'
112 raise RuntimeError('setup() method has to be called before start()')
113
113
114 print "Running ..."
114 print("Running ...")
115
115
116 while True:
116 while True:
117
117
@@ -122,7 +122,7 class USRPReaderAPI(USRPReader, threading.Thread):
122 if not self.getData():
122 if not self.getData():
123 break
123 break
124
124
125 print ".",
125 print(".", end=' ')
126
126
127 self.__mySerial = obj2Serial(self.dataOut,
127 self.__mySerial = obj2Serial(self.dataOut,
128 keyList = self.__DATAKEYLIST,
128 keyList = self.__DATAKEYLIST,
@@ -134,6 +134,6 class USRPReaderAPI(USRPReader, threading.Thread):
134
134
135 # sleep(0.1)
135 # sleep(0.1)
136
136
137 print "Closing thread"
137 print("Closing thread")
138
138
139 return No newline at end of file
139 return
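Note: the trailing-comma print in the hunk above becomes print(".", end=' ') so the progress dots stay on one line. A short sketch of the converted idiom (the loop itself is illustrative only):

    import sys

    for _ in range(3):
        # Python 2 statement:   print ".",       (trailing comma suppresses the newline)
        # Python 3 function, as converted above:
        print(".", end=' ')
        sys.stdout.flush()     # make each dot appear immediately
    print()                    # finally terminate the line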
@@ -6,13 +6,13 Created on Jul 2, 2014
6
6
7 import numpy
7 import numpy
8
8
9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 from .jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 from schainpy.model.data.jrodata import Voltage
12 from schainpy.model.data.jrodata import Voltage
13 import zmq
13 import zmq
14 import tempfile
14 import tempfile
15 from StringIO import StringIO
15 from io import StringIO
16 # from _sha import blocksize
16 # from _sha import blocksize
17
17
18
18
@@ -286,7 +286,7 class VoltageReader(JRODataReader, ProcessingUnit):
286 self.dataOut.heightList = numpy.arange(
286 self.dataOut.heightList = numpy.arange(
287 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
287 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
288
288
289 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
289 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
290
290
291 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
291 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
292
292
@@ -307,12 +307,12 class VoltageReader(JRODataReader, ProcessingUnit):
307 return
307 return
308
308
309 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
309 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1. / self.nTxs) != 0:
310 raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
310 raise ValueError("1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
311 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock)
311 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock))
312
312
313 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
313 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
314 raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" % (
314 raise ValueError("nTxs (=%d), should be a multiple of nHeights (=%d)" % (
315 self.nTxs, self.processingHeaderObj.nHeights)
315 self.nTxs, self.processingHeaderObj.nHeights))
316
316
317 self.datablock = self.datablock.reshape(
317 self.datablock = self.datablock.reshape(
318 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs))
318 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs))
@@ -345,7 +345,7 class VoltageReader(JRODataReader, ProcessingUnit):
345 elif datatype == 5:
345 elif datatype == 5:
346 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
346 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
347 else:
347 else:
348 raise ValueError, 'Data type was not defined'
348 raise ValueError('Data type was not defined')
349
349
350 self.dtype = datatype_str
350 self.dtype = datatype_str
351 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
351 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
@@ -378,7 +378,7 class VoltageReader(JRODataReader, ProcessingUnit):
378 self.readFirstHeaderFromServer()
378 self.readFirstHeaderFromServer()
379
379
380 timestamp = self.basicHeaderObj.get_datatime()
380 timestamp = self.basicHeaderObj.get_datatime()
381 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
381 print('[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp))
382 current_pointer_location = self.blockPointer
382 current_pointer_location = self.blockPointer
383 junk = numpy.fromstring(
383 junk = numpy.fromstring(
384 block[self.blockPointer:], self.dtype, self.blocksize)
384 block[self.blockPointer:], self.dtype, self.blocksize)
@@ -463,7 +463,7 class VoltageReader(JRODataReader, ProcessingUnit):
463 """
463 """
464 if self.flagNoMoreFiles:
464 if self.flagNoMoreFiles:
465 self.dataOut.flagNoData = True
465 self.dataOut.flagNoData = True
466 print 'Process finished'
466 print('Process finished')
467 return 0
467 return 0
468 self.flagDiscontinuousBlock = 0
468 self.flagDiscontinuousBlock = 0
469 self.flagIsNewBlock = 0
469 self.flagIsNewBlock = 0
@@ -98,7 +98,7 class JULIAParamReader(JRODataReader, ProcessingUnit):
98 self.format = format
98 self.format = format
99
99
100 if self.path is None:
100 if self.path is None:
101 raise ValueError, "The path is not valid"
101 raise ValueError("The path is not valid")
102
102
103 if ext is None:
103 if ext is None:
104 ext = self.ext
104 ext = self.ext
@@ -13,7 +13,7 import tarfile
13
13
14 import numpy
14 import numpy
15
15
16 from utils import folder_in_range
16 from .utils import folder_in_range
17
17
18 from schainpy.model.io.jroIO_base import JRODataReader
18 from schainpy.model.io.jroIO_base import JRODataReader
19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
@@ -69,14 +69,14 class PXReader(JRODataReader, ProcessingUnit):
69 self.ele = kwargs.get('ext', '')
69 self.ele = kwargs.get('ext', '')
70
70
71 if self.path is None:
71 if self.path is None:
72 raise ValueError, 'The path is not valid'
72 raise ValueError('The path is not valid')
73
73
74 self.search_files(path, startDate, endDate, startTime, endTime, walk)
74 self.search_files(path, startDate, endDate, startTime, endTime, walk)
75 self.cursor = 0
75 self.cursor = 0
76 self.counter_records = 0
76 self.counter_records = 0
77
77
78 if not self.files:
78 if not self.files:
79 raise Warning, 'There is no files matching these date in the folder: {}. \n Check startDate and endDate'.format(path)
79 raise Warning('There are no files matching these dates in the folder: {}. \n Check startDate and endDate'.format(path))
80
80
81 def search_files(self, path, startDate, endDate, startTime, endTime, walk):
81 def search_files(self, path, startDate, endDate, startTime, endTime, walk):
82 '''
82 '''
@@ -136,7 +136,7 class PXReader(JRODataReader, ProcessingUnit):
136 self.files[dt] = []
136 self.files[dt] = []
137 self.files[dt].append(fullname)
137 self.files[dt].append(fullname)
138
138
139 self.dates = self.files.keys()
139 self.dates = list(self.files.keys())
140 self.dates.sort()
140 self.dates.sort()
141
141
142 return
142 return
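Note: in Python 3 dict.keys() returns a view object with no sort() method, which is why the conversion above wraps it in list() before sorting. A self-contained sketch with an invented files mapping; sorted() over the dict is an equivalent one-liner:

    import datetime

    files = {                                    # invented stand-in for self.files
        datetime.date(2018, 7, 3): ['d2018184_000.dat'],
        datetime.date(2018, 7, 1): ['d2018182_000.dat'],
    }

    dates = list(files.keys())                   # materialize the view, as in the patch
    dates.sort()

    assert dates == sorted(files)                # equivalent, shorter form
    print(dates)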
@@ -203,7 +203,7 class PXReader(JRODataReader, ProcessingUnit):
203 if not self.files:
203 if not self.files:
204 return 0
204 return 0
205
205
206 self.dates = self.files.keys()
206 self.dates = list(self.files.keys())
207 self.dates.sort()
207 self.dates.sort()
208 self.cursor = 0
208 self.cursor = 0
209
209
@@ -348,4 +348,3 class PXReader(JRODataReader, ProcessingUnit):
348 self.set_output()
348 self.set_output()
349
349
350 return 1
350 return 1
351
@@ -4,13 +4,13 $Author: murco $
4 $Id: Processor.py 1 2012-11-12 18:56:07Z murco $
4 $Id: Processor.py 1 2012-11-12 18:56:07Z murco $
5 '''
5 '''
6
6
7 from jroproc_voltage import *
7 from .jroproc_voltage import *
8 from jroproc_spectra import *
8 from .jroproc_spectra import *
9 from jroproc_heispectra import *
9 from .jroproc_heispectra import *
10 from jroproc_amisr import *
10 from .jroproc_amisr import *
11 from jroproc_correlation import *
11 from .jroproc_correlation import *
12 from jroproc_parameters import *
12 from .jroproc_parameters import *
13 from jroproc_spectra_lags import *
13 from .jroproc_spectra_lags import *
14 from jroproc_spectra_acf import *
14 from .jroproc_spectra_acf import *
15 from bltrproc_parameters import *
15 from .bltrproc_parameters import *
16 from pxproc_parameters import *
16 from .pxproc_parameters import *
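Note: Python 3 dropped implicit relative imports (PEP 328), so every intra-package import in this changeset gains a leading dot, as in the hunk above. A sketch of the idea; the package layout named in the comments is an assumption for illustration:

    # proc/__init__.py  (hypothetical location of the imports shown above)
    #
    # Python 2 resolved  "from jroproc_voltage import *"  against the package
    # directory first; Python 3 treats that as an absolute import, so sibling
    # modules need an explicit relative (or fully dotted) form:
    from .jroproc_voltage import *
    # equivalently: from schainpy.model.proc.jroproc_voltage import *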
@@ -12,7 +12,7 from time import gmtime
12
12
13 from numpy import transpose
13 from numpy import transpose
14
14
15 from jroproc_base import ProcessingUnit, Operation
15 from .jroproc_base import ProcessingUnit, Operation
16 from schainpy.model.data.jrodata import Parameters
16 from schainpy.model.data.jrodata import Parameters
17
17
18
18
@@ -95,7 +95,7 class OutliersFilter(Operation):
95 npoints - number of points for mask filter
95 npoints - number of points for mask filter
96 '''
96 '''
97
97
98 print ' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor)
98 print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor))
99
99
100
100
101 yaxis = self.dataOut.heightList
101 yaxis = self.dataOut.heightList
@@ -400,4 +400,3 class OutliersFilter(Operation):
400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401
401
402
402
403 No newline at end of file
@@ -2,7 +2,7
2 @author: Daniel Suarez
2 @author: Daniel Suarez
3 '''
3 '''
4 import numpy
4 import numpy
5 from jroproc_base import ProcessingUnit, Operation
5 from .jroproc_base import ProcessingUnit, Operation
6 from schainpy.model.data.jroamisr import AMISR
6 from schainpy.model.data.jroamisr import AMISR
7
7
8 class AMISRProc(ProcessingUnit):
8 class AMISRProc(ProcessingUnit):
@@ -24,16 +24,16 class PrintInfo(Operation):
24 def run(self, dataOut):
24 def run(self, dataOut):
25
25
26 if not self.__isPrinted:
26 if not self.__isPrinted:
27 print 'Number of Records by File: %d'%dataOut.nRecords
27 print('Number of Records by File: %d'%dataOut.nRecords)
28 print 'Number of Pulses: %d'%dataOut.nProfiles
28 print('Number of Pulses: %d'%dataOut.nProfiles)
29 print 'Number of Pulses by Frame: %d'%dataOut.npulseByFrame
29 print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
30 print 'Number of Samples by Pulse: %d'%len(dataOut.heightList)
30 print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
31 print 'Ipp Seconds: %f'%dataOut.ippSeconds
31 print('Ipp Seconds: %f'%dataOut.ippSeconds)
32 print 'Number of Beams: %d'%dataOut.nBeams
32 print('Number of Beams: %d'%dataOut.nBeams)
33 print 'BeamCodes:'
33 print('BeamCodes:')
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in dataOut.beamCodeDict.items()]
34 beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
35 for b in beamStrList:
35 for b in beamStrList:
36 print b
36 print(b)
37 self.__isPrinted = True
37 self.__isPrinted = True
38
38
39 return
39 return
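Note: 2to3 conservatively wraps dict.items() in list() as in the hunk above; iterating the view directly is equivalent as long as the dict is not modified inside the loop. Sketch with invented beam codes:

    beamCodeDict = {0: (32769, 14.04, 35.0, 1.0),    # invented example values
                    1: (32770, -14.04, 35.0, 1.0)}

    beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'
                   % (k, v[0], v[1], v[2], v[3]) for k, v in beamCodeDict.items()]
    for b in beamStrList:
        print(b)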
@@ -93,7 +93,7 class BeamSelector(Operation):
93 return 1
93 return 1
94
94
95 else:
95 else:
96 raise ValueError, "BeamSelector needs beam value"
96 raise ValueError("BeamSelector needs beam value")
97
97
98 return 0
98 return 0
99
99
@@ -117,7 +117,7 class ProfileToChannels(Operation):
117 dataOut.flagNoData = True
117 dataOut.flagNoData = True
118
118
119 if not(self.__isConfig):
119 if not(self.__isConfig):
120 nchannels = len(dataOut.beamRangeDict.keys())
120 nchannels = len(list(dataOut.beamRangeDict.keys()))
121 nsamples = dataOut.nHeights
121 nsamples = dataOut.nHeights
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
122 self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
123 dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
@@ -136,7 +136,7 class ProfileToChannels(Operation):
136 if self.__counter_chan >= self.buffer.shape[0]:
136 if self.__counter_chan >= self.buffer.shape[0]:
137 self.__counter_chan = 0
137 self.__counter_chan = 0
138 dataOut.data = self.buffer.copy()
138 dataOut.data = self.buffer.copy()
139 dataOut.channelList = range(self.buffer.shape[0])
139 dataOut.channelList = list(range(self.buffer.shape[0]))
140 self.__isConfig = False
140 self.__isConfig = False
141 dataOut.flagNoData = False
141 dataOut.flagNoData = False
142 pass
142 pass
@@ -104,7 +104,7 class ProcessingUnit(object):
104
104
105 def getOperationObj(self, objId):
105 def getOperationObj(self, objId):
106
106
107 if objId not in self.operations2RunDict.keys():
107 if objId not in list(self.operations2RunDict.keys()):
108 return None
108 return None
109
109
110 return self.operations2RunDict[objId]
110 return self.operations2RunDict[objId]
@@ -248,22 +248,22 class ProcessingUnit(object):
248 if opType == 'self':
248 if opType == 'self':
249
249
250 if not opName:
250 if not opName:
251 raise ValueError, "opName parameter should be defined"
251 raise ValueError("opName parameter should be defined")
252
252
253 sts = self.callMethod(opName, opId)
253 sts = self.callMethod(opName, opId)
254
254
255 elif opType == 'other' or opType == 'external' or opType == 'plotter':
255 elif opType == 'other' or opType == 'external' or opType == 'plotter':
256
256
257 if not opId:
257 if not opId:
258 raise ValueError, "opId parameter should be defined"
258 raise ValueError("opId parameter should be defined")
259
259
260 if opId not in self.operations2RunDict.keys():
260 if opId not in list(self.operations2RunDict.keys()):
261 raise ValueError, "Any operation with id=%s has been added" %str(opId)
261 raise ValueError("No operation with id=%s has been added" %str(opId))
262
262
263 sts = self.callObject(opId)
263 sts = self.callObject(opId)
264
264
265 else:
265 else:
266 raise ValueError, "opType should be 'self', 'external' or 'plotter'; and not '%s'" %opType
266 raise ValueError("opType should be 'self', 'external' or 'plotter'; and not '%s'" %opType)
267
267
268 return sts
268 return sts
269
269
@@ -1,6 +1,6
1 import numpy
1 import numpy
2
2
3 from jroproc_base import ProcessingUnit, Operation
3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import Correlation, hildebrand_sekhon
4 from schainpy.model.data.jrodata import Correlation, hildebrand_sekhon
5
5
6 class CorrelationProc(ProcessingUnit):
6 class CorrelationProc(ProcessingUnit):
@@ -1,6 +1,6
1 import numpy
1 import numpy
2
2
3 from jroproc_base import ProcessingUnit, Operation
3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import SpectraHeis
4 from schainpy.model.data.jrodata import SpectraHeis
5
5
6 class SpectraHeisProc(ProcessingUnit):
6 class SpectraHeisProc(ProcessingUnit):
@@ -99,7 +99,7 class SpectraHeisProc(ProcessingUnit):
99
99
100 return
100 return
101
101
102 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
102 raise ValueError("The type object %s is not valid"%(self.dataIn.type))
103
103
104
104
105 def selectChannels(self, channelList):
105 def selectChannels(self, channelList):
@@ -133,8 +133,8 class SpectraHeisProc(ProcessingUnit):
133
133
134 for channelIndex in channelIndexList:
134 for channelIndex in channelIndexList:
135 if channelIndex not in self.dataOut.channelIndexList:
135 if channelIndex not in self.dataOut.channelIndexList:
136 print channelIndexList
136 print(channelIndexList)
137 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
137 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
138
138
139 # nChannels = len(channelIndexList)
139 # nChannels = len(channelIndexList)
140
140
@@ -187,7 +187,7 class IncohInt4SpectraHeis(Operation):
187
187
188
188
189 if n == None and timeInterval == None:
189 if n == None and timeInterval == None:
190 raise ValueError, "n or timeInterval should be specified ..."
190 raise ValueError("n or timeInterval should be specified ...")
191
191
192 if n != None:
192 if n != None:
193 self.n = n
193 self.n = n
@@ -10,8 +10,6 import importlib
10 import itertools
10 import itertools
11 from multiprocessing import Pool, TimeoutError
11 from multiprocessing import Pool, TimeoutError
12 from multiprocessing.pool import ThreadPool
12 from multiprocessing.pool import ThreadPool
13 import copy_reg
14 import cPickle
15 import types
13 import types
16 from functools import partial
14 from functools import partial
17 import time
15 import time
@@ -19,7 +17,7 import time
19
17
20
18
21 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
19 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
22 from jroproc_base import ProcessingUnit, Operation
20 from .jroproc_base import ProcessingUnit, Operation
23 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
21 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
24 from scipy import asarray as ar,exp
22 from scipy import asarray as ar,exp
25 from scipy.optimize import curve_fit
23 from scipy.optimize import curve_fit
@@ -36,9 +34,9 SPEED_OF_LIGHT = 299792458
36 '''solving pickling issue'''
34 '''solving pickling issue'''
37
35
38 def _pickle_method(method):
36 def _pickle_method(method):
39 func_name = method.im_func.__name__
37 func_name = method.__func__.__name__
40 obj = method.im_self
38 obj = method.__self__
41 cls = method.im_class
39 cls = method.__self__.__class__
42 return _unpickle_method, (func_name, obj, cls)
40 return _unpickle_method, (func_name, obj, cls)
43
41
44 def _unpickle_method(func_name, obj, cls):
42 def _unpickle_method(func_name, obj, cls):
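Note: the _pickle_method() change above follows the Python 3 renaming of bound-method attributes; im_func, im_self and im_class are gone, and the class is reached through __self__. A toy demonstration (the Radar class is invented):

    class Radar:
        def ping(self):
            return 'pong'

    bound = Radar().ping
    # Python 2 spellings:   bound.im_func, bound.im_self, bound.im_class
    print(bound.__func__.__name__)       # 'ping'
    print(bound.__self__)                # the Radar instance
    print(bound.__self__.__class__)      # replaces im_class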
@@ -213,7 +211,7 class GaussianFit(Operation):
213 self.spc = dataOut.data_pre[0].copy()
211 self.spc = dataOut.data_pre[0].copy()
214
212
215
213
216 print 'SelfSpectra Shape', numpy.asarray(self.spc).shape
214 print('SelfSpectra Shape', numpy.asarray(self.spc).shape)
217
215
218
216
219 #plt.figure(50)
217 #plt.figure(50)
@@ -251,7 +249,7 class GaussianFit(Operation):
251 pool = Pool(processes=self.Num_Chn)
249 pool = Pool(processes=self.Num_Chn)
252 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
250 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
253 objs = [self for __ in range(self.Num_Chn)]
251 objs = [self for __ in range(self.Num_Chn)]
254 attrs = zip(objs, args)
252 attrs = list(zip(objs, args))
255 gauSPC = pool.map(target, attrs)
253 gauSPC = pool.map(target, attrs)
256 dataOut.GauSPC = numpy.asarray(gauSPC)
254 dataOut.GauSPC = numpy.asarray(gauSPC)
257 # ret = []
255 # ret = []
@@ -506,8 +504,8 class GaussianFit(Operation):
506 # print 'noise', noise
504 # print 'noise', noise
507 # print 's_noise', wnoise
505 # print 's_noise', wnoise
508
506
509 print '========================================================'
507 print('========================================================')
510 print 'total_time: ', time.time()-start_time
508 print('total_time: ', time.time()-start_time)
511
509
512 # re-normalizing spc and noise
510 # re-normalizing spc and noise
513 # This part differs from gg1
511 # This part differs from gg1
@@ -959,12 +957,12 class PrecipitationProc(Operation):
959 dataOut.data_output = Ze
957 dataOut.data_output = Ze
960 dataOut.data_param = numpy.ones([2,self.Num_Hei])
958 dataOut.data_param = numpy.ones([2,self.Num_Hei])
961 dataOut.channelList = [0,1]
959 dataOut.channelList = [0,1]
962 print 'channelList', dataOut.channelList
960 print('channelList', dataOut.channelList)
963 dataOut.data_param[0]=dBZe
961 dataOut.data_param[0]=dBZe
964 dataOut.data_param[1]=dBRR
962 dataOut.data_param[1]=dBRR
965 print 'RR SHAPE', dBRR.shape
963 print('RR SHAPE', dBRR.shape)
966 print 'Ze SHAPE', dBZe.shape
964 print('Ze SHAPE', dBZe.shape)
967 print 'dataOut.data_param SHAPE', dataOut.data_param.shape
965 print('dataOut.data_param SHAPE', dataOut.data_param.shape)
968
966
969
967
970 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
968 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
@@ -980,7 +978,7 class PrecipitationProc(Operation):
980 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
978 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
981
979
982 ETA = numpy.sum(SNR,1)
980 ETA = numpy.sum(SNR,1)
983 print 'ETA' , ETA
981 print('ETA' , ETA)
984 ETA = numpy.where(ETA != 0., ETA, numpy.NaN)
982 ETA = numpy.where(ETA != 0., ETA, numpy.NaN)
985
983
986 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
984 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
@@ -1068,7 +1066,7 class FullSpectralAnalysis(Operation):
1068
1066
1069 data = dataOut.data_pre
1067 data = dataOut.data_pre
1070 noise = dataOut.noise
1068 noise = dataOut.noise
1071 print 'noise',noise
1069 print('noise',noise)
1072 #SNRdB = 10*numpy.log10(dataOut.data_SNR)
1070 #SNRdB = 10*numpy.log10(dataOut.data_SNR)
1073
1071
1074 FirstMoment = numpy.average(dataOut.data_param[:,1,:],0)
1072 FirstMoment = numpy.average(dataOut.data_param[:,1,:],0)
@@ -1095,14 +1093,14 class FullSpectralAnalysis(Operation):
1095 velocityX=numpy.append(velocityX, Vzon)#Vmag
1093 velocityX=numpy.append(velocityX, Vzon)#Vmag
1096
1094
1097 else:
1095 else:
1098 print 'Vzon',Vzon
1096 print('Vzon',Vzon)
1099 velocityX=numpy.append(velocityX, numpy.NaN)
1097 velocityX=numpy.append(velocityX, numpy.NaN)
1100
1098
1101 if abs(Vmer)<100. and abs(Vmer) > 0.:
1099 if abs(Vmer)<100. and abs(Vmer) > 0.:
1102 velocityY=numpy.append(velocityY, Vmer)#Vang
1100 velocityY=numpy.append(velocityY, Vmer)#Vang
1103
1101
1104 else:
1102 else:
1105 print 'Vmer',Vmer
1103 print('Vmer',Vmer)
1106 velocityY=numpy.append(velocityY, numpy.NaN)
1104 velocityY=numpy.append(velocityY, numpy.NaN)
1107
1105
1108 if dbSNR[Height] > SNRlimit:
1106 if dbSNR[Height] > SNRlimit:
@@ -1120,18 +1118,18 class FullSpectralAnalysis(Operation):
1120 data_output[1]=numpy.array(velocityY)
1118 data_output[1]=numpy.array(velocityY)
1121 data_output[2]=-velocityV#FirstMoment
1119 data_output[2]=-velocityV#FirstMoment
1122
1120
1123 print ' '
1121 print(' ')
1124 #print 'FirstMoment'
1122 #print 'FirstMoment'
1125 #print FirstMoment
1123 #print FirstMoment
1126 print 'velocityX',data_output[0]
1124 print('velocityX',data_output[0])
1127 print ' '
1125 print(' ')
1128 print 'velocityY',data_output[1]
1126 print('velocityY',data_output[1])
1129 #print numpy.array(velocityY)
1127 #print numpy.array(velocityY)
1130 print ' '
1128 print(' ')
1131 #print 'SNR'
1129 #print 'SNR'
1132 #print 10*numpy.log10(dataOut.data_SNR)
1130 #print 10*numpy.log10(dataOut.data_SNR)
1133 #print numpy.shape(10*numpy.log10(dataOut.data_SNR))
1131 #print numpy.shape(10*numpy.log10(dataOut.data_SNR))
1134 print ' '
1132 print(' ')
1135
1133
1136
1134
1137 dataOut.data_output=data_output
1135 dataOut.data_output=data_output
@@ -1184,20 +1182,20 class FullSpectralAnalysis(Operation):
1184
1182
1185 SmoothSPC=self.moving_average(FactNorm,N=3)
1183 SmoothSPC=self.moving_average(FactNorm,N=3)
1186
1184
1187 xSamples = ar(range(len(SmoothSPC)))
1185 xSamples = ar(list(range(len(SmoothSPC))))
1188 ySamples[i] = SmoothSPC
1186 ySamples[i] = SmoothSPC
1189
1187
1190 #dbSNR=10*numpy.log10(dataSNR)
1188 #dbSNR=10*numpy.log10(dataSNR)
1191 print ' '
1189 print(' ')
1192 print ' '
1190 print(' ')
1193 print ' '
1191 print(' ')
1194
1192
1195 #print 'dataSNR', dbSNR.shape, dbSNR[0,40:120]
1193 #print 'dataSNR', dbSNR.shape, dbSNR[0,40:120]
1196 print 'SmoothSPC', SmoothSPC.shape, SmoothSPC[0:20]
1194 print('SmoothSPC', SmoothSPC.shape, SmoothSPC[0:20])
1197 print 'noise',noise
1195 print('noise',noise)
1198 print 'zline',zline.shape, zline[0:20]
1196 print('zline',zline.shape, zline[0:20])
1199 print 'FactNorm',FactNorm.shape, FactNorm[0:20]
1197 print('FactNorm',FactNorm.shape, FactNorm[0:20])
1200 print 'FactNorm suma', numpy.sum(FactNorm)
1198 print('FactNorm suma', numpy.sum(FactNorm))
1201
1199
1202 for i in range(spc.shape[0]):
1200 for i in range(spc.shape[0]):
1203
1201
@@ -1218,12 +1216,12 class FullSpectralAnalysis(Operation):
1218
1216
1219 phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
1217 phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
1220
1218
1221 print 'cspcLine', cspcLine.shape, cspcLine[0:20]
1219 print('cspcLine', cspcLine.shape, cspcLine[0:20])
1222 print 'CSPCFactor', CSPCFactor#, CSPCFactor[0:20]
1220 print('CSPCFactor', CSPCFactor)#, CSPCFactor[0:20]
1223 print numpy.sum(ySamples[chan_index0]), numpy.sum(ySamples[chan_index1]), -noise[i]
1221 print(numpy.sum(ySamples[chan_index0]), numpy.sum(ySamples[chan_index1]), -noise[i])
1224 print 'CSPCNorm', CSPCNorm.shape, CSPCNorm[0:20]
1222 print('CSPCNorm', CSPCNorm.shape, CSPCNorm[0:20])
1225 print 'CSPCNorm suma', numpy.sum(CSPCNorm)
1223 print('CSPCNorm suma', numpy.sum(CSPCNorm))
1226 print 'CSPCSamples', CSPCSamples.shape, CSPCSamples[0,0:20]
1224 print('CSPCSamples', CSPCSamples.shape, CSPCSamples[0,0:20])
1227
1225
1228 '''****** Getting fij width ******'''
1226 '''****** Getting fij width ******'''
1229
1227
@@ -1237,14 +1235,14 class FullSpectralAnalysis(Operation):
1237 meanGauss=sum(xSamples*yMean) / len(xSamples)
1235 meanGauss=sum(xSamples*yMean) / len(xSamples)
1238 sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples)
1236 sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples)
1239
1237
1240 print '****************************'
1238 print('****************************')
1241 print 'len(xSamples): ',len(xSamples)
1239 print('len(xSamples): ',len(xSamples))
1242 print 'yMean: ', yMean.shape, yMean[0:20]
1240 print('yMean: ', yMean.shape, yMean[0:20])
1243 print 'ySamples', ySamples.shape, ySamples[0,0:20]
1241 print('ySamples', ySamples.shape, ySamples[0,0:20])
1244 print 'xSamples: ',xSamples.shape, xSamples[0:20]
1242 print('xSamples: ',xSamples.shape, xSamples[0:20])
1245
1243
1246 print 'meanGauss',meanGauss
1244 print('meanGauss',meanGauss)
1247 print 'sigma',sigma
1245 print('sigma',sigma)
1248
1246
1249 #if (abs(meanGauss/sigma**2) > 0.0001) : #0.000000001):
1247 #if (abs(meanGauss/sigma**2) > 0.0001) : #0.000000001):
1250 if dbSNR > SNRlimit :
1248 if dbSNR > SNRlimit :
@@ -1256,7 +1254,7 class FullSpectralAnalysis(Operation):
1256
1254
1257 else:
1255 else:
1258 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1256 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1259 print 'Verificador: Dentro', Height
1257 print('Verificador: Dentro', Height)
1260 except :#RuntimeError:
1258 except :#RuntimeError:
1261 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1259 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1262
1260
@@ -1293,10 +1291,10 class FullSpectralAnalysis(Operation):
1293 else:
1291 else:
1294 Range = numpy.array([0,0])
1292 Range = numpy.array([0,0])
1295
1293
1296 print ' '
1294 print(' ')
1297 print 'GCpos',GCpos, ( len(xFrec)- len(xFrec)*0.1)
1295 print('GCpos',GCpos, ( len(xFrec)- len(xFrec)*0.1))
1298 print 'Rangpos',Rangpos
1296 print('Rangpos',Rangpos)
1299 print 'RANGE: ', Range
1297 print('RANGE: ', Range)
1300 FrecRange=xFrec[Range[0]:Range[1]]
1298 FrecRange=xFrec[Range[0]:Range[1]]
1301
1299
1302 '''****** Getting SCPC Slope ******'''
1300 '''****** Getting SCPC Slope ******'''
@@ -1306,9 +1304,9 class FullSpectralAnalysis(Operation):
1306 if len(FrecRange)>5 and len(FrecRange)<spc.shape[1]*0.5:
1304 if len(FrecRange)>5 and len(FrecRange)<spc.shape[1]*0.5:
1307 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1305 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1308
1306
1309 print 'FrecRange', len(FrecRange) , FrecRange
1307 print('FrecRange', len(FrecRange) , FrecRange)
1310 print 'PhaseRange', len(PhaseRange), PhaseRange
1308 print('PhaseRange', len(PhaseRange), PhaseRange)
1311 print ' '
1309 print(' ')
1312 if len(FrecRange) == len(PhaseRange):
1310 if len(FrecRange) == len(PhaseRange):
1313 slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange)
1311 slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange)
1314 PhaseSlope[i]=slope
1312 PhaseSlope[i]=slope
@@ -1354,7 +1352,7 class FullSpectralAnalysis(Operation):
1354 Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1352 Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1355 Vang=numpy.arctan2(Vmer,Vzon)
1353 Vang=numpy.arctan2(Vmer,Vzon)
1356 Vver=xFrec[Vpos]
1354 Vver=xFrec[Vpos]
1357 print 'vzon y vmer', Vzon, Vmer
1355 print('vzon y vmer', Vzon, Vmer)
1358 return Vzon, Vmer, Vver, GaussCenter
1356 return Vzon, Vmer, Vver, GaussCenter
1359
1357
1360 class SpectralMoments(Operation):
1358 class SpectralMoments(Operation):
@@ -1441,11 +1439,11 class SpectralMoments(Operation):
1441 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1439 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1442
1440
1443 # Moment calculation
1441 # Moment calculation
1444 bb = spec2[range(m,spec2.size)]
1442 bb = spec2[list(range(m,spec2.size))]
1445 bb = (bb<n0).nonzero()
1443 bb = (bb<n0).nonzero()
1446 bb = bb[0]
1444 bb = bb[0]
1447
1445
1448 ss = spec2[range(0,m + 1)]
1446 ss = spec2[list(range(0,m + 1))]
1449 ss = (ss<n0).nonzero()
1447 ss = (ss<n0).nonzero()
1450 ss = ss[0]
1448 ss = ss[0]
1451
1449
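Note: the list(range(...)) wrapping above keeps 2to3's literal translation; numpy fancy indexing accepts it, and a plain slice selects the same elements with less work. Sketch with an invented spectrum:

    import numpy

    spec2 = numpy.arange(16.0)                    # invented stand-in for the smoothed spectrum
    m, n0 = 8, 12.0

    bb = spec2[list(range(m, spec2.size))]        # as converted above
    assert numpy.array_equal(bb, spec2[m:])       # a slice selects the same tail

    bb = (bb < n0).nonzero()[0]
    print(bb)                                     # tail positions below the noise level n0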
@@ -1461,7 +1459,7 class SpectralMoments(Operation):
1461
1459
1462 if (ss1 > m): ss1 = m
1460 if (ss1 > m): ss1 = m
1463
1461
1464 valid = numpy.asarray(range(int(m + bb0 - ss1 + 1))) + ss1
1462 valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
1465 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1463 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1466 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1464 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1467 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
1465 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
@@ -1809,7 +1807,7 class WindProfiler(Operation):
1809 maxid = listPhi.index(max(listPhi))
1807 maxid = listPhi.index(max(listPhi))
1810 minid = listPhi.index(min(listPhi))
1808 minid = listPhi.index(min(listPhi))
1811
1809
1812 rango = range(len(phi))
1810 rango = list(range(len(phi)))
1813 # rango = numpy.delete(rango,maxid)
1811 # rango = numpy.delete(rango,maxid)
1814
1812
1815 heiRang1 = heiRang*math.cos(phi[maxid])
1813 heiRang1 = heiRang*math.cos(phi[maxid])
@@ -1867,7 +1865,7 class WindProfiler(Operation):
1867 heiRang = kwargs['heightList']
1865 heiRang = kwargs['heightList']
1868 SNR0 = kwargs['SNR']
1866 SNR0 = kwargs['SNR']
1869
1867
1870 if kwargs.has_key('dirCosx') and kwargs.has_key('dirCosy'):
1868 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1871 theta_x = numpy.array(kwargs['dirCosx'])
1869 theta_x = numpy.array(kwargs['dirCosx'])
1872 theta_y = numpy.array(kwargs['dirCosy'])
1870 theta_y = numpy.array(kwargs['dirCosy'])
1873 else:
1871 else:
@@ -1875,13 +1873,13 class WindProfiler(Operation):
1875 azim = numpy.array(kwargs['azimuth'])
1873 azim = numpy.array(kwargs['azimuth'])
1876 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1874 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1877 azimuth = kwargs['correctAzimuth']
1875 azimuth = kwargs['correctAzimuth']
1878 if kwargs.has_key('horizontalOnly'):
1876 if 'horizontalOnly' in kwargs:
1879 horizontalOnly = kwargs['horizontalOnly']
1877 horizontalOnly = kwargs['horizontalOnly']
1880 else: horizontalOnly = False
1878 else: horizontalOnly = False
1881 if kwargs.has_key('correctFactor'):
1879 if 'correctFactor' in kwargs:
1882 correctFactor = kwargs['correctFactor']
1880 correctFactor = kwargs['correctFactor']
1883 else: correctFactor = 1
1881 else: correctFactor = 1
1884 if kwargs.has_key('channelList'):
1882 if 'channelList' in kwargs:
1885 channelList = kwargs['channelList']
1883 channelList = kwargs['channelList']
1886 if len(channelList) == 2:
1884 if len(channelList) == 2:
1887 horizontalOnly = True
1885 horizontalOnly = True
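Note: dict.has_key() no longer exists in Python 3, hence the 'key' in kwargs tests in the hunk above. Sketch with invented kwargs; dict.get() collapses the if/else:

    kwargs = {'horizontalOnly': True, 'correctFactor': 1.0}   # invented example

    # Python 2 only:  if kwargs.has_key('horizontalOnly'): ...
    if 'horizontalOnly' in kwargs:                 # works on 2 and 3
        horizontalOnly = kwargs['horizontalOnly']
    else:
        horizontalOnly = False

    horizontalOnly = kwargs.get('horizontalOnly', False)      # equivalent one-liner
    print(horizontalOnly)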
@@ -2002,7 +2000,7 class WindProfiler(Operation):
2002 position_y = kwargs['positionY']
2000 position_y = kwargs['positionY']
2003 azimuth = kwargs['azimuth']
2001 azimuth = kwargs['azimuth']
2004
2002
2005 if kwargs.has_key('correctFactor'):
2003 if 'correctFactor' in kwargs:
2006 correctFactor = kwargs['correctFactor']
2004 correctFactor = kwargs['correctFactor']
2007 else:
2005 else:
2008 correctFactor = 1
2006 correctFactor = 1
@@ -2355,20 +2353,20 class WindProfiler(Operation):
2355 dataOut.flagNoData = True
2353 dataOut.flagNoData = True
2356 self.__dataReady = False
2354 self.__dataReady = False
2357
2355
2358 if kwargs.has_key('nHours'):
2356 if 'nHours' in kwargs:
2359 nHours = kwargs['nHours']
2357 nHours = kwargs['nHours']
2360 else:
2358 else:
2361 nHours = 1
2359 nHours = 1
2362
2360
2363 if kwargs.has_key('meteorsPerBin'):
2361 if 'meteorsPerBin' in kwargs:
2364 meteorThresh = kwargs['meteorsPerBin']
2362 meteorThresh = kwargs['meteorsPerBin']
2365 else:
2363 else:
2366 meteorThresh = 6
2364 meteorThresh = 6
2367
2365
2368 if kwargs.has_key('hmin'):
2366 if 'hmin' in kwargs:
2369 hmin = kwargs['hmin']
2367 hmin = kwargs['hmin']
2370 else: hmin = 70
2368 else: hmin = 70
2371 if kwargs.has_key('hmax'):
2369 if 'hmax' in kwargs:
2372 hmax = kwargs['hmax']
2370 hmax = kwargs['hmax']
2373 else: hmax = 110
2371 else: hmax = 110
2374
2372
@@ -2404,22 +2402,22 class WindProfiler(Operation):
2404 dataOut.flagNoData = True
2402 dataOut.flagNoData = True
2405 self.__dataReady = False
2403 self.__dataReady = False
2406
2404
2407 if kwargs.has_key('nMins'):
2405 if 'nMins' in kwargs:
2408 nMins = kwargs['nMins']
2406 nMins = kwargs['nMins']
2409 else: nMins = 20
2407 else: nMins = 20
2410 if kwargs.has_key('rx_location'):
2408 if 'rx_location' in kwargs:
2411 rx_location = kwargs['rx_location']
2409 rx_location = kwargs['rx_location']
2412 else: rx_location = [(0,1),(1,1),(1,0)]
2410 else: rx_location = [(0,1),(1,1),(1,0)]
2413 if kwargs.has_key('azimuth'):
2411 if 'azimuth' in kwargs:
2414 azimuth = kwargs['azimuth']
2412 azimuth = kwargs['azimuth']
2415 else: azimuth = 51.06
2413 else: azimuth = 51.06
2416 if kwargs.has_key('dfactor'):
2414 if 'dfactor' in kwargs:
2417 dfactor = kwargs['dfactor']
2415 dfactor = kwargs['dfactor']
2418 if kwargs.has_key('mode'):
2416 if 'mode' in kwargs:
2419 mode = kwargs['mode']
2417 mode = kwargs['mode']
2420 if kwargs.has_key('theta_x'):
2418 if 'theta_x' in kwargs:
2421 theta_x = kwargs['theta_x']
2419 theta_x = kwargs['theta_x']
2422 if kwargs.has_key('theta_y'):
2420 if 'theta_y' in kwargs:
2423 theta_y = kwargs['theta_y']
2421 theta_y = kwargs['theta_y']
2424 else: mode = 'SA'
2422 else: mode = 'SA'
2425
2423
@@ -2480,7 +2478,7 class EWDriftsEstimation(Operation):
2480 maxid = listPhi.index(max(listPhi))
2478 maxid = listPhi.index(max(listPhi))
2481 minid = listPhi.index(min(listPhi))
2479 minid = listPhi.index(min(listPhi))
2482
2480
2483 rango = range(len(phi))
2481 rango = list(range(len(phi)))
2484 # rango = numpy.delete(rango,maxid)
2482 # rango = numpy.delete(rango,maxid)
2485
2483
2486 heiRang1 = heiRang*math.cos(phi[maxid])
2484 heiRang1 = heiRang*math.cos(phi[maxid])
@@ -3857,7 +3855,7 class SMOperations():
3857
3855
3858 def getPhasePairs(self, channelPositions):
3856 def getPhasePairs(self, channelPositions):
3859 chanPos = numpy.array(channelPositions)
3857 chanPos = numpy.array(channelPositions)
3860 listOper = list(itertools.combinations(range(5),2))
3858 listOper = list(itertools.combinations(list(range(5)),2))
3861
3859
3862 distances = numpy.zeros(4)
3860 distances = numpy.zeros(4)
3863 axisX = []
3861 axisX = []
@@ -2,10 +2,10 import itertools
2
2
3 import numpy
3 import numpy
4
4
5 from jroproc_base import ProcessingUnit, Operation
5 from .jroproc_base import ProcessingUnit, Operation
6 from schainpy.model.data.jrodata import Spectra
6 from schainpy.model.data.jrodata import Spectra
7 from schainpy.model.data.jrodata import hildebrand_sekhon
7 from schainpy.model.data.jrodata import hildebrand_sekhon
8
8 from schainpy.utils import log #yong
9
9
10 class SpectraProc(ProcessingUnit):
10 class SpectraProc(ProcessingUnit):
11
11
@@ -99,11 +99,11 class SpectraProc(ProcessingUnit):
99 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
99 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
100 for pair in self.dataOut.pairsList:
100 for pair in self.dataOut.pairsList:
101 if pair[0] not in self.dataOut.channelList:
101 if pair[0] not in self.dataOut.channelList:
102 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
102 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
103 str(pair), str(self.dataOut.channelList))
103 str(pair), str(self.dataOut.channelList)))
104 if pair[1] not in self.dataOut.channelList:
104 if pair[1] not in self.dataOut.channelList:
105 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
105 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
106 str(pair), str(self.dataOut.channelList))
106 str(pair), str(self.dataOut.channelList)))
107
107
108 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
108 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
109 numpy.conjugate(fft_volt[pair[1], :, :])
109 numpy.conjugate(fft_volt[pair[1], :, :])
@@ -140,7 +140,7 class SpectraProc(ProcessingUnit):
140 if self.dataIn.type == "Voltage":
140 if self.dataIn.type == "Voltage":
141
141
142 if nFFTPoints == None:
142 if nFFTPoints == None:
143 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
143 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144
144
145 if nProfiles == None:
145 if nProfiles == None:
146 nProfiles = nFFTPoints
146 nProfiles = nFFTPoints
@@ -180,8 +180,8 class SpectraProc(ProcessingUnit):
180 self.id_min += nVoltProfiles
180 self.id_min += nVoltProfiles
181 self.id_max += nVoltProfiles
181 self.id_max += nVoltProfiles
182 else:
182 else:
183 raise ValueError, "The type object %s has %d profiles, it should just has %d profiles" % (
183 raise ValueError("The type object %s has %d profiles, it should have just %d profiles" % (
184 self.dataIn.type, self.dataIn.data.shape[1], nProfiles)
184 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
185 self.dataOut.flagNoData = True
185 self.dataOut.flagNoData = True
186 return 0
186 return 0
187 else:
187 else:
@@ -201,8 +201,8 class SpectraProc(ProcessingUnit):
201
201
202 return True
202 return True
203
203
204 raise ValueError, "The type of input object '%s' is not valid" % (
204 raise ValueError("The type of input object '%s' is not valid" % (
205 self.dataIn.type)
205 self.dataIn.type))
206
206
207 def __selectPairs(self, pairsList):
207 def __selectPairs(self, pairsList):
208
208
@@ -256,8 +256,8 class SpectraProc(ProcessingUnit):
256
256
257 for channel in channelList:
257 for channel in channelList:
258 if channel not in self.dataOut.channelList:
258 if channel not in self.dataOut.channelList:
259 raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (
259 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (
260 channel, str(self.dataOut.channelList))
260 channel, str(self.dataOut.channelList)))
261
261
262 index = self.dataOut.channelList.index(channel)
262 index = self.dataOut.channelList.index(channel)
263 channelIndexList.append(index)
263 channelIndexList.append(index)
@@ -282,8 +282,8 class SpectraProc(ProcessingUnit):
282
282
283 for channelIndex in channelIndexList:
283 for channelIndex in channelIndexList:
284 if channelIndex not in self.dataOut.channelIndexList:
284 if channelIndex not in self.dataOut.channelIndexList:
285 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " % (
285 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = %s" % (
286 channelIndex, self.dataOut.channelIndexList)
286 channelIndex, self.dataOut.channelIndexList))
287
287
288 # nChannels = len(channelIndexList)
288 # nChannels = len(channelIndexList)
289
289
@@ -318,8 +318,8 class SpectraProc(ProcessingUnit):
318 """
318 """
319
319
320 if (minHei > maxHei):
320 if (minHei > maxHei):
321 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (
321 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (
322 minHei, maxHei)
322 minHei, maxHei))
323
323
324 if (minHei < self.dataOut.heightList[0]):
324 if (minHei < self.dataOut.heightList[0]):
325 minHei = self.dataOut.heightList[0]
325 minHei = self.dataOut.heightList[0]
@@ -410,8 +410,8 class SpectraProc(ProcessingUnit):
410 """
410 """
411
411
412 if (minIndex < 0) or (minIndex > maxIndex):
412 if (minIndex < 0) or (minIndex > maxIndex):
413 raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (
413 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
414 minIndex, maxIndex)
414 minIndex, maxIndex))
415
415
416 if (maxIndex >= self.dataOut.nHeights):
416 if (maxIndex >= self.dataOut.nHeights):
417 maxIndex = self.dataOut.nHeights - 1
417 maxIndex = self.dataOut.nHeights - 1
@@ -448,11 +448,12 class SpectraProc(ProcessingUnit):
448 else:
448 else:
449 jcspectraExist = False
449 jcspectraExist = False
450
450
451 freq_dc = jspectra.shape[1] / 2
451 freq_dc = int(jspectra.shape[1] / 2)
452 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
452 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
453 ind_vel = ind_vel.astype(int)
453
454
454 if ind_vel[0] < 0:
455 if ind_vel[0] < 0:
455 ind_vel[range(0, 1)] = ind_vel[range(0, 1)] + self.num_prof
456 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
456
457
457 if mode == 1:
458 if mode == 1:
458 jspectra[:, freq_dc, :] = (
459 jspectra[:, freq_dc, :] = (
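Note: the int() cast and .astype(int) added in the hunk above exist because Python 3's / is true division even for integers, and float results are not valid indices. A short sketch with a toy spectra array (shape values are invented):

    import numpy

    jspectra = numpy.zeros((3, 8, 4))             # toy (channels, FFT points, heights)

    freq_dc = int(jspectra.shape[1] / 2)          # as in the patch: 8 / 2 -> 4.0 -> 4
    freq_dc_alt = jspectra.shape[1] // 2          # floor division yields an int directly
    assert freq_dc == freq_dc_alt == 4

    ind_vel = (numpy.array([-2, -1, 1, 2]) + freq_dc).astype(int)
    print(jspectra[:, freq_dc, :].shape, ind_vel)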
@@ -468,7 +469,7 class SpectraProc(ProcessingUnit):
468 xx = numpy.zeros([4, 4])
469 xx = numpy.zeros([4, 4])
469
470
470 for fil in range(4):
471 for fil in range(4):
471 xx[fil, :] = vel[fil]**numpy.asarray(range(4))
472 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
472
473
473 xx_inv = numpy.linalg.inv(xx)
474 xx_inv = numpy.linalg.inv(xx)
474 xx_aux = xx_inv[0, :]
475 xx_aux = xx_inv[0, :]
@@ -508,7 +509,7 class SpectraProc(ProcessingUnit):
508 # hei_interf
509 # hei_interf
509 if hei_interf is None:
510 if hei_interf is None:
510 count_hei = num_hei / 2 # Since it's an integer it doesn't matter
511 count_hei = num_hei / 2 # Since it's an integer it doesn't matter
511 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
512 hei_interf = numpy.asmatrix(list(range(int(count_hei)))) + num_hei - count_hei
512 hei_interf = numpy.asarray(hei_interf)[0]
513 hei_interf = numpy.asarray(hei_interf)[0]
513 # nhei_interf
514 # nhei_interf
514 if (nhei_interf == None):
515 if (nhei_interf == None):
@@ -520,10 +521,10 class SpectraProc(ProcessingUnit):
520 if (offhei_interf == None):
521 if (offhei_interf == None):
521 offhei_interf = 0
522 offhei_interf = 0
522
523
523 ind_hei = range(num_hei)
524 ind_hei = list(range(num_hei))
524 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
525 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
525 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
526 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
526 mask_prof = numpy.asarray(range(num_prof))
527 mask_prof = numpy.asarray(list(range(num_prof)))
527 num_mask_prof = mask_prof.size
528 num_mask_prof = mask_prof.size
528 comp_mask_prof = [0, num_prof / 2]
529 comp_mask_prof = [0, num_prof / 2]
529
530
@@ -541,8 +542,8 class SpectraProc(ProcessingUnit):
541 psort = power.ravel().argsort()
542 psort = power.ravel().argsort()
542
543
543 # The average interference in the power spectra is estimated using
544 # The average interference in the power spectra is estimated using
544 junkspc_interf = jspectra[ich, :, hei_interf[psort[range(
545 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
545 offhei_interf, nhei_interf + offhei_interf)]]]
546 offhei_interf, nhei_interf + offhei_interf))]]]
546
547
547 if noise_exist:
548 if noise_exist:
548 # tmp_noise = jnoise[ich] / num_prof
549 # tmp_noise = jnoise[ich] / num_prof
@@ -603,7 +604,7 class SpectraProc(ProcessingUnit):
603 xx = numpy.zeros([4, 4])
604 xx = numpy.zeros([4, 4])
604
605
605 for id1 in range(4):
606 for id1 in range(4):
606 xx[:, id1] = ind[id1]**numpy.asarray(range(4))
607 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
607
608
608 xx_inv = numpy.linalg.inv(xx)
609 xx_inv = numpy.linalg.inv(xx)
609 xx = xx_inv[:, 0]
610 xx = xx_inv[:, 0]
@@ -632,17 +633,17 class SpectraProc(ProcessingUnit):
632 cspower = cspower.sum(axis=0)
633 cspower = cspower.sum(axis=0)
633
634
634 cspsort = cspower.ravel().argsort()
635 cspsort = cspower.ravel().argsort()
635 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[range(
636 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
636 offhei_interf, nhei_interf + offhei_interf)]]]
637 offhei_interf, nhei_interf + offhei_interf))]]]
637 junkcspc_interf = junkcspc_interf.transpose()
638 junkcspc_interf = junkcspc_interf.transpose()
638 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
639 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
639
640
640 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
641 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
641
642
642 median_real = numpy.median(numpy.real(
643 median_real = numpy.median(numpy.real(
643 junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :]))
644 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :]))
644 median_imag = numpy.median(numpy.imag(
645 median_imag = numpy.median(numpy.imag(
645 junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :]))
646 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :]))
646 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
647 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
647 median_real, median_imag)
648 median_real, median_imag)
648
649
@@ -662,7 +663,7 class SpectraProc(ProcessingUnit):
662 xx = numpy.zeros([4, 4])
663 xx = numpy.zeros([4, 4])
663
664
664 for id1 in range(4):
665 for id1 in range(4):
665 xx[:, id1] = ind[id1]**numpy.asarray(range(4))
666 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
666
667
667 xx_inv = numpy.linalg.inv(xx)
668 xx_inv = numpy.linalg.inv(xx)
668 xx = xx_inv[:, 0]
669 xx = xx_inv[:, 0]
@@ -693,13 +694,13 class SpectraProc(ProcessingUnit):
693 maxHei = self.dataOut.heightList[-1]
694 maxHei = self.dataOut.heightList[-1]
694
695
695 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
696 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
696 print 'minHei: %.2f is out of the heights range' % (minHei)
697 print('minHei: %.2f is out of the heights range' % (minHei))
697 print 'minHei is setting to %.2f' % (self.dataOut.heightList[0])
698 print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
698 minHei = self.dataOut.heightList[0]
699 minHei = self.dataOut.heightList[0]
699
700
700 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
701 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
701 print 'maxHei: %.2f is out of the heights range' % (maxHei)
702 print('maxHei: %.2f is out of the heights range' % (maxHei))
702 print 'maxHei is setting to %.2f' % (self.dataOut.heightList[-1])
703 print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
703 maxHei = self.dataOut.heightList[-1]
704 maxHei = self.dataOut.heightList[-1]
704
705
705 # velocity validation
706 # velocity validation
@@ -712,13 +713,13 class SpectraProc(ProcessingUnit):
712 maxVel = velrange[-1]
713 maxVel = velrange[-1]
713
714
714 if (minVel < velrange[0]) or (minVel > maxVel):
715 if (minVel < velrange[0]) or (minVel > maxVel):
715 print 'minVel: %.2f is out of the velocity range' % (minVel)
716 print('minVel: %.2f is out of the velocity range' % (minVel))
716 print 'minVel is setting to %.2f' % (velrange[0])
717 print('minVel is setting to %.2f' % (velrange[0]))
717 minVel = velrange[0]
718 minVel = velrange[0]
718
719
719 if (maxVel > velrange[-1]) or (maxVel < minVel):
720 if (maxVel > velrange[-1]) or (maxVel < minVel):
720 print 'maxVel: %.2f is out of the velocity range' % (maxVel)
721 print('maxVel: %.2f is out of the velocity range' % (maxVel))
721 print 'maxVel is setting to %.2f' % (velrange[-1])
722 print('maxVel is setting to %.2f' % (velrange[-1]))
722 maxVel = velrange[-1]
723 maxVel = velrange[-1]
723
724
724 # index selection for the range
725 # index selection for the range
@@ -740,8 +741,8 class SpectraProc(ProcessingUnit):
740 maxIndex = len(heights)
741 maxIndex = len(heights)
741
742
742 if (minIndex < 0) or (minIndex > maxIndex):
743 if (minIndex < 0) or (minIndex > maxIndex):
743 raise ValueError, "some value in (%d,%d) is not valid" % (
744 raise ValueError("some value in (%d,%d) is not valid" % (
744 minIndex, maxIndex)
745 minIndex, maxIndex))
745
746
746 if (maxIndex >= self.dataOut.nHeights):
747 if (maxIndex >= self.dataOut.nHeights):
747 maxIndex = self.dataOut.nHeights - 1
748 maxIndex = self.dataOut.nHeights - 1
@@ -823,7 +824,7 class IncohInt(Operation):
823 self.__byTime = False
824 self.__byTime = False
824
825
825 if n is None and timeInterval is None:
826 if n is None and timeInterval is None:
826 raise ValueError, "n or timeInterval should be specified ..."
827 raise ValueError("n or timeInterval should be specified ...")
827
828
828 if n is not None:
829 if n is not None:
829 self.n = int(n)
830 self.n = int(n)
@@ -1,6 +1,6
1 import numpy
1 import numpy
2
2
3 from jroproc_base import ProcessingUnit, Operation
3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import Spectra
4 from schainpy.model.data.jrodata import Spectra
5 from schainpy.model.data.jrodata import hildebrand_sekhon
5 from schainpy.model.data.jrodata import hildebrand_sekhon
6
6
@@ -119,9 +119,9 class SpectraAFCProc(ProcessingUnit):
119 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
119 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
120 for pair in self.dataOut.pairsList:
120 for pair in self.dataOut.pairsList:
121 if pair[0] not in self.dataOut.channelList:
121 if pair[0] not in self.dataOut.channelList:
122 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
122 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
123 if pair[1] not in self.dataOut.channelList:
123 if pair[1] not in self.dataOut.channelList:
124 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
124 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
125
125
126 chan_index0 = self.dataOut.channelList.index(pair[0])
126 chan_index0 = self.dataOut.channelList.index(pair[0])
127 chan_index1 = self.dataOut.channelList.index(pair[1])
127 chan_index1 = self.dataOut.channelList.index(pair[1])
@@ -148,7 +148,7 class SpectraAFCProc(ProcessingUnit):
148 if self.dataIn.type == "Voltage":
148 if self.dataIn.type == "Voltage":
149
149
150 if nFFTPoints == None:
150 if nFFTPoints == None:
151 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
151 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
152
152
153 if nProfiles == None:
153 if nProfiles == None:
154 nProfiles = nFFTPoints
154 nProfiles = nFFTPoints
@@ -172,7 +172,7 class SpectraAFCProc(ProcessingUnit):
172 # self.profIndex += 1
172 # self.profIndex += 1
173
173
174 else:
174 else:
175 raise ValueError, ""
175 raise ValueError("")
176
176
177 self.firstdatatime = self.dataIn.utctime
177 self.firstdatatime = self.dataIn.utctime
178
178
@@ -186,7 +186,7 class SpectraAFCProc(ProcessingUnit):
186
186
187 return True
187 return True
188
188
189 raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type)
189 raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type))
190
190
191 def __selectPairs(self, pairsList):
191 def __selectPairs(self, pairsList):
192
192
@@ -246,7 +246,7 class SpectraAFCProc(ProcessingUnit):
246
246
247 for channel in channelList:
247 for channel in channelList:
248 if channel not in self.dataOut.channelList:
248 if channel not in self.dataOut.channelList:
249 raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))
249 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)))
250
250
251 index = self.dataOut.channelList.index(channel)
251 index = self.dataOut.channelList.index(channel)
252 channelIndexList.append(index)
252 channelIndexList.append(index)
@@ -271,7 +271,7 class SpectraAFCProc(ProcessingUnit):
271
271
272 for channelIndex in channelIndexList:
272 for channelIndex in channelIndexList:
273 if channelIndex not in self.dataOut.channelIndexList:
273 if channelIndex not in self.dataOut.channelIndexList:
274 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList)
274 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList))
275
275
276 # nChannels = len(channelIndexList)
276 # nChannels = len(channelIndexList)
277
277
@@ -305,7 +305,7 class SpectraAFCProc(ProcessingUnit):
305 """
305 """
306
306
307 if (minHei > maxHei):
307 if (minHei > maxHei):
308 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)
308 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei))
309
309
310 if (minHei < self.dataOut.heightList[0]):
310 if (minHei < self.dataOut.heightList[0]):
311 minHei = self.dataOut.heightList[0]
311 minHei = self.dataOut.heightList[0]
@@ -394,7 +394,7 class SpectraAFCProc(ProcessingUnit):
394 """
394 """
395
395
396 if (minIndex < 0) or (minIndex > maxIndex):
396 if (minIndex < 0) or (minIndex > maxIndex):
397 raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex)
397 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
398
398
399 if (maxIndex >= self.dataOut.nHeights):
399 if (maxIndex >= self.dataOut.nHeights):
400 maxIndex = self.dataOut.nHeights-1
400 maxIndex = self.dataOut.nHeights-1
@@ -435,7 +435,7 class SpectraAFCProc(ProcessingUnit):
435 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
435 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
436
436
437 if ind_vel[0]<0:
437 if ind_vel[0]<0:
438 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
438 ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
439
439
440 if mode == 1:
440 if mode == 1:
441 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
441 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
@@ -449,7 +449,7 class SpectraAFCProc(ProcessingUnit):
449 xx = numpy.zeros([4,4])
449 xx = numpy.zeros([4,4])
450
450
451 for fil in range(4):
451 for fil in range(4):
452 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
452 xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
453
453
454 xx_inv = numpy.linalg.inv(xx)
454 xx_inv = numpy.linalg.inv(xx)
455 xx_aux = xx_inv[0,:]
455 xx_aux = xx_inv[0,:]
@@ -489,7 +489,7 class SpectraAFCProc(ProcessingUnit):
489 #hei_interf
489 #hei_interf
490 if hei_interf is None:
490 if hei_interf is None:
491 count_hei = num_hei/2 #Since it is an integer it does not matter
491 count_hei = num_hei/2 #Since it is an integer it does not matter
492 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
492 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
493 hei_interf = numpy.asarray(hei_interf)[0]
493 hei_interf = numpy.asarray(hei_interf)[0]
494 #nhei_interf
494 #nhei_interf
495 if (nhei_interf == None):
495 if (nhei_interf == None):
@@ -501,10 +501,10 class SpectraAFCProc(ProcessingUnit):
501 if (offhei_interf == None):
501 if (offhei_interf == None):
502 offhei_interf = 0
502 offhei_interf = 0
503
503
504 ind_hei = range(num_hei)
504 ind_hei = list(range(num_hei))
505 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
505 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
506 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
506 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
507 mask_prof = numpy.asarray(range(num_prof))
507 mask_prof = numpy.asarray(list(range(num_prof)))
508 num_mask_prof = mask_prof.size
508 num_mask_prof = mask_prof.size
509 comp_mask_prof = [0, num_prof/2]
509 comp_mask_prof = [0, num_prof/2]
510
510
@@ -523,7 +523,7 class SpectraAFCProc(ProcessingUnit):
523 psort = power.ravel().argsort()
523 psort = power.ravel().argsort()
524
524
525 #The average interference in the Power Spectra is estimated using
525 #The average interference in the Power Spectra is estimated using
526 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
526 junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
527
527
528 if noise_exist:
528 if noise_exist:
529 # tmp_noise = jnoise[ich] / num_prof
529 # tmp_noise = jnoise[ich] / num_prof
@@ -576,7 +576,7 class SpectraAFCProc(ProcessingUnit):
576 xx = numpy.zeros([4,4])
576 xx = numpy.zeros([4,4])
577
577
578 for id1 in range(4):
578 for id1 in range(4):
579 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
579 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
580
580
581 xx_inv = numpy.linalg.inv(xx)
581 xx_inv = numpy.linalg.inv(xx)
582 xx = xx_inv[:,0]
582 xx = xx_inv[:,0]
@@ -602,14 +602,14 class SpectraAFCProc(ProcessingUnit):
602 cspower = cspower.sum(axis = 0)
602 cspower = cspower.sum(axis = 0)
603
603
604 cspsort = cspower.ravel().argsort()
604 cspsort = cspower.ravel().argsort()
605 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
605 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
606 junkcspc_interf = junkcspc_interf.transpose()
606 junkcspc_interf = junkcspc_interf.transpose()
607 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
607 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
608
608
609 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
609 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
610
610
611 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
611 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
612 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
612 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
613 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
613 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
614
614
615 for iprof in range(num_prof):
615 for iprof in range(num_prof):
@@ -626,7 +626,7 class SpectraAFCProc(ProcessingUnit):
626 xx = numpy.zeros([4,4])
626 xx = numpy.zeros([4,4])
627
627
628 for id1 in range(4):
628 for id1 in range(4):
629 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
629 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
630
630
631 xx_inv = numpy.linalg.inv(xx)
631 xx_inv = numpy.linalg.inv(xx)
632 xx = xx_inv[:,0]
632 xx = xx_inv[:,0]
@@ -657,13 +657,13 class SpectraAFCProc(ProcessingUnit):
657 maxHei = self.dataOut.heightList[-1]
657 maxHei = self.dataOut.heightList[-1]
658
658
659 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
659 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
660 print 'minHei: %.2f is out of the heights range'%(minHei)
660 print('minHei: %.2f is out of the heights range'%(minHei))
661 print 'minHei is setting to %.2f'%(self.dataOut.heightList[0])
661 print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
662 minHei = self.dataOut.heightList[0]
662 minHei = self.dataOut.heightList[0]
663
663
664 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
664 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
665 print 'maxHei: %.2f is out of the heights range'%(maxHei)
665 print('maxHei: %.2f is out of the heights range'%(maxHei))
666 print 'maxHei is setting to %.2f'%(self.dataOut.heightList[-1])
666 print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
667 maxHei = self.dataOut.heightList[-1]
667 maxHei = self.dataOut.heightList[-1]
668
668
669 # velocity validation
669 # velocity validation
@@ -676,13 +676,13 class SpectraAFCProc(ProcessingUnit):
676 maxVel = velrange[-1]
676 maxVel = velrange[-1]
677
677
678 if (minVel < velrange[0]) or (minVel > maxVel):
678 if (minVel < velrange[0]) or (minVel > maxVel):
679 print 'minVel: %.2f is out of the velocity range'%(minVel)
679 print('minVel: %.2f is out of the velocity range'%(minVel))
680 print 'minVel is setting to %.2f'%(velrange[0])
680 print('minVel is setting to %.2f'%(velrange[0]))
681 minVel = velrange[0]
681 minVel = velrange[0]
682
682
683 if (maxVel > velrange[-1]) or (maxVel < minVel):
683 if (maxVel > velrange[-1]) or (maxVel < minVel):
684 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
684 print('maxVel: %.2f is out of the velocity range'%(maxVel))
685 print 'maxVel is setting to %.2f'%(velrange[-1])
685 print('maxVel is setting to %.2f'%(velrange[-1]))
686 maxVel = velrange[-1]
686 maxVel = velrange[-1]
687
687
688 # index selection for the range
688 # index selection for the range
@@ -704,7 +704,7 class SpectraAFCProc(ProcessingUnit):
704 maxIndex = len(heights)
704 maxIndex = len(heights)
705
705
706 if (minIndex < 0) or (minIndex > maxIndex):
706 if (minIndex < 0) or (minIndex > maxIndex):
707 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
707 raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
708
708
709 if (maxIndex >= self.dataOut.nHeights):
709 if (maxIndex >= self.dataOut.nHeights):
710 maxIndex = self.dataOut.nHeights-1
710 maxIndex = self.dataOut.nHeights-1
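
One thing the conversion above leaves untouched: expressions such as 3*num_prof/4, num_hei/2 and num_prof/2 still use /, which is true division in Python 3, so list(range(3*num_prof/4)) raises TypeError as soon as it runs. A hedged sketch of the floor-division form that preserves the Python 2 integer behaviour (the sizes are made up; only the variable names come from the hunks above):

    num_prof, num_hei = 64, 75                  # illustrative sizes

    count_hei = num_hei // 2                    # was num_hei/2
    hei_interf = list(range(count_hei))         # range() rejects floats in Python 3
    ind = list(range(3 * num_prof // 4))        # was range(3*num_prof/4)
    comp_mask_prof = [0, num_prof // 2]         # keeps an int index, not 32.0
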
@@ -1,6 +1,6
1 import numpy
1 import numpy
2
2
3 from jroproc_base import ProcessingUnit, Operation
3 from .jroproc_base import ProcessingUnit, Operation
4 from schainpy.model.data.jrodata import Spectra
4 from schainpy.model.data.jrodata import Spectra
5 from schainpy.model.data.jrodata import hildebrand_sekhon
5 from schainpy.model.data.jrodata import hildebrand_sekhon
6
6
@@ -125,9 +125,9 class SpectraLagsProc(ProcessingUnit):
125 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
125 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
126 for pair in self.dataOut.pairsList:
126 for pair in self.dataOut.pairsList:
127 if pair[0] not in self.dataOut.channelList:
127 if pair[0] not in self.dataOut.channelList:
128 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
128 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
129 if pair[1] not in self.dataOut.channelList:
129 if pair[1] not in self.dataOut.channelList:
130 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
130 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList)))
131
131
132 chan_index0 = self.dataOut.channelList.index(pair[0])
132 chan_index0 = self.dataOut.channelList.index(pair[0])
133 chan_index1 = self.dataOut.channelList.index(pair[1])
133 chan_index1 = self.dataOut.channelList.index(pair[1])
@@ -158,7 +158,7 class SpectraLagsProc(ProcessingUnit):
158 if self.dataIn.type == "Voltage":
158 if self.dataIn.type == "Voltage":
159
159
160 if nFFTPoints == None:
160 if nFFTPoints == None:
161 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
161 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
162
162
163 if nProfiles == None:
163 if nProfiles == None:
164 nProfiles = nFFTPoints
164 nProfiles = nFFTPoints
@@ -189,7 +189,7 class SpectraLagsProc(ProcessingUnit):
189
189
190 return True
190 return True
191
191
192 raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type)
192 raise ValueError("The type of input object '%s' is not valid"%(self.dataIn.type))
193
193
194 def __selectPairs(self, pairsList):
194 def __selectPairs(self, pairsList):
195
195
@@ -249,7 +249,7 class SpectraLagsProc(ProcessingUnit):
249
249
250 for channel in channelList:
250 for channel in channelList:
251 if channel not in self.dataOut.channelList:
251 if channel not in self.dataOut.channelList:
252 raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))
252 raise ValueError("Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList)))
253
253
254 index = self.dataOut.channelList.index(channel)
254 index = self.dataOut.channelList.index(channel)
255 channelIndexList.append(index)
255 channelIndexList.append(index)
@@ -274,7 +274,7 class SpectraLagsProc(ProcessingUnit):
274
274
275 for channelIndex in channelIndexList:
275 for channelIndex in channelIndexList:
276 if channelIndex not in self.dataOut.channelIndexList:
276 if channelIndex not in self.dataOut.channelIndexList:
277 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList)
277 raise ValueError("Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList))
278
278
279 # nChannels = len(channelIndexList)
279 # nChannels = len(channelIndexList)
280
280
@@ -308,7 +308,7 class SpectraLagsProc(ProcessingUnit):
308 """
308 """
309
309
310 if (minHei > maxHei):
310 if (minHei > maxHei):
311 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)
311 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei))
312
312
313 if (minHei < self.dataOut.heightList[0]):
313 if (minHei < self.dataOut.heightList[0]):
314 minHei = self.dataOut.heightList[0]
314 minHei = self.dataOut.heightList[0]
@@ -397,7 +397,7 class SpectraLagsProc(ProcessingUnit):
397 """
397 """
398
398
399 if (minIndex < 0) or (minIndex > maxIndex):
399 if (minIndex < 0) or (minIndex > maxIndex):
400 raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex)
400 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
401
401
402 if (maxIndex >= self.dataOut.nHeights):
402 if (maxIndex >= self.dataOut.nHeights):
403 maxIndex = self.dataOut.nHeights-1
403 maxIndex = self.dataOut.nHeights-1
@@ -438,7 +438,7 class SpectraLagsProc(ProcessingUnit):
438 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
438 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
439
439
440 if ind_vel[0]<0:
440 if ind_vel[0]<0:
441 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
441 ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
442
442
443 if mode == 1:
443 if mode == 1:
444 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
444 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
@@ -452,7 +452,7 class SpectraLagsProc(ProcessingUnit):
452 xx = numpy.zeros([4,4])
452 xx = numpy.zeros([4,4])
453
453
454 for fil in range(4):
454 for fil in range(4):
455 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
455 xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
456
456
457 xx_inv = numpy.linalg.inv(xx)
457 xx_inv = numpy.linalg.inv(xx)
458 xx_aux = xx_inv[0,:]
458 xx_aux = xx_inv[0,:]
@@ -492,7 +492,7 class SpectraLagsProc(ProcessingUnit):
492 #hei_interf
492 #hei_interf
493 if hei_interf is None:
493 if hei_interf is None:
494 count_hei = num_hei/2 #Since it is an integer it does not matter
494 count_hei = num_hei/2 #Since it is an integer it does not matter
495 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
495 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
496 hei_interf = numpy.asarray(hei_interf)[0]
496 hei_interf = numpy.asarray(hei_interf)[0]
497 #nhei_interf
497 #nhei_interf
498 if (nhei_interf == None):
498 if (nhei_interf == None):
@@ -504,10 +504,10 class SpectraLagsProc(ProcessingUnit):
504 if (offhei_interf == None):
504 if (offhei_interf == None):
505 offhei_interf = 0
505 offhei_interf = 0
506
506
507 ind_hei = range(num_hei)
507 ind_hei = list(range(num_hei))
508 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
508 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
509 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
509 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
510 mask_prof = numpy.asarray(range(num_prof))
510 mask_prof = numpy.asarray(list(range(num_prof)))
511 num_mask_prof = mask_prof.size
511 num_mask_prof = mask_prof.size
512 comp_mask_prof = [0, num_prof/2]
512 comp_mask_prof = [0, num_prof/2]
513
513
@@ -526,7 +526,7 class SpectraLagsProc(ProcessingUnit):
526 psort = power.ravel().argsort()
526 psort = power.ravel().argsort()
527
527
528 #The average interference in the Power Spectra is estimated using
528 #The average interference in the Power Spectra is estimated using
529 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
529 junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
530
530
531 if noise_exist:
531 if noise_exist:
532 # tmp_noise = jnoise[ich] / num_prof
532 # tmp_noise = jnoise[ich] / num_prof
@@ -579,7 +579,7 class SpectraLagsProc(ProcessingUnit):
579 xx = numpy.zeros([4,4])
579 xx = numpy.zeros([4,4])
580
580
581 for id1 in range(4):
581 for id1 in range(4):
582 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
582 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
583
583
584 xx_inv = numpy.linalg.inv(xx)
584 xx_inv = numpy.linalg.inv(xx)
585 xx = xx_inv[:,0]
585 xx = xx_inv[:,0]
@@ -605,14 +605,14 class SpectraLagsProc(ProcessingUnit):
605 cspower = cspower.sum(axis = 0)
605 cspower = cspower.sum(axis = 0)
606
606
607 cspsort = cspower.ravel().argsort()
607 cspsort = cspower.ravel().argsort()
608 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
608 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
609 junkcspc_interf = junkcspc_interf.transpose()
609 junkcspc_interf = junkcspc_interf.transpose()
610 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
610 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
611
611
612 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
612 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
613
613
614 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
614 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
615 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
615 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
616 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
616 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
617
617
618 for iprof in range(num_prof):
618 for iprof in range(num_prof):
@@ -629,7 +629,7 class SpectraLagsProc(ProcessingUnit):
629 xx = numpy.zeros([4,4])
629 xx = numpy.zeros([4,4])
630
630
631 for id1 in range(4):
631 for id1 in range(4):
632 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
632 xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
633
633
634 xx_inv = numpy.linalg.inv(xx)
634 xx_inv = numpy.linalg.inv(xx)
635 xx = xx_inv[:,0]
635 xx = xx_inv[:,0]
@@ -660,13 +660,13 class SpectraLagsProc(ProcessingUnit):
660 maxHei = self.dataOut.heightList[-1]
660 maxHei = self.dataOut.heightList[-1]
661
661
662 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
662 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
663 print 'minHei: %.2f is out of the heights range'%(minHei)
663 print('minHei: %.2f is out of the heights range'%(minHei))
664 print 'minHei is setting to %.2f'%(self.dataOut.heightList[0])
664 print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
665 minHei = self.dataOut.heightList[0]
665 minHei = self.dataOut.heightList[0]
666
666
667 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
667 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
668 print 'maxHei: %.2f is out of the heights range'%(maxHei)
668 print('maxHei: %.2f is out of the heights range'%(maxHei))
669 print 'maxHei is setting to %.2f'%(self.dataOut.heightList[-1])
669 print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
670 maxHei = self.dataOut.heightList[-1]
670 maxHei = self.dataOut.heightList[-1]
671
671
672 # velocity validation
672 # velocity validation
@@ -679,13 +679,13 class SpectraLagsProc(ProcessingUnit):
679 maxVel = velrange[-1]
679 maxVel = velrange[-1]
680
680
681 if (minVel < velrange[0]) or (minVel > maxVel):
681 if (minVel < velrange[0]) or (minVel > maxVel):
682 print 'minVel: %.2f is out of the velocity range'%(minVel)
682 print('minVel: %.2f is out of the velocity range'%(minVel))
683 print 'minVel is setting to %.2f'%(velrange[0])
683 print('minVel is setting to %.2f'%(velrange[0]))
684 minVel = velrange[0]
684 minVel = velrange[0]
685
685
686 if (maxVel > velrange[-1]) or (maxVel < minVel):
686 if (maxVel > velrange[-1]) or (maxVel < minVel):
687 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
687 print('maxVel: %.2f is out of the velocity range'%(maxVel))
688 print 'maxVel is setting to %.2f'%(velrange[-1])
688 print('maxVel is setting to %.2f'%(velrange[-1]))
689 maxVel = velrange[-1]
689 maxVel = velrange[-1]
690
690
691 # index selection for the range
691 # index selection for the range
@@ -707,7 +707,7 class SpectraLagsProc(ProcessingUnit):
707 maxIndex = len(heights)
707 maxIndex = len(heights)
708
708
709 if (minIndex < 0) or (minIndex > maxIndex):
709 if (minIndex < 0) or (minIndex > maxIndex):
710 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
710 raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
711
711
712 if (maxIndex >= self.dataOut.nHeights):
712 if (maxIndex >= self.dataOut.nHeights):
713 maxIndex = self.dataOut.nHeights-1
713 maxIndex = self.dataOut.nHeights-1
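
Both spectra modules keep numpy.complex(median_real, median_imag) when patching the DC profiles. numpy.complex was never more than an alias for the builtin complex and has since been removed from NumPy (deprecated in 1.20, dropped in 1.24), so the builtin spelling is the portable one. A small sketch with made-up values:

    import numpy

    median_real, median_imag = 0.12, -0.34                           # illustrative medians
    junkcspc_interf = numpy.zeros((8, 4), dtype=numpy.complex128)
    junkcspc_interf[[0, 4], :] = complex(median_real, median_imag)   # instead of numpy.complex(...)
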
@@ -1,8 +1,9
1 import sys
1 import sys
2 import numpy
2 import numpy
3 from scipy import interpolate
3 from scipy import interpolate
4 from schainpy import cSchain
4 #TODO
5 from jroproc_base import ProcessingUnit, Operation
5 #from schainpy import cSchain
6 from .jroproc_base import ProcessingUnit, Operation
6 from schainpy.model.data.jrodata import Voltage
7 from schainpy.model.data.jrodata import Voltage
7 from time import time
8 from time import time
8
9
@@ -71,7 +72,7 class VoltageProc(ProcessingUnit):
71
72
72 for channel in channelList:
73 for channel in channelList:
73 if channel not in self.dataOut.channelList:
74 if channel not in self.dataOut.channelList:
74 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
75 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
75
76
76 index = self.dataOut.channelList.index(channel)
77 index = self.dataOut.channelList.index(channel)
77 channelIndexList.append(index)
78 channelIndexList.append(index)
@@ -99,8 +100,8 class VoltageProc(ProcessingUnit):
99
100
100 for channelIndex in channelIndexList:
101 for channelIndex in channelIndexList:
101 if channelIndex not in self.dataOut.channelIndexList:
102 if channelIndex not in self.dataOut.channelIndexList:
102 print channelIndexList
103 print(channelIndexList)
103 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
104 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
104
105
105 if self.dataOut.flagDataAsBlock:
106 if self.dataOut.flagDataAsBlock:
106 """
107 """
@@ -184,7 +185,7 class VoltageProc(ProcessingUnit):
184 """
185 """
185
186
186 if (minIndex < 0) or (minIndex > maxIndex):
187 if (minIndex < 0) or (minIndex > maxIndex):
187 raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)
188 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
188
189
189 if (maxIndex >= self.dataOut.nHeights):
190 if (maxIndex >= self.dataOut.nHeights):
190 maxIndex = self.dataOut.nHeights
191 maxIndex = self.dataOut.nHeights
@@ -204,7 +205,7 class VoltageProc(ProcessingUnit):
204 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
205 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
205
206
206 if self.dataOut.nHeights <= 1:
207 if self.dataOut.nHeights <= 1:
207 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
208 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
208
209
209 return 1
210 return 1
210
211
@@ -221,7 +222,7 class VoltageProc(ProcessingUnit):
221 newheights = (self.dataOut.nHeights-r)/window
222 newheights = (self.dataOut.nHeights-r)/window
222
223
223 if newheights <= 1:
224 if newheights <= 1:
224 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
225 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window))
225
226
226 if self.dataOut.flagDataAsBlock:
227 if self.dataOut.flagDataAsBlock:
227 """
228 """
@@ -257,7 +258,7 class VoltageProc(ProcessingUnit):
257
258
258 if self.dataOut.flagDataAsBlock:
259 if self.dataOut.flagDataAsBlock:
259 flip = self.flip
260 flip = self.flip
260 profileList = range(self.dataOut.nProfiles)
261 profileList = list(range(self.dataOut.nProfiles))
261
262
262 if not channelList:
263 if not channelList:
263 for thisProfile in profileList:
264 for thisProfile in profileList:
@@ -306,7 +307,7 class VoltageProc(ProcessingUnit):
306 else:
307 else:
307 nHeights = self.dataOut.data.shape[2]
308 nHeights = self.dataOut.data.shape[2]
308 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
309 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
309 y = self.dataOut.data[:,:,range(botLim)+range(topLim+1,nHeights)]
310 y = self.dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
310 f = interpolate.interp1d(x, y, axis = 2)
311 f = interpolate.interp1d(x, y, axis = 2)
311 xnew = numpy.arange(botLim,topLim+1)
312 xnew = numpy.arange(botLim,topLim+1)
312 ynew = f(xnew)
313 ynew = f(xnew)
@@ -355,7 +356,7 class CohInt(Operation):
355 self.stride = stride
356 self.stride = stride
356
357
357 if n == None and timeInterval == None:
358 if n == None and timeInterval == None:
358 raise ValueError, "n or timeInterval should be specified ..."
359 raise ValueError("n or timeInterval should be specified ...")
359
360
360 if n != None:
361 if n != None:
361 self.n = n
362 self.n = n
@@ -613,7 +614,7 class Decoder(Operation):
613 self.__nHeis = dataOut.nHeights
614 self.__nHeis = dataOut.nHeights
614
615
615 if self.__nHeis < self.nBaud:
616 if self.__nHeis < self.nBaud:
616 raise ValueError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
617 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
617
618
618 #Frequency
619 #Frequency
619 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
620 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
@@ -666,7 +667,7 class Decoder(Operation):
666 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
667 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
667 junk = junk.flatten()
668 junk = junk.flatten()
668 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
669 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
669 profilesList = xrange(self.__nProfiles)
670 profilesList = range(self.__nProfiles)
670
671
671 for i in range(self.__nChannels):
672 for i in range(self.__nChannels):
672 for j in profilesList:
673 for j in profilesList:
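
The hunk above swaps xrange for range. In Python 3, range is already a lazy sequence object, much like the old xrange, so iterating over profilesList allocates no list; list() is only needed when a real, mutable list is required. A short sketch with an assumed profile count:

    nProfiles = 128                        # assumed value for the sketch
    profilesList = range(nProfiles)        # lazy, like Python 2's xrange

    for j in profilesList:                 # iteration works exactly as before
        pass

    as_list = list(profilesList)           # materialise only if mutation or concatenation is needed
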
@@ -675,7 +676,7 class Decoder(Operation):
675
676
676 def __convolutionByBlockInFreq(self, data):
677 def __convolutionByBlockInFreq(self, data):
677
678
678 raise NotImplementedError, "Decoder by frequency for Blocks not implemented"
679 raise NotImplementedError("Decoder by frequency for Blocks not implemented")
679
680
680
681
681 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
682 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
@@ -692,13 +693,13 class Decoder(Operation):
692 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
693 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
693
694
694 if dataOut.flagDecodeData:
695 if dataOut.flagDecodeData:
695 print "This data is already decoded, recoding again ..."
696 print("This data is already decoded, recoding again ...")
696
697
697 if not self.isConfig:
698 if not self.isConfig:
698
699
699 if code is None:
700 if code is None:
700 if dataOut.code is None:
701 if dataOut.code is None:
701 raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type
702 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
702
703
703 code = dataOut.code
704 code = dataOut.code
704 else:
705 else:
@@ -714,7 +715,7 class Decoder(Operation):
714 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
715 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
715
716
716 if self.code is None:
717 if self.code is None:
717 print "Fail decoding: Code is not defined."
718 print("Fail decoding: Code is not defined.")
718 return
719 return
719
720
720 self.__nProfiles = dataOut.nProfiles
721 self.__nProfiles = dataOut.nProfiles
@@ -746,7 +747,7 class Decoder(Operation):
746 datadec = self.__convolutionInFreqOpt(dataOut.data)
747 datadec = self.__convolutionInFreqOpt(dataOut.data)
747
748
748 if datadec is None:
749 if datadec is None:
749 raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode
750 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
750
751
751 dataOut.code = self.code
752 dataOut.code = self.code
752 dataOut.nCode = self.nCode
753 dataOut.nCode = self.nCode
@@ -803,7 +804,7 class ProfileConcat(Operation):
803 self.isConfig = True
804 self.isConfig = True
804
805
805 if dataOut.flagDataAsBlock:
806 if dataOut.flagDataAsBlock:
806 raise ValueError, "ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False"
807 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
807
808
808 else:
809 else:
809 self.concat(dataOut.data)
810 self.concat(dataOut.data)
@@ -883,7 +884,7 class ProfileSelector(Operation):
883 if profileRangeList != None:
884 if profileRangeList != None:
884 minIndex = profileRangeList[0]
885 minIndex = profileRangeList[0]
885 maxIndex = profileRangeList[1]
886 maxIndex = profileRangeList[1]
886 profileList = range(minIndex, maxIndex+1)
887 profileList = list(range(minIndex, maxIndex+1))
887
888
888 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
889 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
889
890
@@ -895,7 +896,7 class ProfileSelector(Operation):
895 minIndex = thisRange[0]
896 minIndex = thisRange[0]
896 maxIndex = thisRange[1]
897 maxIndex = thisRange[1]
897
898
898 profileList.extend(range(minIndex, maxIndex+1))
899 profileList.extend(list(range(minIndex, maxIndex+1)))
899
900
900 dataOut.data = dataOut.data[:,profileList,:]
901 dataOut.data = dataOut.data[:,profileList,:]
901
902
@@ -974,7 +975,7 class ProfileSelector(Operation):
974
975
975 return True
976 return True
976
977
977 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
978 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
978
979
979 return False
980 return False
980
981
@@ -1015,21 +1016,21 class Reshaper(Operation):
1015 def __checkInputs(self, dataOut, shape, nTxs):
1016 def __checkInputs(self, dataOut, shape, nTxs):
1016
1017
1017 if shape is None and nTxs is None:
1018 if shape is None and nTxs is None:
1018 raise ValueError, "Reshaper: shape of factor should be defined"
1019 raise ValueError("Reshaper: shape of factor should be defined")
1019
1020
1020 if nTxs:
1021 if nTxs:
1021 if nTxs < 0:
1022 if nTxs < 0:
1022 raise ValueError, "nTxs should be greater than 0"
1023 raise ValueError("nTxs should be greater than 0")
1023
1024
1024 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1025 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1025 raise ValueError, "nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs))
1026 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1026
1027
1027 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1028 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1028
1029
1029 return shape, nTxs
1030 return shape, nTxs
1030
1031
1031 if len(shape) != 2 and len(shape) != 3:
1032 if len(shape) != 2 and len(shape) != 3:
1032 raise ValueError, "shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights)
1033 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1033
1034
1034 if len(shape) == 2:
1035 if len(shape) == 2:
1035 shape_tuple = [dataOut.nChannels]
1036 shape_tuple = [dataOut.nChannels]
@@ -1069,7 +1070,7 class Reshaper(Operation):
1069 profileIndex = dataOut.profileIndex*nTxs
1070 profileIndex = dataOut.profileIndex*nTxs
1070
1071
1071 else:
1072 else:
1072 raise ValueError, "nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)"
1073 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1073
1074
1074 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1075 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1075
1076
@@ -1098,7 +1099,7 class SplitProfiles(Operation):
1098 shape = dataOut.data.shape
1099 shape = dataOut.data.shape
1099
1100
1100 if shape[2] % n != 0:
1101 if shape[2] % n != 0:
1101 raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[2])
1102 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
1102
1103
1103 new_shape = shape[0], shape[1]*n, shape[2]/n
1104 new_shape = shape[0], shape[1]*n, shape[2]/n
1104
1105
@@ -1109,7 +1110,7 class SplitProfiles(Operation):
1109
1110
1110 else:
1111 else:
1111
1112
1112 raise ValueError, "Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)"
1113 raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")
1113
1114
1114 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1115 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1115
1116
@@ -1141,7 +1142,7 class CombineProfiles(Operation):
1141 new_shape = shape[0], shape[1]/n, shape[2]*n
1142 new_shape = shape[0], shape[1]/n, shape[2]*n
1142
1143
1143 if shape[1] % n != 0:
1144 if shape[1] % n != 0:
1144 raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[1])
1145 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))
1145
1146
1146 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1147 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1147 dataOut.flagNoData = False
1148 dataOut.flagNoData = False
@@ -11,7 +11,7 from time import gmtime
11
11
12 from numpy import transpose
12 from numpy import transpose
13
13
14 from jroproc_base import ProcessingUnit, Operation
14 from .jroproc_base import ProcessingUnit, Operation
15 from schainpy.model.data.jrodata import Parameters
15 from schainpy.model.data.jrodata import Parameters
16
16
17
17
@@ -3,7 +3,7 Created on Jul 15, 2014
3
3
4 @author: Miguel Urco
4 @author: Miguel Urco
5 '''
5 '''
6 from serializer import DynamicSerializer
6 from .serializer import DynamicSerializer
7
7
8 DEFAULT_SERIALIZER = None #'cPickle', 'msgpack', "yaml"
8 DEFAULT_SERIALIZER = None #'cPickle', 'msgpack', "yaml"
9
9
@@ -20,7 +20,7 def isDictFormat(thisValue):
20 if type(thisValue) != type({}):
20 if type(thisValue) != type({}):
21 return False
21 return False
22
22
23 if CLASSNAME_KEY not in thisValue.keys():
23 if CLASSNAME_KEY not in list(thisValue.keys()):
24 return False
24 return False
25
25
26 return True
26 return True
@@ -28,13 +28,13 def isDictFormat(thisValue):
28 def obj2Dict(myObj, keyList=[]):
28 def obj2Dict(myObj, keyList=[]):
29
29
30 if not keyList:
30 if not keyList:
31 keyList = myObj.__dict__.keys()
31 keyList = list(myObj.__dict__.keys())
32
32
33 myDict = {}
33 myDict = {}
34
34
35 myDict[CLASSNAME_KEY] = myObj.__class__.__name__
35 myDict[CLASSNAME_KEY] = myObj.__class__.__name__
36
36
37 for thisKey, thisValue in myObj.__dict__.items():
37 for thisKey, thisValue in list(myObj.__dict__.items()):
38
38
39 if thisKey not in keyList:
39 if thisKey not in keyList:
40 continue
40 continue
@@ -52,14 +52,14 def dict2Obj(myDict):
52 '''
52 '''
53 '''
53 '''
54
54
55 if CLASSNAME_KEY not in myDict.keys():
55 if CLASSNAME_KEY not in list(myDict.keys()):
56 return None
56 return None
57
57
58 className = eval(myDict[CLASSNAME_KEY])
58 className = eval(myDict[CLASSNAME_KEY])
59
59
60 myObj = className()
60 myObj = className()
61
61
62 for thisKey, thisValue in myDict.items():
62 for thisKey, thisValue in list(myDict.items()):
63
63
64 if thisKey == CLASSNAME_KEY:
64 if thisKey == CLASSNAME_KEY:
65 continue
65 continue
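
The obj2Dict/dict2Obj hunks wrap .keys() and .items() in list() because Python 3 returns views rather than lists. Plain iteration over a view is fine; the list() call only matters when the result is indexed, concatenated, or the dictionary is mutated while looping. A toy sketch (the Toy class is invented for illustration):

    class Toy(object):
        def __init__(self):
            self.a, self.b = 1, 2

    myObj = Toy()
    keyList = list(myObj.__dict__.keys())               # concrete list: can be indexed or extended
    for thisKey, thisValue in myObj.__dict__.items():   # a view is enough for iteration
        print(thisKey, thisValue)
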
@@ -7,7 +7,7 Created on Jul 17, 2014
7 DEFAULT_SERIALIZER = None
7 DEFAULT_SERIALIZER = None
8
8
9 try:
9 try:
10 import cPickle
10 import pickle
11 DEFAULT_SERIALIZER = 'cPickle'
11 DEFAULT_SERIALIZER = 'cPickle'
12 except:
12 except:
13 pass
13 pass
@@ -86,7 +86,7 class DynamicSerializer(Serializer):
86 def __init__(self, module = None):
86 def __init__(self, module = None):
87
87
88 if not DEFAULT_SERIALIZER:
88 if not DEFAULT_SERIALIZER:
89 raise ImportError, "Install a python serializer like cPickle or msgpack"
89 raise ImportError("Install a python serializer like cPickle or msgpack")
90
90
91 if not module:
91 if not module:
92 module = DEFAULT_SERIALIZER
92 module = DEFAULT_SERIALIZER
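
Python 3 has no separate cPickle module; pickle picks up the C accelerator automatically. If other code still expects the Python 2 name, a guarded alias import keeps both interpreters happy; this is a sketch of that idiom, not the file's actual layout:

    try:
        import cPickle as pickle       # Python 2: explicit C implementation
    except ImportError:
        import pickle                  # Python 3: C accelerator is built in

    DEFAULT_SERIALIZER = 'cPickle'     # label kept as in the module
    data = pickle.dumps({'serializer': DEFAULT_SERIALIZER})
    assert pickle.loads(data)['serializer'] == 'cPickle'
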
@@ -7,7 +7,7 matching signatures.
7 $Id$
7 $Id$
8 '''
8 '''
9
9
10 import urllib
10 import urllib.request, urllib.parse, urllib.error
11 import os
11 import os
12 import re
12 import re
13 import yaml # YAML Ain't Markup Language
13 import yaml # YAML Ain't Markup Language
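
2to3 expands import urllib into the three Python 3 submodules, and the later hunks in this file call urllib.request.urlopen accordingly. A compact sketch of the call-site change; the URL is a placeholder, not one used by the project:

    import urllib.request

    def fetch_yaml(object_uri):
        # Python 2: urllib.urlopen(object_uri).read()
        with urllib.request.urlopen(object_uri) as response:
            return response.read()

    # fetch_yaml("http://example.org/definitions.yml")
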
@@ -40,7 +40,7 class Object(object):
40 elif isinstance(object_uri, str):
40 elif isinstance(object_uri, str):
41 if object_uri.endswith('.yml'):
41 if object_uri.endswith('.yml'):
42 # URI is a web hyper-linked yaml file - read it.
42 # URI is a web hyper-linked yaml file - read it.
43 self.yaml = urllib.urlopen(object_uri).read()
43 self.yaml = urllib.request.urlopen(object_uri).read()
44 else:
44 else:
45 # URI is a (hyper-linked?) directory - try reading it.
45 # URI is a (hyper-linked?) directory - try reading it.
46 #print "URI is a directory."
46 #print "URI is a directory."
@@ -55,12 +55,12 class Object(object):
55 for fn in self.files:
55 for fn in self.files:
56 self.yaml.append(Object(fn))
56 self.yaml.append(Object(fn))
57 else:
57 else:
58 print "Invalid URI supplied: %s"%(object_uri,)
58 print("Invalid URI supplied: %s"%(object_uri,))
59
59
60 def __parseLink(self, object_uri, recursive):
60 def __parseLink(self, object_uri, recursive):
61 """ Returns a listing of all YAML files located in the
61 """ Returns a listing of all YAML files located in the
62 hyper-link directory given by page. """
62 hyper-link directory given by page. """
63 page = urllib.urlopen(object_uri).read()
63 page = urllib.request.urlopen(object_uri).read()
64 #print "URI is a URL directory: %s"%(object_uri,)
64 #print "URI is a URL directory: %s"%(object_uri,)
65 pattern = re.compile(r'<a href="[^"]*">')
65 pattern = re.compile(r'<a href="[^"]*">')
66
66
@@ -120,8 +120,8 class Object(object):
120
120
121 if not isinstance(obj, Object): return False
121 if not isinstance(obj, Object): return False
122
122
123 self_keys = self.__dict__.keys()
123 self_keys = list(self.__dict__.keys())
124 obj_keys = obj.__dict__.keys()
124 obj_keys = list(obj.__dict__.keys())
125 if not self_keys == obj_keys:
125 if not self_keys == obj_keys:
126 return False
126 return False
127 for key in self_keys:
127 for key in self_keys:
@@ -132,8 +132,8 class Object(object):
132 if not self_value.equals(obj_value, compare_time_created):
132 if not self_value.equals(obj_value, compare_time_created):
133 return False
133 return False
134 elif isinstance(self_value, np.ndarray):
134 elif isinstance(self_value, np.ndarray):
135 m1 = map(repr,self_value.flat)
135 m1 = list(map(repr,self_value.flat))
136 m2 = map(repr,obj_value.flat)
136 m2 = list(map(repr,obj_value.flat))
137 ret = m1 == m2
137 ret = m1 == m2
138 if not ret:
138 if not ret:
139 return False
139 return False
@@ -147,7 +147,7 class Object(object):
147 def sizeof(self):
147 def sizeof(self):
148 """ Recursively computes the size in bytes of the given Dynamic Object """
148 """ Recursively computes the size in bytes of the given Dynamic Object """
149 sz = 0
149 sz = 0
150 values = self.__dict__.values()
150 values = list(self.__dict__.values())
151 for val in values:
151 for val in values:
152 if isinstance(val, Object): sz += val.sizeof()
152 if isinstance(val, Object): sz += val.sizeof()
153 elif isinstance(val, np.ndarray): sz += val.nbytes
153 elif isinstance(val, np.ndarray): sz += val.nbytes
@@ -281,7 +281,7 def __ref_constructor(loader, node):
281 return _Reference(loader.construct_mapping(node))
281 return _Reference(loader.construct_mapping(node))
282 else:
282 else:
283 return _Reference(loader.construct_scalar(node))
283 return _Reference(loader.construct_scalar(node))
284 add_constructor(u'!ref', __ref_constructor)
284 add_constructor('!ref', __ref_constructor)
285
285
286 # Method constructor using !method tag:
286 # Method constructor using !method tag:
287 def __method_constructor(loader, node):
287 def __method_constructor(loader, node):
@@ -289,7 +289,7 def __method_constructor(loader, node):
289 return _Method(loader.construct_mapping(node))
289 return _Method(loader.construct_mapping(node))
290 else:
290 else:
291 return _Method(loader.construct_scalar(node))
291 return _Method(loader.construct_scalar(node))
292 add_constructor(u'!method', __method_constructor)
292 add_constructor('!method', __method_constructor)
293
293
294 # Generic constructor for any _BuiltinDtype
294 # Generic constructor for any _BuiltinDtype
295 def __dtype_constructor(loader, node):
295 def __dtype_constructor(loader, node):
@@ -302,8 +302,8 def __dtype_constructor(loader, node):
302 return ret
302 return ret
303
303
304 # Register YAML constructors for each builtin type:
304 # Register YAML constructors for each builtin type:
305 for dtype in Lookup.numpy_dtypes.keys() + Lookup.builtin_objects.keys():
305 for dtype in list(Lookup.numpy_dtypes.keys()) + list(Lookup.builtin_objects.keys()):
306 add_constructor(u'!%s'%(dtype,), __dtype_constructor)
306 add_constructor('!%s'%(dtype,), __dtype_constructor)
307
307
308 class FactoryLoader(OrderedYAML.Loader):
308 class FactoryLoader(OrderedYAML.Loader):
309 """ A YAML Loader specifically designed to load YAML object definitions
309 """ A YAML Loader specifically designed to load YAML object definitions
@@ -311,7 +311,7 class FactoryLoader(OrderedYAML.Loader):
311
311
312 def construct_yaml_timestamp(self, node):
312 def construct_yaml_timestamp(self, node):
313 """ Make empty timestamps (None/null) acceptable, otherwise parse the timestamp """
313 """ Make empty timestamps (None/null) acceptable, otherwise parse the timestamp """
314 if node.value == u'':
314 if node.value == '':
315 name = 'YAML_DEFN_LOADED_INCORRECTLY' # in case we forget to fix the name...
315 name = 'YAML_DEFN_LOADED_INCORRECTLY' # in case we forget to fix the name...
316 return _Parameter(name, hasDefault=False, classType=datetime.datetime)
316 return _Parameter(name, hasDefault=False, classType=datetime.datetime)
317 else:
317 else:
@@ -319,7 +319,7 class FactoryLoader(OrderedYAML.Loader):
319
319
320 # Override default timestamp constructor:
320 # Override default timestamp constructor:
321 FactoryLoader.add_constructor(
321 FactoryLoader.add_constructor(
322 u'tag:yaml.org,2002:timestamp',
322 'tag:yaml.org,2002:timestamp',
323 FactoryLoader.construct_yaml_timestamp
323 FactoryLoader.construct_yaml_timestamp
324 )
324 )
325
325
@@ -414,7 +414,7 class Factory:
414 return _Parameter(sigName, True, default, length=None)
414 return _Parameter(sigName, True, default, length=None)
415
415
416 # Is the object an array with length and default value given?:
416 # Is the object an array with length and default value given?:
417 if isinstance(sig.yamlString, dict) and "len" in sig.yamlString.keys():
417 if isinstance(sig.yamlString, dict) and "len" in list(sig.yamlString.keys()):
418 length = sig.yamlString["len"]
418 length = sig.yamlString["len"]
419
419
420 # Shape is given as something like [[],[]], not [2,2] - convert
420 # Shape is given as something like [[],[]], not [2,2] - convert
@@ -495,7 +495,7 class Factory:
495
495
496 # List of names of classes we've created so far:
496 # List of names of classes we've created so far:
497 #print [x for x in objClasses]
497 #print [x for x in objClasses]
498 names = objClasses.keys()
498 names = list(objClasses.keys())
499
499
500 if ref_object.yamlString in names:
500 if ref_object.yamlString in names:
501 defaultType = objClasses[ref_object.yamlString]
501 defaultType = objClasses[ref_object.yamlString]
@@ -594,7 +594,7 class Factory:
594 setattr(_self, classData[i].name, arg)
594 setattr(_self, classData[i].name, arg)
595
595
596 # Set named attributes (given by dictionary kwargs):
596 # Set named attributes (given by dictionary kwargs):
597 for key,value in kwargs.items():
597 for key,value in list(kwargs.items()):
598
598
599 try: keyIndex = [param.name for param in classData].index(key)
599 try: keyIndex = [param.name for param in classData].index(key)
600 except ValueError:
600 except ValueError:
@@ -605,7 +605,7 class Factory:
605
605
606
606
607 # Object instantiation / creation time (if not already present):
607 # Object instantiation / creation time (if not already present):
608 if not kwargs.has_key('__time_created'):
608 if '__time_created' not in kwargs:
609 setattr(_self, "__time_created", np.float64(time.time()))
609 setattr(_self, "__time_created", np.float64(time.time()))
610
610
611 return init, attributes
611 return init, attributes
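
dict.has_key() is gone in Python 3; the membership operator used in the new line works on both versions. A tiny sketch with an invented timestamp value:

    import time

    kwargs = {}                                      # as if no '__time_created' was passed
    # Python 2 only: if not kwargs.has_key('__time_created'):
    if '__time_created' not in kwargs:
        kwargs['__time_created'] = float(time.time())
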
@@ -616,7 +616,7 class Factory:
616 a KeyError if the class cannot be found. """
616 a KeyError if the class cannot be found. """
617
617
618 # If class definition was in the YAML file, extend that one:
618 # If class definition was in the YAML file, extend that one:
619 if className in localClasses.keys():
619 if className in list(localClasses.keys()):
620 return localClasses[className]
620 return localClasses[className]
621
621
622 # Else try finding the class definition in our global scope:
622 # Else try finding the class definition in our global scope:
@@ -647,7 +647,7 class Factory:
647 # Each document can contain multiple objects - build each one.
647 # Each document can contain multiple objects - build each one.
648 # (NOTE: objects can cross reference each other in the same document
648 # (NOTE: objects can cross reference each other in the same document
649 # need to resolve Reference objects as last step)
649 # need to resolve Reference objects as last step)
650 for objClassName in document.keys():
650 for objClassName in list(document.keys()):
651
651
652 # The dictionary containing method & data signatures:
652 # The dictionary containing method & data signatures:
653 objDict = document[objClassName]
653 objDict = document[objClassName]
@@ -659,9 +659,9 class Factory:
659 classBases = [Object]
659 classBases = [Object]
660
660
661 # List structured documents result in a list of dicts each with one key:
661 # List structured documents result in a list of dicts each with one key:
662 if isinstance(objDict, list): keys = [param.keys()[0] for param in objDict]
662 if isinstance(objDict, list): keys = [list(param.keys())[0] for param in objDict]
663 # Otherwise the parameter names are just the keys of the dict
663 # Otherwise the parameter names are just the keys of the dict
664 else: keys = objDict.keys() # if key not found, raises AttributeError
664 else: keys = list(objDict.keys()) # if key not found, raises AttributeError
665
665
666 for sigName in keys:
666 for sigName in keys:
667 #print sigName
667 #print sigName
@@ -696,7 +696,7 class Factory:
696 else:
696 else:
697 msg = "Factory abstract base class doesn't " +\
697 msg = "Factory abstract base class doesn't " +\
698 "support the following signature: %r \"%s\""%(sig.__class__,str(sig))
698 "support the following signature: %r \"%s\""%(sig.__class__,str(sig))
699 print sig.__class__
699 print(sig.__class__)
700 raise SignatureException(msg)
700 raise SignatureException(msg)
701
701
702 # Built-in attribute for all Dynamic Objects:
702 # Built-in attribute for all Dynamic Objects:
@@ -731,12 +731,12 class Factory:
731 def construct_dynamic_object(loader, node):
731 def construct_dynamic_object(loader, node):
732 kwargs = loader.construct_mapping(node)
732 kwargs = loader.construct_mapping(node)
733 # Remove revision control from loaded objects (info is in the class object!)
733 # Remove revision control from loaded objects (info is in the class object!)
734 for arg in kwargs.keys():
734 for arg in list(kwargs.keys()):
735 if arg in getattr(Object, 'getters') and arg != '__time_created':
735 if arg in getattr(Object, 'getters') and arg != '__time_created':
736 del kwargs[arg]
736 del kwargs[arg]
737 return cls(**kwargs)
737 return cls(**kwargs)
738 revision = cls.meta_attributes["__revision_number"]
738 revision = cls.meta_attributes["__revision_number"]
739 DynamicYAML.Loader.add_constructor(u'!%s.%s'%(str(objClassName),revision), construct_dynamic_object)
739 DynamicYAML.Loader.add_constructor('!%s.%s'%(str(objClassName),revision), construct_dynamic_object)
740
740
741 represent_dynamic_object = DynamicYAML.Dumper.represent_dynamic_object
741 represent_dynamic_object = DynamicYAML.Dumper.represent_dynamic_object
742 DynamicYAML.Dumper.add_representer(cls, represent_dynamic_object)
742 DynamicYAML.Dumper.add_representer(cls, represent_dynamic_object)
@@ -748,19 +748,19 class Factory:
748 except KeyError:
748 except KeyError:
749 # Now look for reference to class object loaded from any YAML defn file, loading the
749 # Now look for reference to class object loaded from any YAML defn file, loading the
750 # most recent version / revision (number) of the definition
750 # most recent version / revision (number) of the definition
751 for dynClass in Object.dynamicClasses.keys()[::-1]:
751 for dynClass in list(Object.dynamicClasses.keys())[::-1]:
752 if dynClass.startswith(className):
752 if dynClass.startswith(className):
753 return Object.dynamicClasses[dynClass]
753 return Object.dynamicClasses[dynClass]
754
754
755 # Still unresolved - raise exception:
755 # Still unresolved - raise exception:
756 allDynamicClasses = repr(objClasses.keys() + Object.dynamicClasses.keys())
756 allDynamicClasses = repr(list(objClasses.keys()) + list(Object.dynamicClasses.keys()))
757 raise UnresolvedTypeException("Cannot resolve type '%s': Name not found in %s"%(className,allDynamicClasses))
757 raise UnresolvedTypeException("Cannot resolve type '%s': Name not found in %s"%(className,allDynamicClasses))
758
758
759
759
760 def resolve(param):
760 def resolve(param):
761
761
762 # Reference is just a string - that's the class name:
762 # Reference is just a string - that's the class name:
763 if isinstance(param.classType.yamlObject, (str, unicode)):
763 if isinstance(param.classType.yamlObject, str):
764 className = str(param.classType.yamlObject)
764 className = str(param.classType.yamlObject)
765 param.classType = findClass(className)
765 param.classType = findClass(className)
766 return
766 return
@@ -796,7 +796,7 class Factory:
796 param.hasDefault = False # for good measure
796 param.hasDefault = False # for good measure
797
797
798 # Is it an object array?:
798 # Is it an object array?:
799 if "len" in refDict.keys():
799 if "len" in list(refDict.keys()):
800 param.length = refDict["len"]
800 param.length = refDict["len"]
801
801
802 # Resolve any unresolved data-types:
802 # Resolve any unresolved data-types:
@@ -810,7 +810,6 class Factory:
810 def load_defn(yaml):
810 def load_defn(yaml):
811 """ Shortcut for producing a single DynamicObject class object from
811 """ Shortcut for producing a single DynamicObject class object from
812 the provided yaml definition in string format """
812 the provided yaml definition in string format """
813 return Factory(yaml=yaml).classes.values()[0]
813 return list(Factory(yaml=yaml).classes.values())[0]
814
815
814
816
815
@@ -66,4 +66,4 class DynamicSerializer:
66
66
67 if __name__ == "__main__":
67 if __name__ == "__main__":
68 DynamicSerializer()
68 DynamicSerializer()
69 print "DynamicSerializer ran" No newline at end of file
69 print("DynamicSerializer ran") No newline at end of file
@@ -54,7 +54,7 class Loader(OrderedYAML.Loader):
54 data = self.construct_mapping(self, node)
54 data = self.construct_mapping(self, node)
55 self.constructed_objects[node] = data
55 self.constructed_objects[node] = data
56 del self.recursive_objects[node]
56 del self.recursive_objects[node]
57 if data.has_key('__revision_source'):
57 if '__revision_source' in data:
58 # TODO: Handle password authentication
58 # TODO: Handle password authentication
59 client = pysvn.Client()
59 client = pysvn.Client()
60 source = data['__revision_source']
60 source = data['__revision_source']
@@ -85,11 +85,11 class Dumper(OrderedYAML.Dumper):
85 """
85 """
86
86
87 state = {}
87 state = {}
88 state.update(obj.__dict__.items())
88 state.update(list(obj.__dict__.items()))
89 state.update(obj.__class__.meta_attributes.items())
89 state.update(list(obj.__class__.meta_attributes.items()))
90 name = obj.getObjectName() # obj.__class__.__name__
90 name = obj.getObjectName() # obj.__class__.__name__
91 revision = obj.getRevisionNumber()
91 revision = obj.getRevisionNumber()
92 return self.represent_mapping(u'!%s.%s' % (name, revision), state)
92 return self.represent_mapping('!%s.%s' % (name, revision), state)
93
93
94 # Dtypes to be stored as hex in YAML streams / strings
94 # Dtypes to be stored as hex in YAML streams / strings
95 hex_dtypes = ['float', 'complex', 'half', 'single', 'double']
95 hex_dtypes = ['float', 'complex', 'half', 'single', 'double']
@@ -98,7 +98,7 hex_dtypes = ['float', 'complex', 'half', 'single', 'double']
98 dtypes = Lookup.numpy_dtypes
98 dtypes = Lookup.numpy_dtypes
99
99
100 # Inverse lookup for accessing tags given a class instance:
100 # Inverse lookup for accessing tags given a class instance:
101 cls_dtypes = dict([(v,k) for (k,v) in dtypes.items()])
101 cls_dtypes = dict([(v,k) for (k,v) in list(dtypes.items())])
102
102
103 # Representer for numpy arrays:
103 # Representer for numpy arrays:
104 def ndarray_representer(dumper, obj):
104 def ndarray_representer(dumper, obj):
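Editor's note: dict() accepts any iterable of key/value pairs, so the list() call 2to3 added around dtypes.items() is harmless but unnecessary; the usual Python 3 spelling of this inverse lookup is a dict comprehension. A sketch, assuming dtypes is the tag-to-class mapping defined above:

    cls_dtypes = {cls: tag for tag, cls in dtypes.items()}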
@@ -108,23 +108,23 def ndarray_representer(dumper, obj):
108 np_ary = obj
108 np_ary = obj
109 #hex_ary = np.empty(np_ary.shape, dtype=yaml.nodes.ScalarNode)
109 #hex_ary = np.empty(np_ary.shape, dtype=yaml.nodes.ScalarNode)
110 np_flat, hex_flat = np_ary.flat, [] #hex_ary.flat
110 np_flat, hex_flat = np_ary.flat, [] #hex_ary.flat
111 hex_flat.append(dumper.represent_sequence(u'tag:yaml.org,2002:seq', list(np_ary.shape), flow_style=True))
111 hex_flat.append(dumper.represent_sequence('tag:yaml.org,2002:seq', list(np_ary.shape), flow_style=True))
112 if hexlify:
112 if hexlify:
113 lst = []
113 lst = []
114 for i in range(len(np_flat)):
114 for i in range(len(np_flat)):
115 value = u'%s'%(np_flat[i],)
115 value = '%s'%(np_flat[i],)
116 node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='')
116 node = dumper.represent_scalar('tag:yaml.org,2002:str', value, style='')
117 lst.append(node)
117 lst.append(node)
118 hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True))
118 hex_flat.append(yaml.nodes.SequenceNode('tag:yaml.org,2002:seq', lst, flow_style=True))
119 lst = []
119 lst = []
120 for i in range(len(np_flat)):
120 for i in range(len(np_flat)):
121 if hexlify: value = u'%s'%(binascii.hexlify(np_flat[i]),)
121 if hexlify: value = '%s'%(binascii.hexlify(np_flat[i]),)
122 else: value = u'%s'%(np_flat[i],)
122 else: value = '%s'%(np_flat[i],)
123 node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='')
123 node = dumper.represent_scalar('tag:yaml.org,2002:str', value, style='')
124 if hexlify: lst.append(node)
124 if hexlify: lst.append(node)
125 else: hex_flat.append(node)
125 else: hex_flat.append(node)
126 if hexlify: hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True))
126 if hexlify: hex_flat.append(yaml.nodes.SequenceNode('tag:yaml.org,2002:seq', lst, flow_style=True))
127 return yaml.nodes.SequenceNode(u'!%s'%(tag,), hex_flat, flow_style=True)
127 return yaml.nodes.SequenceNode('!%s'%(tag,), hex_flat, flow_style=True)
128 Dumper.add_representer(np.ndarray, ndarray_representer)
128 Dumper.add_representer(np.ndarray, ndarray_representer)
129
129
130 # Constructor for ndarrays with arbitrary (specified) dtype:
130 # Constructor for ndarrays with arbitrary (specified) dtype:
@@ -172,9 +172,9 class __dtype_con:
172 def dtype_representer(dumper, obj):
172 def dtype_representer(dumper, obj):
173 tag, hexlify, dtype = self.fncn_attributes
173 tag, hexlify, dtype = self.fncn_attributes
174 if isinstance(obj, float): obj = np.float64(obj)
174 if isinstance(obj, float): obj = np.float64(obj)
175 if hexlify: value = u'%s'%(binascii.hexlify(obj),)
175 if hexlify: value = '%s'%(binascii.hexlify(obj),)
176 else: value = u'%s'%(obj,)
176 else: value = '%s'%(obj,)
177 try: tag = u'!%s'%(cls_dtypes[obj.__class__]) # 'dtype.'+obj.__class__.__name__ # bullshit...
177 try: tag = '!%s'%(cls_dtypes[obj.__class__]) # 'dtype.'+obj.__class__.__name__ # bullshit...
178 except KeyError: tag = ''
178 except KeyError: tag = ''
179 node = dumper.represent_scalar(tag, value, style='')
179 node = dumper.represent_scalar(tag, value, style='')
180 return node
180 return node
@@ -182,40 +182,39 class __dtype_con:
182 self.dtype_constructor = dtype_constructor
182 self.dtype_constructor = dtype_constructor
183 self.dtype_representer = dtype_representer
183 self.dtype_representer = dtype_representer
184
184
185 keys = [x for x in dtypes.keys() if x != 'dtype.int' and x != 'dtype.bool']
185 keys = [x for x in list(dtypes.keys()) if x != 'dtype.int' and x != 'dtype.bool']
186 print keys
186 print(keys)
187
187
188 n = len(keys)
188 n = len(keys)
189 print n
189 print(n)
190 i=0
190 i=0
191
191
192 for tag in keys:
192 for tag in keys:
193 dtype = __dtype_con(tag)
193 dtype = __dtype_con(tag)
194 dtype_constructor = dtype.dtype_constructor
194 dtype_constructor = dtype.dtype_constructor
195 dtype_representer = dtype.dtype_representer
195 dtype_representer = dtype.dtype_representer
196 Loader.add_constructor(u'!%s'%(tag,), dtype_constructor)
196 Loader.add_constructor('!%s'%(tag,), dtype_constructor)
197 Dumper.add_representer(dtypes[tag], dtype_representer)
197 Dumper.add_representer(dtypes[tag], dtype_representer)
198
198
199 # Precision time constructors & representers:
199 # Precision time constructors & representers:
200 def ns_rep(dumper, obj):
200 def ns_rep(dumper, obj):
201 state = {'second': obj.__dict__['second'], 'nanosecond': obj.__dict__['nanosecond']}
201 state = {'second': obj.__dict__['second'], 'nanosecond': obj.__dict__['nanosecond']}
202 return dumper.represent_mapping(u'!timestamp_ns', state)
202 return dumper.represent_mapping('!timestamp_ns', state)
203 def ps_rep(dumper, obj):
203 def ps_rep(dumper, obj):
204 state = {'second': obj.__dict__['second'], 'picosecond': obj.__dict__['picosecond']}
204 state = {'second': obj.__dict__['second'], 'picosecond': obj.__dict__['picosecond']}
205 return dumper.represent_mapping(u'!timestamp_ps', state)
205 return dumper.represent_mapping('!timestamp_ps', state)
206 def ns_con(loader, node): return PrecisionTime.nsTime(**loader.construct_mapping(node))
206 def ns_con(loader, node): return PrecisionTime.nsTime(**loader.construct_mapping(node))
207 def ps_con(loader, node): return PrecisionTime.psTime(**loader.construct_mapping(node))
207 def ps_con(loader, node): return PrecisionTime.psTime(**loader.construct_mapping(node))
208
208
209 Dumper.add_representer(PrecisionTime.nsTime, ns_rep)
209 Dumper.add_representer(PrecisionTime.nsTime, ns_rep)
210 Dumper.add_representer(PrecisionTime.psTime, ps_rep)
210 Dumper.add_representer(PrecisionTime.psTime, ps_rep)
211 Loader.add_constructor(u'!timestamp_ns', ns_con)
211 Loader.add_constructor('!timestamp_ns', ns_con)
212 Loader.add_constructor(u'!timestamp_nanosecond', ns_con)
212 Loader.add_constructor('!timestamp_nanosecond', ns_con)
213 Loader.add_constructor(u'!timestamp_ps', ps_con)
213 Loader.add_constructor('!timestamp_ps', ps_con)
214 Loader.add_constructor(u'!timestamp_picosecond', ps_con)
214 Loader.add_constructor('!timestamp_picosecond', ps_con)
215
215
216 # Binary object constructor & representer:
216 # Binary object constructor & representer:
217 def bin_rep(dumper, obj): return dumper.represent_mapping(u'!binary', obj.__dict__)
217 def bin_rep(dumper, obj): return dumper.represent_mapping('!binary', obj.__dict__)
218 def bin_con(loader, node): return DynamicObject.Binary(**loader.construct_mapping(node))
218 def bin_con(loader, node): return DynamicObject.Binary(**loader.construct_mapping(node))
219 Dumper.add_representer(DynamicObject.Binary, bin_rep)
219 Dumper.add_representer(DynamicObject.Binary, bin_rep)
220 Loader.add_constructor(u'!binary', bin_con)
220 Loader.add_constructor('!binary', bin_con)
221
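Editor's note: dropping the u prefix from the YAML tag literals is safe because every str literal in Python 3 is already unicode (and the u prefix itself is accepted again from Python 3.3 onward), so both spellings denote the same value:

    assert u'!binary' == '!binary'   # identical str objects on Python 3.3+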
@@ -15,8 +15,8 import platform
15 import collections
15 import collections
16
16
17 # Implicit Types:
17 # Implicit Types:
18 python_dtypes = tuple([bool,int,long,float,str,datetime.datetime,list,
18 python_dtypes = tuple([bool,int,int,float,str,datetime.datetime,list,
19 set,dict,tuple,unicode])
19 set,dict,tuple,str])
20
20
21 # Numpy Data-types:
21 # Numpy Data-types:
22 numpy_dtypes = {'dtype.bool': bool, 'dtype.int': np.int, 'dtype.int8': np.int8,
22 numpy_dtypes = {'dtype.bool': bool, 'dtype.int': np.int, 'dtype.int8': np.int8,
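Editor's note: 2to3 maps both long and unicode onto int and str, so the converted python_dtypes tuple above ends up with duplicate entries. Membership tests still work, but a deduplicated version (a sketch, not part of the patch) reads more clearly:

    python_dtypes = (bool, int, float, str, datetime.datetime, list, set, dict, tuple)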
@@ -53,10 +53,9 builtin_objects_simple = {'nsTime': PrecisionTime.nsTime, 'psTime': PrecisionTim
53 'Binary': Binary}
53 'Binary': Binary}
54
54
55 # Inverse lookup for accessing tags given a class instance:
55 # Inverse lookup for accessing tags given a class instance:
56 cls_dtypes = dict([(v,k) for (k,v) in numpy_dtypes.items()])
56 cls_dtypes = dict([(v,k) for (k,v) in list(numpy_dtypes.items())])
57 obj_dtypes = dict([(v,k) for (k,v) in builtin_objects_simple.items()])
57 obj_dtypes = dict([(v,k) for (k,v) in list(builtin_objects_simple.items())])
58
58
59 # Pointer to the list of all Object classes created, as located in the Object module / class:
59 # Pointer to the list of all Object classes created, as located in the Object module / class:
60 dynamicClasses = DynamicObject.Object.dynamicClasses
60 dynamicClasses = DynamicObject.Object.dynamicClasses
61
61
62
@@ -18,15 +18,15 class nsTime:
18 def __init__(self, second, nanosecond):
18 def __init__(self, second, nanosecond):
19 self.second = int(second)
19 self.second = int(second)
20 if self.second < 0:
20 if self.second < 0:
21 raise ValueError, 'seconds must be greater than 0, not %i' % (self.second)
21 raise ValueError('seconds must be greater than 0, not %i' % (self.second))
22 nanosecond = long(nanosecond)
22 nanosecond = int(nanosecond)
23 if nanosecond < 0:
23 if nanosecond < 0:
24 raise ValueError, 'nanoseconds must be greater 0, not %i' % (nanosecond)
24 raise ValueError('nanoseconds must be greater 0, not %i' % (nanosecond))
25 addSec = nanosecond / 1000000000
25 addSec = nanosecond / 1000000000
26 if addSec > 0:
26 if addSec > 0:
27 self.second += addSec
27 self.second += addSec
28 self.nanosecond = nanosecond % 1000000000
28 self.nanosecond = nanosecond % 1000000000
29 self.totalNS = long(self.nanosecond) + long(self.second) * 1000000000
29 self.totalNS = int(self.nanosecond) + int(self.second) * 1000000000
30
30
31
31
32 def __add__(self, other):
32 def __add__(self, other):
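Editor's note: one behavioural change worth flagging in the untouched context lines above: addSec = nanosecond / 1000000000 is true division under Python 3 and yields a float, whereas the original Python 2 code relied on integer division. A sketch of the integer-preserving form (the same applies to the psTime constructor further down); this is not part of the patch:

    addSec = nanosecond // 1000000000   # floor division keeps seconds integral
    if addSec > 0:
        self.second += addSec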
@@ -46,7 +46,7 class nsTime:
46 addSec = int(nsResult / 1000000000)
46 addSec = int(nsResult / 1000000000)
47 self.second = self.second + other.second + addSec
47 self.second = self.second + other.second + addSec
48 self.nanosecond = nsResult % 1000000000
48 self.nanosecond = nsResult % 1000000000
49 self.totalNS = long(self.nanosecond) + long(self.second) * 1000000000
49 self.totalNS = int(self.nanosecond) + int(self.second) * 1000000000
50
50
51
51
52 def __sub__(self, other):
52 def __sub__(self, other):
@@ -65,8 +65,8 class nsTime:
65 def multiply(self, factor):
65 def multiply(self, factor):
66 """multiply this nsTime times an integer
66 """multiply this nsTime times an integer
67 """
67 """
68 if type(factor) not in (types.IntType, types.LongType):
68 if type(factor) not in (int, int):
69 raise ValueError, 'Illegal type %s passed into nsTime.multiply' % (str(type(factor)))
69 raise ValueError('Illegal type %s passed into nsTime.multiply' % (str(type(factor))))
70 newTotalNS = self.totalNS * factor
70 newTotalNS = self.totalNS * factor
71 newSeconds = int(newTotalNS / 1000000000)
71 newSeconds = int(newTotalNS / 1000000000)
72 newNanoseconds = int(newTotalNS - (newSeconds * 1000000000))
72 newNanoseconds = int(newTotalNS - (newSeconds * 1000000000))
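Editor's note: types.IntType and types.LongType both map to int in Python 3, which is why the converted check reads (int, int). The duplicate is harmless; the idiomatic Python 3 test would be a plain isinstance call, e.g. (a sketch, not what the patch does):

    if not isinstance(factor, int):
        raise ValueError('Illegal type %s passed into nsTime.multiply' % (str(type(factor))))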
@@ -85,7 +85,7 class nsTime:
85 def __mod__(self, other):
85 def __mod__(self, other):
86 """__mod__ implements self % other.
86 """__mod__ implements self % other.
87 """
87 """
88 if type(other) in (types.IntType, types.LongType):
88 if type(other) in (int, int):
89 return self.totalNS % other
89 return self.totalNS % other
90 else:
90 else:
91 return self.totalNS % other.totalNS
91 return self.totalNS % other.totalNS
@@ -118,15 +118,15 class psTime:
118 def __init__(self, second, picosecond):
118 def __init__(self, second, picosecond):
119 self.second = int(second)
119 self.second = int(second)
120 if self.second < 0:
120 if self.second < 0:
121 raise ValueError, 'seconds must be greater than 0, not %i' % (self.second)
121 raise ValueError('seconds must be greater than 0, not %i' % (self.second))
122 picosecond = long(picosecond)
122 picosecond = int(picosecond)
123 if picosecond < 0:
123 if picosecond < 0:
124 raise ValueError, 'picoseconds must be greater 0, not %i' % (picosecond)
124 raise ValueError('picoseconds must be greater 0, not %i' % (picosecond))
125 addSec = picosecond / 1000000000000
125 addSec = picosecond / 1000000000000
126 if addSec > 0:
126 if addSec > 0:
127 self.second += addSec
127 self.second += addSec
128 self.picosecond = picosecond % 1000000000000
128 self.picosecond = picosecond % 1000000000000
129 self.totalPS = long(self.picosecond) + long(self.second) * 1000000000000
129 self.totalPS = int(self.picosecond) + int(self.second) * 1000000000000
130
130
131
131
132 def __add__(self, other):
132 def __add__(self, other):
@@ -146,7 +146,7 class psTime:
146 addSec = int(psResult / 1000000000000)
146 addSec = int(psResult / 1000000000000)
147 self.second = self.second + other.second + addSec
147 self.second = self.second + other.second + addSec
148 self.picosecond = psResult % 1000000000000
148 self.picosecond = psResult % 1000000000000
149 self.totalPS = long(self.picosecond) + long(self.second) * 1000000000000
149 self.totalPS = int(self.picosecond) + int(self.second) * 1000000000000
150
150
151
151
152 def __sub__(self, other):
152 def __sub__(self, other):
@@ -165,8 +165,8 class psTime:
165 def multiply(self, factor):
165 def multiply(self, factor):
166 """multiply this psTime times an integer
166 """multiply this psTime times an integer
167 """
167 """
168 if type(factor) not in (types.IntType, types.LongType):
168 if type(factor) not in (int, int):
169 raise ValueError, 'Illegal type %s passed into psTime.multiply' % (str(type(factor)))
169 raise ValueError('Illegal type %s passed into psTime.multiply' % (str(type(factor))))
170 newTotalPS = self.totalPS * factor
170 newTotalPS = self.totalPS * factor
171 newSeconds = int(newTotalPS / 1000000000000)
171 newSeconds = int(newTotalPS / 1000000000000)
172 newPicoseconds = int(newTotalPS - (newSeconds * 1000000000000))
172 newPicoseconds = int(newTotalPS - (newSeconds * 1000000000000))
@@ -185,7 +185,7 class psTime:
185 def __mod__(self, other):
185 def __mod__(self, other):
186 """__mod__ implements self % other.
186 """__mod__ implements self % other.
187 """
187 """
188 if type(other) in (types.IntType, types.LongType):
188 if type(other) in (int, int):
189 return self.totalPS % other
189 return self.totalPS % other
190 else:
190 else:
191 return self.totalPS % other.totalPS
191 return self.totalPS % other.totalPS
@@ -208,4 +208,3 class psTime:
208 def __str__(self):
208 def __str__(self):
209 return '%d.%12d' % (self.second, self.picosecond)
209 return '%d.%12d' % (self.second, self.picosecond)
210
210
211
@@ -82,16 +82,16 class YAMLSerializer(Serializer):
82
82
83 # Regular expression taken from yaml.constructor.py
83 # Regular expression taken from yaml.constructor.py
84 timestamp_regexp_str = str(\
84 timestamp_regexp_str = str(\
85 ur'^(?P<year>[0-9][0-9][0-9][0-9])'
85 r'^(?P<year>[0-9][0-9][0-9][0-9])'
86 ur'-(?P<month>[0-9][0-9]?)'
86 r'-(?P<month>[0-9][0-9]?)'
87 ur'-(?P<day>[0-9][0-9]?)'
87 r'-(?P<day>[0-9][0-9]?)'
88 ur'(?:(?:[Tt]|[ \t]+)'
88 r'(?:(?:[Tt]|[ \t]+)'
89 ur'(?P<hour>[0-9][0-9]?)'
89 r'(?P<hour>[0-9][0-9]?)'
90 ur':(?P<minute>[0-9][0-9])'
90 r':(?P<minute>[0-9][0-9])'
91 ur':(?P<second>[0-9][0-9])'
91 r':(?P<second>[0-9][0-9])'
92 ur'(?:\.(?P<fraction>[0-9]*))?'
92 r'(?:\.(?P<fraction>[0-9]*))?'
93 ur'(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)'
93 r'(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)'
94 ur'(?::(?P<tz_minute>[0-9][0-9]))?))?)?$')
94 r'(?::(?P<tz_minute>[0-9][0-9]))?))?)?$')
95 timestamp_regexp = re.compile(timestamp_regexp_str, re.X)
95 timestamp_regexp = re.compile(timestamp_regexp_str, re.X)
96
96
97 def construct_timestamp(value):
97 def construct_timestamp(value):
@@ -133,10 +133,10 class MessagePackSerializer(Serializer):
133 def __fromSerial(self, msg_dict):
133 def __fromSerial(self, msg_dict):
134 if not isinstance(msg_dict, (dict, list, tuple)):
134 if not isinstance(msg_dict, (dict, list, tuple)):
135 return msg_dict # msg_dict is a value - return it
135 return msg_dict # msg_dict is a value - return it
136 if isinstance(msg_dict, dict) and msg_dict.has_key('__meta_attributes'):
136 if isinstance(msg_dict, dict) and '__meta_attributes' in msg_dict:
137 meta_attr = msg_dict['__meta_attributes']
137 meta_attr = msg_dict['__meta_attributes']
138 msg_dict.pop('__meta_attributes')
138 msg_dict.pop('__meta_attributes')
139 if meta_attr.has_key('type'):
139 if 'type' in meta_attr:
140 if meta_attr['type'] == 'datetime':
140 if meta_attr['type'] == 'datetime':
141 return construct_timestamp(str(msg_dict['ts']))
141 return construct_timestamp(str(msg_dict['ts']))
142 elif meta_attr['type'] == 'nsTime':
142 elif meta_attr['type'] == 'nsTime':
@@ -147,7 +147,7 class MessagePackSerializer(Serializer):
147 except KeyError: dtype = Lookup.builtin_objects[meta_attr['type']]
147 except KeyError: dtype = Lookup.builtin_objects[meta_attr['type']]
148 return dtype(**msg_dict)
148 return dtype(**msg_dict)
149 else:
149 else:
150 for key in msg_dict.keys():
150 for key in list(msg_dict.keys()):
151 msg_dict[key] = self.__fromSerial(msg_dict[key])
151 msg_dict[key] = self.__fromSerial(msg_dict[key])
152 cls = Lookup.dynamicClasses['%s.%s'%(meta_attr['__object_name'],meta_attr['__revision_number'])]
152 cls = Lookup.dynamicClasses['%s.%s'%(meta_attr['__object_name'],meta_attr['__revision_number'])]
153 return cls(**msg_dict)
153 return cls(**msg_dict)
@@ -159,7 +159,7 class MessagePackSerializer(Serializer):
159 return np.frombuffer(value, dtype=Lookup.numpy_dtypes[msg_dict[1]])[0]
159 return np.frombuffer(value, dtype=Lookup.numpy_dtypes[msg_dict[1]])[0]
160
160
161 tup = isinstance(msg_dict, tuple)
161 tup = isinstance(msg_dict, tuple)
162 if tup and len(msg_dict) > 1 and msg_dict[0] in Lookup.numpy_dtypes.keys():
162 if tup and len(msg_dict) > 1 and msg_dict[0] in list(Lookup.numpy_dtypes.keys()):
163 msg_flat = list(msg_dict)
163 msg_flat = list(msg_dict)
164 dtypeName = msg_flat.pop(0)
164 dtypeName = msg_flat.pop(0)
165 dtype = Lookup.numpy_dtypes[dtypeName]
165 dtype = Lookup.numpy_dtypes[dtypeName]
@@ -192,7 +192,7 class MessagePackSerializer(Serializer):
192 return msg_dict
192 return msg_dict
193 elif isinstance(obj, DynamicObject.Object):
193 elif isinstance(obj, DynamicObject.Object):
194 msg_dict = {}
194 msg_dict = {}
195 for key, value in obj.__dict__.items():
195 for key, value in list(obj.__dict__.items()):
196 msg_dict[key] = self.__toSerial(value)
196 msg_dict[key] = self.__toSerial(value)
197
197
198 msg_dict['__meta_attributes'] = obj.__class__.meta_attributes
198 msg_dict['__meta_attributes'] = obj.__class__.meta_attributes
@@ -210,7 +210,7 class MessagePackSerializer(Serializer):
210 msg_flat.append(toSer)
210 msg_flat.append(toSer)
211 return list(msg_flat)
211 return list(msg_flat)
212
212
213 is_builtin = obj.__class__ in Lookup.numpy_dtypes.values()
213 is_builtin = obj.__class__ in list(Lookup.numpy_dtypes.values())
214 #is_python = isinstance(obj, Lookup.python_dtypes)
214 #is_python = isinstance(obj, Lookup.python_dtypes)
215 if is_builtin: # and not is_python:
215 if is_builtin: # and not is_python:
216 try:
216 try:
@@ -246,7 +246,7 class HDF5Serializer(Serializer):
246 if isinstance(grp, h5py.Dataset):
246 if isinstance(grp, h5py.Dataset):
247 return grp.value
247 return grp.value
248
248
249 elif isinstance(grp, h5py.Group) and '__type' in grp.keys():
249 elif isinstance(grp, h5py.Group) and '__type' in list(grp.keys()):
250 typ = grp['__type'].value
250 typ = grp['__type'].value
251 if typ == 'datetime':
251 if typ == 'datetime':
252 return construct_timestamp(str(grp['ts'].value))
252 return construct_timestamp(str(grp['ts'].value))
@@ -259,7 +259,7 class HDF5Serializer(Serializer):
259 try: cls = Lookup.builtin_objects_simple[typ]
259 try: cls = Lookup.builtin_objects_simple[typ]
260 except KeyError: cls = Lookup.dynamicClasses[typ]
260 except KeyError: cls = Lookup.dynamicClasses[typ]
261 args = []
261 args = []
262 for key in grp.keys():
262 for key in list(grp.keys()):
263 fromSer = self.__fromSerial(grp[key])
263 fromSer = self.__fromSerial(grp[key])
264 args.append((key, fromSer))
264 args.append((key, fromSer))
265 kwargs = dict(args)
265 kwargs = dict(args)
@@ -299,7 +299,7 class HDF5Serializer(Serializer):
299 elif isinstance(obj, tuple(Lookup.builtin_objects_simple.values())):
299 elif isinstance(obj, tuple(Lookup.builtin_objects_simple.values())):
300 sub_grp = grp.create_group(name)
300 sub_grp = grp.create_group(name)
301 sub_grp['__type'] = Lookup.obj_dtypes[obj.__class__]
301 sub_grp['__type'] = Lookup.obj_dtypes[obj.__class__]
302 for key, value in obj.__dict__.items():
302 for key, value in list(obj.__dict__.items()):
303 if value != None and key not in ['totalNS', 'totalPS']:
303 if value != None and key not in ['totalNS', 'totalPS']:
304 sub_grp[key] = value
304 sub_grp[key] = value
305
305
@@ -313,7 +313,7 class HDF5Serializer(Serializer):
313 tag = '%s.%s'%(obj.getObjectName(), obj.getRevisionNumber())
313 tag = '%s.%s'%(obj.getObjectName(), obj.getRevisionNumber())
314 sub_grp['__type'] = tag
314 sub_grp['__type'] = tag
315 # Put all of the DynamicObject's attributes into the new h5py group
315 # Put all of the DynamicObject's attributes into the new h5py group
316 for key, value in obj.__dict__.items():
316 for key, value in list(obj.__dict__.items()):
317 self.__toSerial(value, sub_grp, key)
317 self.__toSerial(value, sub_grp, key)
318
318
319 elif isinstance(obj, tuple):
319 elif isinstance(obj, tuple):
@@ -356,7 +356,7 class jsonSerializer(Serializer):
356 #return json.dumps(string)
356 #return json.dumps(string)
357 return jsonpickle.encode(string, max_depth=500)
357 return jsonpickle.encode(string, max_depth=500)
358
358
359 # Dict mapping from serializer type to corresponding class object:
359 # Dict mapping from .serializer type to corresponding class object:
360 serializers = {'yaml': YAMLSerializer,
360 serializers = {'yaml': YAMLSerializer,
361 'msgpack': MessagePackSerializer,
361 'msgpack': MessagePackSerializer,
362 'hdf5': HDF5Serializer,
362 'hdf5': HDF5Serializer,
@@ -367,7 +367,6 instances = {'yaml': YAMLSerializer(),
367 'hdf5': HDF5Serializer(),
367 'hdf5': HDF5Serializer(),
368 'json': jsonSerializer()}
368 'json': jsonSerializer()}
369
369
370 serial_types = dict([(v,u) for u,v in serializers.items()])
370 serial_types = dict([(v,u) for u,v in list(serializers.items())])
371
371
372 compression_types = ['gzip', '']
372 compression_types = ['gzip', '']
373
@@ -157,7 +157,7 datastr = serializer.toSerial(source_object)
157
157
158 dest_object = serializer.fromSerial(datastr)
158 dest_object = serializer.fromSerial(datastr)
159
159
160 print "dest_object=",dest_object
160 print("dest_object=",dest_object)
161
161
162 myObject = StateListObject(hierarchical="yes",state=np.array([1,2,3.0]))
162 myObject = StateListObject(hierarchical="yes",state=np.array([1,2,3.0]))
163
163
@@ -168,7 +168,7 packed = msgpack.packb(datastr)
168 try:
168 try:
169 r= redis.StrictRedis(host='localhost',port=6379,db=0)
169 r= redis.StrictRedis(host='localhost',port=6379,db=0)
170 except Exception as eobj:
170 except Exception as eobj:
171 print "is the redis server running?",eobj
171 print("is the redis server running?",eobj)
172 else:
172 else:
173
173
174 r.set('baz',packed) # converts to string
174 r.set('baz',packed) # converts to string
@@ -178,10 +178,9 unpacked = msgpack.unpackb(x)
178
178
179 dest_object = serializer.fromSerial(unpacked)
179 dest_object = serializer.fromSerial(unpacked)
180
180
181 print "val1=",dest_object.hierarchical
181 print("val1=",dest_object.hierarchical)
182 val2 = dest_object.state
182 val2 = dest_object.state
183 print "val2=",val2
183 print("val2=",val2)
184 # can numpy array be used as array?
184 # can numpy array be used as array?
185 print val2.shape
185 print(val2.shape)
186
187
186
@@ -4,5 +4,5 $Author: murco $
4 $Id: Processor.py 1 2012-11-12 18:56:07Z murco $
4 $Id: Processor.py 1 2012-11-12 18:56:07Z murco $
5 '''
5 '''
6
6
7 from jroutils_ftp import *
7 from .jroutils_ftp import *
8 from jroutils_publish import *
8 from .jroutils_publish import *
@@ -9,7 +9,7 try:
9 import paramiko
9 import paramiko
10 import scp
10 import scp
11 except:
11 except:
12 print "You should install paramiko and scp libraries \nif you want to use SSH protocol to upload files to the server"
12 print("You should install paramiko and scp libraries \nif you want to use SSH protocol to upload files to the server")
13
13
14 import time
14 import time
15
15
@@ -64,9 +64,9 class Remote(Thread):
64
64
65 self.stopFlag = False
65 self.stopFlag = False
66
66
67 print "[Remote Server] Opening server: %s" %self.__server
67 print("[Remote Server] Opening server: %s" %self.__server)
68 if self.open(self.__server, self.__username, self.__password, self.__remotefolder):
68 if self.open(self.__server, self.__username, self.__password, self.__remotefolder):
69 print "[Remote Server] %s server was opened successfully" %self.__server
69 print("[Remote Server] %s server was opened successfully" %self.__server)
70
70
71 self.close()
71 self.close()
72
72
@@ -81,31 +81,31 class Remote(Thread):
81 """
81 """
82 Connect to server and create a connection class (FTP or SSH) to remote server.
82 Connect to server and create a connection class (FTP or SSH) to remote server.
83 """
83 """
84 raise NotImplementedError, "Implement this method in child class"
84 raise NotImplementedError("Implement this method in child class")
85
85
86 def close(self):
86 def close(self):
87 """
87 """
88 Close connection to server
88 Close connection to server
89 """
89 """
90 raise NotImplementedError, "Implement this method in child class"
90 raise NotImplementedError("Implement this method in child class")
91
91
92 def mkdir(self, remotefolder):
92 def mkdir(self, remotefolder):
93 """
93 """
94 Create a folder remotely
94 Create a folder remotely
95 """
95 """
96 raise NotImplementedError, "Implement this method in child class"
96 raise NotImplementedError("Implement this method in child class")
97
97
98 def cd(self, remotefolder):
98 def cd(self, remotefolder):
99 """
99 """
100 Change working directory in remote server
100 Change working directory in remote server
101 """
101 """
102 raise NotImplementedError, "Implement this method in child class"
102 raise NotImplementedError("Implement this method in child class")
103
103
104 def download(self, filename, localfolder=None):
104 def download(self, filename, localfolder=None):
105 """
105 """
106 Download a file from server to local host
106 Download a file from server to local host
107 """
107 """
108 raise NotImplementedError, "Implement this method in child class"
108 raise NotImplementedError("Implement this method in child class")
109
109
110 def sendFile(self, fullfilename):
110 def sendFile(self, fullfilename):
111 """
111 """
@@ -117,7 +117,7 class Remote(Thread):
117 Returns:
117 Returns:
118 0 in error case else 1
118 0 in error case else 1
119 """
119 """
120 raise NotImplementedError, "Implement this method in child class"
120 raise NotImplementedError("Implement this method in child class")
121
121
122 def upload(self, fullfilename, remotefolder=None):
122 def upload(self, fullfilename, remotefolder=None):
123 """
123 """
@@ -132,7 +132,7 class Remote(Thread):
132 Returns:
132 Returns:
133 0 in error case else 1
133 0 in error case else 1
134 """
134 """
135 print "[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder)
135 print("[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder))
136
136
137 if not self.status:
137 if not self.status:
138 return 0
138 return 0
@@ -144,10 +144,10 class Remote(Thread):
144 return 0
144 return 0
145
145
146 if not self.sendFile(fullfilename):
146 if not self.sendFile(fullfilename):
147 print "[Remote Server] Error uploading file %s" %fullfilename
147 print("[Remote Server] Error uploading file %s" %fullfilename)
148 return 0
148 return 0
149
149
150 print "[Remote Server] upload finished successfully"
150 print("[Remote Server] upload finished successfully")
151
151
152 return 1
152 return 1
153
153
@@ -180,11 +180,11 class Remote(Thread):
180 def run(self):
180 def run(self):
181
181
182 if not self.status:
182 if not self.status:
183 print "Finishing FTP service"
183 print("Finishing FTP service")
184 return
184 return
185
185
186 if not self.cd(self.remotefolder):
186 if not self.cd(self.remotefolder):
187 raise ValueError, "Could not access to the new remote directory: %s" %self.remotefolder
187 raise ValueError("Could not access to the new remote directory: %s" %self.remotefolder)
188
188
189 while True:
189 while True:
190
190
@@ -199,7 +199,7 class Remote(Thread):
199 # self.bussy = True
199 # self.bussy = True
200 self.mutex.acquire()
200 self.mutex.acquire()
201
201
202 print "[Remote Server] Opening %s" %self.__server
202 print("[Remote Server] Opening %s" %self.__server)
203 if not self.open(self.__server, self.__username, self.__password, self.__remotefolder):
203 if not self.open(self.__server, self.__username, self.__password, self.__remotefolder):
204 self.mutex.release()
204 self.mutex.release()
205 continue
205 continue
@@ -207,13 +207,13 class Remote(Thread):
207 for thisFile in self.fileList:
207 for thisFile in self.fileList:
208 self.upload(thisFile, self.remotefolder)
208 self.upload(thisFile, self.remotefolder)
209
209
210 print "[Remote Server] Closing %s" %self.__server
210 print("[Remote Server] Closing %s" %self.__server)
211 self.close()
211 self.close()
212
212
213 self.mutex.release()
213 self.mutex.release()
214 # self.bussy = False
214 # self.bussy = False
215
215
216 print "[Remote Server] Thread stopped successfully"
216 print("[Remote Server] Thread stopped successfully")
217
217
218 class FTPClient(Remote):
218 class FTPClient(Remote):
219
219
@@ -247,29 +247,29 class FTPClient(Remote):
247 """
247 """
248
248
249 if server == None:
249 if server == None:
250 raise ValueError, "FTP server should be defined"
250 raise ValueError("FTP server should be defined")
251
251
252 if username == None:
252 if username == None:
253 raise ValueError, "FTP username should be defined"
253 raise ValueError("FTP username should be defined")
254
254
255 if password == None:
255 if password == None:
256 raise ValueError, "FTP password should be defined"
256 raise ValueError("FTP password should be defined")
257
257
258 if remotefolder == None:
258 if remotefolder == None:
259 raise ValueError, "FTP remote folder should be defined"
259 raise ValueError("FTP remote folder should be defined")
260
260
261 try:
261 try:
262 ftpClientObj = ftplib.FTP(server)
262 ftpClientObj = ftplib.FTP(server)
263 except ftplib.all_errors, e:
263 except ftplib.all_errors as e:
264 print "[FTP Server]: FTP server connection fail: %s" %server
264 print("[FTP Server]: FTP server connection fail: %s" %server)
265 print "[FTP Server]:", e
265 print("[FTP Server]:", e)
266 self.status = 0
266 self.status = 0
267 return 0
267 return 0
268
268
269 try:
269 try:
270 ftpClientObj.login(username, password)
270 ftpClientObj.login(username, password)
271 except ftplib.all_errors:
271 except ftplib.all_errors:
272 print "[FTP Server]: FTP username or password are incorrect"
272 print("[FTP Server]: FTP username or password are incorrect")
273 self.status = 0
273 self.status = 0
274 return 0
274 return 0
275
275
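Editor's note: the old except ftplib.all_errors, e: spelling is a SyntaxError on Python 3; the as form used in the new code is accepted by Python 2.6+ and Python 3 alike, so it is the safe choice during a migration. Sketch mirroring the patched code:

    try:
        ftpClientObj = ftplib.FTP(server)
    except ftplib.all_errors as e:   # works on py2.6+ and py3
        print("[FTP Server]:", e)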
@@ -279,7 +279,7 class FTPClient(Remote):
279 try:
279 try:
280 ftpClientObj.cwd(remotefolder)
280 ftpClientObj.cwd(remotefolder)
281 except ftplib.all_errors:
281 except ftplib.all_errors:
282 print "[FTP Server]: FTP remote folder is invalid: %s" %remotefolder
282 print("[FTP Server]: FTP remote folder is invalid: %s" %remotefolder)
283 remotefolder = ftpClientObj.pwd()
283 remotefolder = ftpClientObj.pwd()
284
284
285 self.server = server
285 self.server = server
@@ -316,7 +316,7 class FTPClient(Remote):
316 try:
316 try:
317 self.__ftpClientObj.mkd(dirname)
317 self.__ftpClientObj.mkd(dirname)
318 except ftplib.all_errors:
318 except ftplib.all_errors:
319 print "[FTP Server]: Error creating remote folder: %s" %remotefolder
319 print("[FTP Server]: Error creating remote folder: %s" %remotefolder)
320 return 0
320 return 0
321
321
322 return 1
322 return 1
@@ -343,11 +343,11 class FTPClient(Remote):
343 try:
343 try:
344 self.__ftpClientObj.cwd(remotefolder)
344 self.__ftpClientObj.cwd(remotefolder)
345 except ftplib.all_errors:
345 except ftplib.all_errors:
346 print '[FTP Server]: Error changing to %s' %remotefolder
346 print('[FTP Server]: Error changing to %s' %remotefolder)
347 print '[FTP Server]: Trying to create remote folder'
347 print('[FTP Server]: Trying to create remote folder')
348
348
349 if not self.mkdir(remotefolder):
349 if not self.mkdir(remotefolder):
350 print '[FTP Server]: Remote folder could not be created'
350 print('[FTP Server]: Remote folder could not be created')
351 return 0
351 return 0
352
352
353 try:
353 try:
@@ -372,14 +372,14 class FTPClient(Remote):
372
372
373 try:
373 try:
374 self.__ftpClientObj.storbinary(command, fp)
374 self.__ftpClientObj.storbinary(command, fp)
375 except ftplib.all_errors, e:
375 except ftplib.all_errors as e:
376 print "[FTP Server]:", e
376 print("[FTP Server]:", e)
377 return 0
377 return 0
378
378
379 try:
379 try:
380 self.__ftpClientObj.sendcmd('SITE CHMOD 755 ' + filename)
380 self.__ftpClientObj.sendcmd('SITE CHMOD 755 ' + filename)
381 except ftplib.all_errors, e:
381 except ftplib.all_errors as e:
382 print "[FTP Server]:", e
382 print("[FTP Server]:", e)
383
383
384 fp.close()
384 fp.close()
385
385
@@ -418,16 +418,16 class SSHClient(Remote):
418 import socket
418 import socket
419
419
420 if server == None:
420 if server == None:
421 raise ValueError, "SSH server should be defined"
421 raise ValueError("SSH server should be defined")
422
422
423 if username == None:
423 if username == None:
424 raise ValueError, "SSH username should be defined"
424 raise ValueError("SSH username should be defined")
425
425
426 if password == None:
426 if password == None:
427 raise ValueError, "SSH password should be defined"
427 raise ValueError("SSH password should be defined")
428
428
429 if remotefolder == None:
429 if remotefolder == None:
430 raise ValueError, "SSH remote folder should be defined"
430 raise ValueError("SSH remote folder should be defined")
431
431
432 sshClientObj = paramiko.SSHClient()
432 sshClientObj = paramiko.SSHClient()
433
433
@@ -437,16 +437,16 class SSHClient(Remote):
437 self.status = 0
437 self.status = 0
438 try:
438 try:
439 sshClientObj.connect(server, username=username, password=password, port=port)
439 sshClientObj.connect(server, username=username, password=password, port=port)
440 except paramiko.AuthenticationException, e:
440 except paramiko.AuthenticationException as e:
441 # print "SSH username or password are incorrect: %s"
441 # print "SSH username or password are incorrect: %s"
442 print "[SSH Server]:", e
442 print("[SSH Server]:", e)
443 return 0
443 return 0
444 except SSHException, e:
444 except SSHException as e:
445 print "[SSH Server]:", e
445 print("[SSH Server]:", e)
446 return 0
446 return 0
447 except socket.error:
447 except socket.error:
448 self.status = 0
448 self.status = 0
449 print "[SSH Server]:", e
449 print("[SSH Server]:", e)
450 return 0
450 return 0
451
451
452 self.status = 1
452 self.status = 1
@@ -463,7 +463,7 class SSHClient(Remote):
463 self.status = 1
463 self.status = 1
464
464
465 if not self.cd(remotefolder):
465 if not self.cd(remotefolder):
466 raise ValueError, "[SSH Server]: Could not access to remote folder: %s" %remotefolder
466 raise ValueError("[SSH Server]: Could not access to remote folder: %s" %remotefolder)
467 return 0
467 return 0
468
468
469 self.remotefolder = remotefolder
469 self.remotefolder = remotefolder
@@ -564,8 +564,8 class SSHClient(Remote):
564
564
565 try:
565 try:
566 self.__scpClientObj.put(fullfilename, remote_path=self.remotefolder)
566 self.__scpClientObj.put(fullfilename, remote_path=self.remotefolder)
567 except scp.ScpError, e:
567 except scp.ScpError as e:
568 print "[SSH Server]", str(e)
568 print("[SSH Server]", str(e))
569 return 0
569 return 0
570
570
571 remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1])
571 remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1])
@@ -596,7 +596,7 class SendToServer(ProcessingUnit):
596 self.clientObj = SSHClient(server, username, password, remotefolder, period)
596 self.clientObj = SSHClient(server, username, password, remotefolder, period)
597
597
598 if not self.clientObj:
598 if not self.clientObj:
599 raise ValueError, "%s has been chosen as remote access protocol but it is not valid" %protocol
599 raise ValueError("%s has been chosen as remote access protocol but it is not valid" %protocol)
600
600
601 self.clientObj.start()
601 self.clientObj.start()
602
602
@@ -614,7 +614,7 class SendToServer(ProcessingUnit):
614
614
615 for thisFolder in folderList:
615 for thisFolder in folderList:
616
616
617 print "[Remote Server]: Searching files on %s" %thisFolder
617 print("[Remote Server]: Searching files on %s" %thisFolder)
618
618
619 filenameList = glob.glob1(thisFolder, '*%s' %self.ext)
619 filenameList = glob.glob1(thisFolder, '*%s' %self.ext)
620
620
@@ -643,18 +643,18 class SendToServer(ProcessingUnit):
643 self.isConfig = True
643 self.isConfig = True
644
644
645 if not self.clientObj.is_alive():
645 if not self.clientObj.is_alive():
646 print "[Remote Server]: Restarting connection "
646 print("[Remote Server]: Restarting connection ")
647 self.setup(**kwargs)
647 self.setup(**kwargs)
648
648
649 if time.time() - self.init >= self.period:
649 if time.time() - self.init >= self.period:
650 fullfilenameList = self.findFiles()
650 fullfilenameList = self.findFiles()
651
651
652 if self.clientObj.updateFileList(fullfilenameList):
652 if self.clientObj.updateFileList(fullfilenameList):
653 print "[Remote Server]: Sending the next files ", str(fullfilenameList)
653 print("[Remote Server]: Sending the next files ", str(fullfilenameList))
654 self.init = time.time()
654 self.init = time.time()
655
655
656 def close(self):
656 def close(self):
657 print "[Remote Server] Stopping thread"
657 print("[Remote Server] Stopping thread")
658 self.clientObj.stop()
658 self.clientObj.stop()
659
659
660
660
@@ -710,7 +710,7 class FTP(object):
710 # print 'Connect to FTP Server: Successfully'
710 # print 'Connect to FTP Server: Successfully'
711
711
712 except ftplib.all_errors:
712 except ftplib.all_errors:
713 print 'Error FTP Service'
713 print('Error FTP Service')
714 self.status = 1
714 self.status = 1
715 return
715 return
716
716
@@ -721,14 +721,14 class FTP(object):
721 try:
721 try:
722 self.dirList = self.ftp.nlst()
722 self.dirList = self.ftp.nlst()
723
723
724 except ftplib.error_perm, resp:
724 except ftplib.error_perm as resp:
725 if str(resp) == "550 No files found":
725 if str(resp) == "550 No files found":
726 print "no files in this directory"
726 print("no files in this directory")
727 self.status = 1
727 self.status = 1
728 return
728 return
729
729
730 except ftplib.all_errors:
730 except ftplib.all_errors:
731 print 'Error Displaying Dir-Files'
731 print('Error Displaying Dir-Files')
732 self.status = 1
732 self.status = 1
733 return
733 return
734
734
@@ -763,7 +763,7 class FTP(object):
763 try:
763 try:
764 self.ftp.mkd(dirname)
764 self.ftp.mkd(dirname)
765 except:
765 except:
766 print 'Error creating remote folder:%s'%dirname
766 print('Error creating remote folder:%s'%dirname)
767 return 1
767 return 1
768
768
769 return 0
769 return 0
@@ -783,7 +783,7 class FTP(object):
783 try:
783 try:
784 self.ftp.delete(filename)
784 self.ftp.delete(filename)
785 except:
785 except:
786 print 'Error deleting remote file:%s'%filename
786 print('Error deleting remote file:%s'%filename)
787 return 1
787 return 1
788
788
789 return 0
789 return 0
@@ -805,7 +805,7 class FTP(object):
805
805
806
806
807 if not(filename in self.fileList):
807 if not(filename in self.fileList):
808 print 'filename:%s not exists'%filename
808 print('filename:%s not exists'%filename)
809 self.status = 1
809 self.status = 1
810 return self.status
810 return self.status
811
811
@@ -814,11 +814,11 class FTP(object):
814 self.file = open(newfilename, 'wb')
814 self.file = open(newfilename, 'wb')
815
815
816 try:
816 try:
817 print 'Download: ' + filename
817 print('Download: ' + filename)
818 self.ftp.retrbinary('RETR ' + filename, self.__handleDownload)
818 self.ftp.retrbinary('RETR ' + filename, self.__handleDownload)
819 print 'Download Complete'
819 print('Download Complete')
820 except ftplib.all_errors:
820 except ftplib.all_errors:
821 print 'Error Downloading ' + filename
821 print('Error Downloading ' + filename)
822 self.status = 1
822 self.status = 1
823 return self.status
823 return self.status
824
824
@@ -861,12 +861,12 class FTP(object):
861
861
862 command = "STOR " + tail
862 command = "STOR " + tail
863
863
864 print 'Uploading: ' + tail
864 print('Uploading: ' + tail)
865 self.ftp.storbinary(command, self.file)
865 self.ftp.storbinary(command, self.file)
866 print 'Upload Completed'
866 print('Upload Completed')
867
867
868 except ftplib.all_errors:
868 except ftplib.all_errors:
869 print 'Error Uploading ' + tail
869 print('Error Uploading ' + tail)
870 self.status = 1
870 self.status = 1
871 return self.status
871 return self.status
872
872
@@ -895,11 +895,11 class FTP(object):
895 """
895 """
896
896
897 self.remotefolder = remotefolder
897 self.remotefolder = remotefolder
898 print 'Change to ' + self.remotefolder
898 print('Change to ' + self.remotefolder)
899 try:
899 try:
900 self.ftp.cwd(remotefolder)
900 self.ftp.cwd(remotefolder)
901 except ftplib.all_errors:
901 except ftplib.all_errors:
902 print 'Error Change to ' + self.remotefolder
902 print('Error Change to ' + self.remotefolder)
903 infoList = None
903 infoList = None
904 self.folderList = None
904 self.folderList = None
905 return infoList,self.folderList
905 return infoList,self.folderList
@@ -909,14 +909,14 class FTP(object):
909 try:
909 try:
910 self.dirList = self.ftp.nlst()
910 self.dirList = self.ftp.nlst()
911
911
912 except ftplib.error_perm, resp:
912 except ftplib.error_perm as resp:
913 if str(resp) == "550 No files found":
913 if str(resp) == "550 No files found":
914 print "no files in this directory"
914 print("no files in this directory")
915 infoList = None
915 infoList = None
916 self.folderList = None
916 self.folderList = None
917 return infoList,self.folderList
917 return infoList,self.folderList
918 except ftplib.all_errors:
918 except ftplib.all_errors:
919 print 'Error Displaying Dir-Files'
919 print('Error Displaying Dir-Files')
920 infoList = None
920 infoList = None
921 self.folderList = None
921 self.folderList = None
922 return infoList,self.folderList
922 return infoList,self.folderList
@@ -957,8 +957,8 class SendByFTP(Operation):
957
957
958 def error_print(self, ValueError):
958 def error_print(self, ValueError):
959
959
960 print ValueError, 'Error FTP'
960 print(ValueError, 'Error FTP')
961 print "don't worry the program is running..."
961 print("don't worry the program is running...")
962
962
963 def worker_ftp(self, server, username, password, remotefolder, filenameList):
963 def worker_ftp(self, server, username, password, remotefolder, filenameList):
964
964
@@ -981,7 +981,7 class SendByFTP(Operation):
981 if p.is_alive():
981 if p.is_alive():
982 p.terminate()
982 p.terminate()
983 p.join()
983 p.join()
984 print 'killing ftp process...'
984 print('killing ftp process...')
985 self.status = 0
985 self.status = 0
986 return
986 return
987
987
@@ -56,7 +56,7 def get_plot_code(s):
56
56
57 def roundFloats(obj):
57 def roundFloats(obj):
58 if isinstance(obj, list):
58 if isinstance(obj, list):
59 return map(roundFloats, obj)
59 return list(map(roundFloats, obj))
60 elif isinstance(obj, float):
60 elif isinstance(obj, float):
61 return round(obj, 2)
61 return round(obj, 2)
62
62
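Editor's note: map() returns a lazy iterator in Python 3, so the list() wrapper above keeps roundFloats returning a list as before. A list comprehension expresses the same thing directly (a sketch using the recursive roundFloats defined above):

    if isinstance(obj, list):
        return [roundFloats(x) for x in obj]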
@@ -241,7 +241,7 class Data(object):
241 H.sort()
241 H.sort()
242 for key in self.data:
242 for key in self.data:
243 shape = self.shape(key)[:-1] + H.shape
243 shape = self.shape(key)[:-1] + H.shape
244 for tm, obj in self.data[key].items():
244 for tm, obj in list(self.data[key].items()):
245 h = self.__heights[self.__times.index(tm)]
245 h = self.__heights[self.__times.index(tm)]
246 if H.size == h.size:
246 if H.size == h.size:
247 continue
247 continue
@@ -285,7 +285,7 class Data(object):
285 else:
285 else:
286 ret['pairs'] = []
286 ret['pairs'] = []
287
287
288 for key, value in self.meta.items():
288 for key, value in list(self.meta.items()):
289 ret[key] = value
289 ret[key] = value
290
290
291 return json.dumps(ret)
291 return json.dumps(ret)
@@ -460,7 +460,7 class PublishData(Operation):
460 'yData': yData
460 'yData': yData
461 }
461 }
462 else:
462 else:
463 print "Tipo de grafico invalido"
463 print("Tipo de grafico invalido")
464 payload = {
464 payload = {
465 'data': 'None',
465 'data': 'None',
466 'timestamp': 'None',
466 'timestamp': 'None',
@@ -805,7 +805,7 class SendToFTP(Operation, Process):
805
805
806 try:
806 try:
807 self.ftp.storbinary(command, fp, blocksize=1024)
807 self.ftp.storbinary(command, fp, blocksize=1024)
808 except Exception, e:
808 except Exception as e:
809 log.error('{}'.format(e), self.name)
809 log.error('{}'.format(e), self.name)
810 if self.ftp is not None:
810 if self.ftp is not None:
811 self.ftp.close()
811 self.ftp.close()
@@ -814,7 +814,7 class SendToFTP(Operation, Process):
814
814
815 try:
815 try:
816 self.ftp.sendcmd('SITE CHMOD 755 {}'.format(dst))
816 self.ftp.sendcmd('SITE CHMOD 755 {}'.format(dst))
817 except Exception, e:
817 except Exception as e:
818 log.error('{}'.format(e), self.name)
818 log.error('{}'.format(e), self.name)
819 if self.ftp is not None:
819 if self.ftp is not None:
820 self.ftp.close()
820 self.ftp.close()
@@ -63,9 +63,9 def formatArgs(op):
63 argsAsKey = ["\t'{}'".format(x) for x in args]
63 argsAsKey = ["\t'{}'".format(x) for x in args]
64 argsFormatted = ": 'string',\n".join(argsAsKey)
64 argsFormatted = ": 'string',\n".join(argsAsKey)
65
65
66 print op
66 print(op)
67 print "parameters = { \n" + argsFormatted + ": 'string',\n }"
67 print("parameters = { \n" + argsFormatted + ": 'string',\n }")
68 print '\n'
68 print('\n')
69
69
70
70
71 if __name__ == "__main__":
71 if __name__ == "__main__":
@@ -103,8 +103,8 def printSpeed(deltaTime, mySerial):
103 size = len(mySerial)/1024.
103 size = len(mySerial)/1024.
104 vel = 1.0*size / deltaTime
104 vel = 1.0*size / deltaTime
105
105
106 print "Index [", replayerObj.getProfileIndex(), "]: ",
106 print("Index [", replayerObj.getProfileIndex(), "]: ", end=' ')
107 print "Total time %5.2f ms, Data size %5.2f KB, Speed %5.2f MB/s" %(deltaTime, size, vel)
107 print("Total time %5.2f ms, Data size %5.2f KB, Speed %5.2f MB/s" %(deltaTime, size, vel))
108 ####################
108 ####################
109
109
110 if __name__ == '__main__':
110 if __name__ == '__main__':
@@ -131,7 +131,7 if __name__ == '__main__':
131 deltaTime = (time.time() - ini)*1024
131 deltaTime = (time.time() - ini)*1024
132
132
133 if not mySerialData:
133 if not mySerialData:
134 print "No more data"
134 print("No more data")
135 break
135 break
136
136
137 # myDataDict = SERIALIZER.loads(mySerialData)
137 # myDataDict = SERIALIZER.loads(mySerialData)
@@ -10,4 +10,4 if __name__ == '__main__':
10 c = zerorpc.Client()
10 c = zerorpc.Client()
11 c.connect("tcp://127.0.0.1:4242")
11 c.connect("tcp://127.0.0.1:4242")
12 c.load("file2") # AAAHH! The previously loaded model gets overwritten here!
12 c.load("file2") # AAAHH! The previously loaded model gets overwritten here!
13 print c.getModelName() No newline at end of file
13 print(c.getModelName()) No newline at end of file
@@ -25,9 +25,9 if __name__ == '__main__':
25
25
26 replayerObj.start()
26 replayerObj.start()
27
27
28 print "Initializing 'zerorpc' server"
28 print("Initializing 'zerorpc' server")
29 s = zerorpc.Server(replayerObj)
29 s = zerorpc.Server(replayerObj)
30 s.bind("tcp://0.0.0.0:4242")
30 s.bind("tcp://0.0.0.0:4242")
31 s.run()
31 s.run()
32
32
33 print "End" No newline at end of file
33 print("End") No newline at end of file
@@ -22,7 +22,7 def isDictFormat(thisValue):
22 if type(thisValue) != type({}):
22 if type(thisValue) != type({}):
23 return False
23 return False
24
24
25 if '__name__' not in thisValue.keys():
25 if '__name__' not in list(thisValue.keys()):
26 return False
26 return False
27
27
28 return True
28 return True
@@ -33,7 +33,7 def obj2Dict(myObj):
33
33
34 myDict['__name__'] = myObj.__class__.__name__
34 myDict['__name__'] = myObj.__class__.__name__
35
35
36 for thisKey, thisValue in myObj.__dict__.items():
36 for thisKey, thisValue in list(myObj.__dict__.items()):
37
37
38 if isNotClassVar(thisValue):
38 if isNotClassVar(thisValue):
39 myDict[thisKey] = thisValue
39 myDict[thisKey] = thisValue
@@ -49,14 +49,14 def dict2Obj(myDict):
49 '''
49 '''
50 '''
50 '''
51
51
52 if '__name__' not in myDict.keys():
52 if '__name__' not in list(myDict.keys()):
53 return None
53 return None
54
54
55 className = eval(myDict['__name__'])
55 className = eval(myDict['__name__'])
56
56
57 myObj = className()
57 myObj = className()
58
58
59 for thisKey, thisValue in myDict.items():
59 for thisKey, thisValue in list(myDict.items()):
60
60
61 if thisKey == '__name__':
61 if thisKey == '__name__':
62 continue
62 continue
@@ -129,7 +129,7 def myMsgPackTest():
129 x_enc = m.encode(x)
129 x_enc = m.encode(x)
130 x_rec = m.decode(x_enc)
130 x_rec = m.decode(x_enc)
131
131
132 print x_rec
132 print(x_rec)
133 #
133 #
134 # x_enc = msgpack.packb(x, default=m.encoder)
134 # x_enc = msgpack.packb(x, default=m.encoder)
135 # x_rec = msgpack.unpackb(x_enc, object_hook=m.decoder)
135 # x_rec = msgpack.unpackb(x_enc, object_hook=m.decoder)
@@ -159,19 +159,19 if __name__ == '__main__':
159 # print myNewObj.__dict__
159 # print myNewObj.__dict__
160
160
161 # sys.exit()
161 # sys.exit()
162 print myDict
162 print(myDict)
163
163
164 newSerial = serializerObj.encode(myDict)
164 newSerial = serializerObj.encode(myDict)
165 # print newSerial
165 # print newSerial
166
166
167 newDict = serializerObj.decode(newSerial)
167 newDict = serializerObj.decode(newSerial)
168 print newDict
168 print(newDict)
169
169
170 myNewObj = dict2Obj(newDict)
170 myNewObj = dict2Obj(newDict)
171
171
172 print
172 print()
173 print
173 print()
174 print 50*'###'
174 print(50*'###')
175 print myTestObj.__dict__
175 print(myTestObj.__dict__)
176 print myNewObj.__dict__
176 print(myNewObj.__dict__)
177 No newline at end of file
177
@@ -5,7 +5,7 Created on Jul 15, 2014
5 '''
5 '''
6
6
7 import sys
7 import sys
8 import cPickle
8 import pickle
9
9
10 from schainpy.model.data.jrodata import Voltage
10 from schainpy.model.data.jrodata import Voltage
11 # from schainpy.model.io.jrodataIO import USRPReaderMP
11 # from schainpy.model.io.jrodataIO import USRPReaderMP
@@ -37,10 +37,10 if __name__ == "__main__":
37 # print newValue
37 # print newValue
38
38
39
39
40 print '###########CPICKLE##################'
40 print('###########CPICKLE##################')
41 print myDict
41 print(myDict)
42 newSerialized = cPickle.dumps(myDict, 2)
42 newSerialized = pickle.dumps(myDict, 2)
43 # print newValue
43 # print newValue
44
44
45 newDict = cPickle.loads(newSerialized)
45 newDict = pickle.loads(newSerialized)
46 print newDict No newline at end of file
46 print(newDict) No newline at end of file
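Editor's note: cPickle was folded into the standard pickle module in Python 3, which transparently uses the accelerated C implementation when available, so the straight rename above is all this script needs. A self-contained round-trip sketch (hypothetical data, protocol 2 stays readable from Python 2):

    import pickle
    data = pickle.loads(pickle.dumps({'a': 1}, protocol=2))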