@@ -1,6 +1,6 | |||
|
1 |
# |
|
|
1 | # CHANGELOG: | |
|
2 | 2 | |
|
3 |
## |
|
|
3 | ## 3.0 | |
|
4 | 4 | * Python 3.x & 2.X compatible |
|
5 | 5 | * New architecture with multiprocessing support |
|
6 | 6 | * Add @MPDecorator for multiprocessing Operations (Plots, Writers and Publishers) |
@@ -9,7 +9,7 | |||
|
9 | 9 | * Clean controller to optimize scripts (format & optype are no longer required) |
|
10 | 10 | * New GUI with dynamic load of Units and operations (use Kivy framework) |
|
11 | 11 | |
|
12 |
## |
|
|
12 | ## 2.3 | |
|
13 | 13 | * Added support for Madrigal formats (reading/writing). |
|
14 | 14 | * Added support for reading BLTR parameters (*.sswma). |
|
15 | 15 | * Added support for reading Julia format (*.dat). |
@@ -30,83 +30,83 | |||
|
30 | 30 | * Updated README for MAC OS GUI installation. |
|
31 | 31 | * Setup now installs numpy. |
|
32 | 32 | |
|
33 |
## |
|
|
33 | ## 2.2.6 | |
|
34 | 34 | * Graphics generated by the GUI are now the same as generated by scripts. Issue #1074. |
|
35 | 35 | * Added support for C extensions. |
|
36 | 36 | * Function `hildebrand_sehkon` optimized with a C wrapper. |
|
37 | 37 | * Numpy version updated. |
|
38 | 38 | * Migration to GIT. |
|
39 | 39 | |
|
40 |
## |
|
|
40 | ## 2.2.5: | |
|
41 | 41 | * splitProfiles and combineProfiles modules were added to VoltageProc and Signal Chain GUI. |
|
42 | 42 | * nProfiles of USRP data (hdf5) is the number of profiles there are in one second. |
|
43 | 43 | * jroPlotter works directly with data objects instead of dictionaries |
|
44 | 44 | * script "schain" was added to Signal Chain installer |
|
45 | 45 | |
|
46 |
## |
|
|
46 | ## 2.2.4.1: | |
|
47 | 47 | * jroIO_usrp.py is update to read Sandra's data |
|
48 | 48 | * decimation in Spectra and RTI plots is always enabled. |
|
49 | 49 | * time* window option added to GUI |
|
50 | 50 | |
|
51 |
## |
|
|
51 | ## 2.2.4: | |
|
52 | 52 | * jroproc_spectra_lags.py added to schainpy |
|
53 | 53 | * Bug fixed in schainGUI: ProcUnit was created with the same id in some cases. |
|
54 | 54 | * Bug fixed in jroHeaderIO: Header size validation. |
|
55 | 55 | |
|
56 |
## |
|
|
56 | ## 2.2.3.1: | |
|
57 | 57 | * Filtering block by time has been added. |
|
58 | 58 | * Bug fixed plotting RTI, CoherenceMap and others using xmin and xmax parameters. The first day worked |
|
59 | 59 | properly but the next days did not. |
|
60 | 60 | |
|
61 |
## |
|
|
61 | ## 2.2.3: | |
|
62 | 62 | * Bug fixed in GUI: Error getting(reading) Code value |
|
63 | 63 | * Bug fixed in GUI: Flip option always needs channelList field |
|
64 | 64 | * Bug fixed in jrodata: when one branch modified a value in "dataOut" (example: dataOut.code) this value |
|
65 | 65 | was modified for every branch (because this was a reference). It was modified in data.copy() |
|
66 | 66 | * Bug fixed in jroproc_voltage.profileSelector(): rangeList replaces to profileRangeList. |
|
67 | 67 | |
|
68 |
## |
|
|
68 | ## 2.2.2: | |
|
69 | 69 | * VoltageProc: ProfileSelector, Reshape, Decoder with nTxs!=1 and getblock=True was tested |
|
70 | 70 | * Rawdata and testRawdata.py added to Signal Chain project |
|
71 | 71 | |
|
72 |
## |
|
|
72 | ## 2.2.1: | |
|
73 | 73 | * Bugs fixed in GUI |
|
74 | 74 | * Views were improved in GUI |
|
75 | 75 | * Support to MST* ISR experiments |
|
76 | 76 | * Bug fixed getting noise using hildebrand. (minimum number of points > 20%) |
|
77 | 77 | * handleError added to jroplotter.py |
|
78 | 78 | |
|
79 |
## |
|
|
79 | ## 2.2.0: | |
|
80 | 80 | * GUI: use of external plotter |
|
81 | 81 | * Compatible with matplotlib 1.5.0 |
|
82 | 82 | |
|
83 |
## |
|
|
83 | ## 2.1.5: | |
|
84 | 84 | * serializer module added to Signal Chain |
|
85 | 85 | * jroplotter.py added to Signal Chain |
|
86 | 86 | |
|
87 |
## |
|
|
87 | ## 2.1.4.2: | |
|
88 | 88 | * A new Plotter Class was added |
|
89 | 89 | * Project.start() does not accept filename as a parameter anymore |
|
90 | 90 | |
|
91 |
## |
|
|
91 | ## 2.1.4.1: | |
|
92 | 92 | * Send notifications when an error different to ValueError is detected |
|
93 | 93 | |
|
94 |
## |
|
|
94 | ## 2.1.4: | |
|
95 | 95 | * Sending error notifications to signal chain administrator |
|
96 | 96 | * Login to email server added |
|
97 | 97 | |
|
98 |
## |
|
|
98 | ## 2.1.3.3: | |
|
99 | 99 | * Colored Button Icons were added to GUI |
|
100 | 100 | |
|
101 |
## |
|
|
101 | ## 2.1.3.2: | |
|
102 | 102 | * GUI: user interaction enhanced |
|
103 | 103 | * controller_api.py: Safe access to ControllerThread |
|
104 | 104 | |
|
105 |
## |
|
|
105 | ## 2.1.3.1: | |
|
106 | 106 | * GUI: every icon were resized |
|
107 | 107 | * jroproc_voltage.py: Print a message when "Read from code" option is selected and the code is not defined inside data file |
|
108 | 108 | |
|
109 |
## |
|
|
109 | ## 2.1.3: | |
|
110 | 110 | * jroplot_heispectra.py: SpectraHeisScope was not showing the right channels |
|
111 | 111 | * jroproc_voltage.py: Bug fixed selecting profiles (self.nProfiles took a wrong value), |
|
112 | 112 | Bug fixed selecting heights by block (selecting profiles instead of heights) |
@@ -114,7 +114,7 was modified for every branch (because this was a reference). It was modified in | |||
|
114 | 114 | * jroIO_heispectra.py: Bug fixed in FitsReader. Using local Fits instance instead schainpy.mode.data.jrodata.Fits. |
|
115 | 115 | * jroIO_heispectra.py: Channel index list does not exist. |
|
116 | 116 | |
|
117 |
## |
|
|
117 | ## 2.1.2: | |
|
118 | 118 | * jroutils_ftp.py: Bug fixed, Any error sending file stopped the Server Thread |
|
119 | 119 | Server thread opens and closes remote server each time file list is sent |
|
120 | 120 | * jroplot_spectra.py: Noise path was not being created when noise data is saved. |
@@ -388,6 +388,7 class Voltage(JROData): | |||
|
388 | 388 | self.errorCount = None |
|
389 | 389 | self.nCohInt = None |
|
390 | 390 | self.blocksize = None |
|
391 | self.flagCohInt = False | |
|
391 | 392 | self.flagDecodeData = False # asumo q la data no esta decodificada |
|
392 | 393 | self.flagDeflipData = False # asumo q la data no esta sin flip |
|
393 | 394 | self.flagShiftFFT = False |
@@ -899,7 +899,8 class JRODataReader(Reader): | |||
|
899 | 899 | def readNextBlock(self): |
|
900 | 900 | |
|
901 | 901 | while True: |
|
902 | self.__setNewBlock() | |
|
902 | if not(self.__setNewBlock()): | |
|
903 | continue | |
|
903 | 904 | |
|
904 | 905 | if not(self.readBlock()): |
|
905 | 906 | return 0 |
@@ -954,55 +955,49 class JRODataReader(Reader): | |||
|
954 | 955 | # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels) |
|
955 | 956 | self.getBlockDimension() |
|
956 | 957 | |
|
957 |
def verifyFile(self, filename |
|
|
958 | def verifyFile(self, filename): | |
|
958 | 959 | |
|
959 |
|
|
|
960 | flag = True | |
|
960 | 961 | |
|
961 | 962 | try: |
|
962 | 963 | fp = open(filename, 'rb') |
|
963 | 964 | except IOError: |
|
964 | ||
|
965 | if msgFlag: | |
|
966 | print("[Reading] File %s can't be opened" % (filename)) | |
|
967 | ||
|
965 | log.error("File {} can't be opened".format(filename), self.name) | |
|
968 | 966 | return False |
|
969 | 967 | |
|
970 | if self.waitDataBlock(0): | |
|
971 | basicHeaderObj = BasicHeader(LOCALTIME) | |
|
972 | systemHeaderObj = SystemHeader() | |
|
973 | radarControllerHeaderObj = RadarControllerHeader() | |
|
974 |
|
|
|
975 | ||
|
976 | if not(basicHeaderObj.read(fp)): | |
|
977 | fp.close() | |
|
978 | return False | |
|
979 | ||
|
980 |
|
|
|
981 |
|
|
|
982 | return False | |
|
983 | ||
|
984 |
|
|
|
985 |
|
|
|
986 | return False | |
|
987 | ||
|
988 |
|
|
|
989 |
|
|
|
990 |
|
|
|
991 | ||
|
992 |
|
|
|
993 | dt1 = basicHeaderObj.datatime | |
|
994 | fp.seek(self.fileSize-processingHeaderObj.blockSize-24) | |
|
968 | if self.online and self.waitDataBlock(0): | |
|
969 | pass | |
|
970 | ||
|
971 | basicHeaderObj = BasicHeader(LOCALTIME) | |
|
972 | systemHeaderObj = SystemHeader() | |
|
973 | radarControllerHeaderObj = RadarControllerHeader() | |
|
974 | processingHeaderObj = ProcessingHeader() | |
|
975 | ||
|
976 | if not(basicHeaderObj.read(fp)): | |
|
977 | flag = False | |
|
978 | if not(systemHeaderObj.read(fp)): | |
|
979 | flag = False | |
|
980 | if not(radarControllerHeaderObj.read(fp)): | |
|
981 | flag = False | |
|
982 | if not(processingHeaderObj.read(fp)): | |
|
983 | flag = False | |
|
984 | if not self.online: | |
|
985 | dt1 = basicHeaderObj.datatime | |
|
986 | pos = self.fileSize-processingHeaderObj.blockSize-24 | |
|
987 | if pos<0: | |
|
988 | flag = False | |
|
989 | log.error('Invalid size for file: {}'.format(self.filename), self.name) | |
|
990 | else: | |
|
991 | fp.seek(pos) | |
|
995 | 992 | if not(basicHeaderObj.read(fp)): |
|
996 |
f |
|
|
997 | return False | |
|
998 | dt2 = basicHeaderObj.datatime | |
|
999 |
|
|
|
1000 | self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime): | |
|
1001 | return False | |
|
993 | flag = False | |
|
994 | dt2 = basicHeaderObj.datatime | |
|
995 | if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \ | |
|
996 | self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime): | |
|
997 | flag = False | |
|
1002 | 998 | |
|
1003 | 999 | fp.close() |
|
1004 | ||
|
1005 | return True | |
|
1000 | return flag | |
|
1006 | 1001 | |
|
1007 | 1002 | def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False): |
|
1008 | 1003 |
@@ -358,7 +358,7 class SpectraWriter(JRODataWriter, Operation): | |||
|
358 | 358 | |
|
359 | 359 | spc = numpy.transpose( self.data_spc, (0,2,1) ) |
|
360 | 360 | if not self.processingHeaderObj.shif_fft: |
|
361 | spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones | |
|
361 | spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones | |
|
362 | 362 | data = spc.reshape((-1)) |
|
363 | 363 | data = data.astype(self.dtype[0]) |
|
364 | 364 | data.tofile(self.fp) |
@@ -369,7 +369,7 class SpectraWriter(JRODataWriter, Operation): | |||
|
369 | 369 | data = numpy.zeros( numpy.shape(cspc), self.dtype ) |
|
370 | 370 | #print 'data.shape', self.shape_cspc_Buffer |
|
371 | 371 | if not self.processingHeaderObj.shif_fft: |
|
372 | cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones | |
|
372 | cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones | |
|
373 | 373 | data['real'] = cspc.real |
|
374 | 374 | data['imag'] = cspc.imag |
|
375 | 375 | data = data.reshape((-1)) |
@@ -74,7 +74,7 class ProcessingUnit(object): | |||
|
74 | 74 | elif 'SchainError' in err: |
|
75 | 75 | log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name) |
|
76 | 76 | else: |
|
77 |
log.error(err |
|
|
77 | log.error(err, self.name) | |
|
78 | 78 | self.dataOut.error = True |
|
79 | 79 | |
|
80 | 80 | for op, optype, opkwargs in self.operations: |
@@ -193,7 +193,7 def MPDecorator(BaseClass): | |||
|
193 | 193 | BaseClass.run(self, dataOut, **self.kwargs) |
|
194 | 194 | except: |
|
195 | 195 | err = traceback.format_exc() |
|
196 |
log.error(err |
|
|
196 | log.error(err, self.name) | |
|
197 | 197 | else: |
|
198 | 198 | break |
|
199 | 199 |
@@ -435,8 +435,6 class CohInt(Operation): | |||
|
435 | 435 | |
|
436 | 436 | Operation.__init__(self, **kwargs) |
|
437 | 437 | |
|
438 | # self.isConfig = False | |
|
439 | ||
|
440 | 438 | def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False): |
|
441 | 439 | """ |
|
442 | 440 | Set the parameters of the integration class. |
@@ -670,7 +668,9 class CohInt(Operation): | |||
|
670 | 668 | |
|
671 | 669 | if self.__dataReady: |
|
672 | 670 | dataOut.data = avgdata |
|
673 |
dataOut. |
|
|
671 | if not dataOut.flagCohInt: | |
|
672 | dataOut.nCohInt *= self.n | |
|
673 | dataOut.flagCohInt = True | |
|
674 | 674 | dataOut.utctime = avgdatatime |
|
675 | 675 | # print avgdata, avgdatatime |
|
676 | 676 | # raise |
General Comments 0
You need to be logged in to leave comments.
Login now