From b509e99f0d8d07a7b46dc17a583efcbf4fc356c3 2012-04-25 16:48:58 From: Miguel Valdez Date: 2012-04-25 16:48:58 Subject: [PATCH] Se ha reordenado las variables --- diff --git a/schainpy/Graphics/SpectraPlot.py b/schainpy/Graphics/SpectraPlot.py index 3742ad5..62d400e 100644 --- a/schainpy/Graphics/SpectraPlot.py +++ b/schainpy/Graphics/SpectraPlot.py @@ -136,7 +136,7 @@ class Spectrum(): nChan, nX, nY = numpy.shape(data) x = numpy.arange(nX) - y = self.m_Spectra.heights + y = self.m_Spectra.heightList thisDatetime = datetime.datetime.fromtimestamp(self.m_Spectra.m_BasicHeader.utc) txtDate = "Self Spectra - Date: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) diff --git a/schainpy/IO/CorrelationIO.py b/schainpy/IO/CorrelationIO.py index 78e58dd..23a540d 100644 --- a/schainpy/IO/CorrelationIO.py +++ b/schainpy/IO/CorrelationIO.py @@ -9,9 +9,22 @@ from DataIO import DataReader from DataIO import DataWriter class CorrelationReader(DataReader): + def __init__(self): + pass class CorrelationWriter(DataWriter): + def __init__(self): - pass \ No newline at end of file + + pass + + def puData(self): + pass + + def writeBlock(self): + pass + + + \ No newline at end of file diff --git a/schainpy/IO/DataIO.py b/schainpy/IO/JRODataIO.py similarity index 87% rename from schainpy/IO/DataIO.py rename to schainpy/IO/JRODataIO.py index 330bdc5..1b9b400 100644 --- a/schainpy/IO/DataIO.py +++ b/schainpy/IO/JRODataIO.py @@ -166,40 +166,10 @@ def getlastFileFromPath(path, ext): return None - -class DataReader(): - - def __init__(self): - pass - - -class DataWriter(): - - def __init__(self): - pass - +class JRODataIO(): -class JRODataReader(DataReader): - - """ - Esta clase es usada como la clase padre de las clases DataReader, - contiene todos lo metodos necesarios para leer datos desde archivos en formato - jicamarca o pdata (.r o .pdata). La lectura de los datos siempre se realiza por bloques. Los datos - leidos son array de 3 dimensiones: - - Para Voltajes - perfiles * alturas * canales - - Para Spectra - paresCanalesIguales * alturas * perfiles (Self Spectra) - paresCanalesDiferentes * alturas * perfiles (Cross Spectra) - canales * alturas (DC Channels) - - y son almacenados en su buffer respectivo. - - Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader, - RadarControllerHeader y DataObj. Los tres primeros se usan para almacenar informacion de la - cabecera de datos (metadata), y el cuarto (DataObj) para obtener y almacenar los datos desde - el buffer cada vez que se ejecute el metodo "getData". 
- """ + #speed of light + c = 3E8 m_BasicHeader = BasicHeader() @@ -211,14 +181,12 @@ class JRODataReader(DataReader): m_DataObj = None - heightList = None - - channelList = None - online = 0 fp = None + dataType = None + fileSizeByHeader = None filenameList = [] @@ -231,11 +199,11 @@ class JRODataReader(DataReader): basicHeaderSize = 24 - dataType = None + nTotalBlocks = 0 + + ippSeconds = 0 - maxTimeStep = 30 - - flagNoMoreFiles = 0 + blocksize = 0 set = 0 @@ -243,47 +211,75 @@ class JRODataReader(DataReader): path = None + maxTimeStep = 30 + + delay = 3 #seconds nTries = 3 #quantity tries - nFiles = 3 #number of files for searching + nFiles = 3 #number of files for searching + - nBlocks = 0 + flagNoMoreFiles = 0 flagIsNewFile = 1 - - ippSeconds = 0 - + flagResetProcessing = 0 flagIsNewBlock = 0 - nReadBlocks = 0 - - blocksize = 0 + def __init__(self): + pass + +class JRODataReader(JRODataIO): + + """ + Esta clase es usada como la clase padre de las clases VoltageReader y SpectraReader. + Contiene todos lo metodos necesarios para leer datos desde archivos en formato + jicamarca o pdata (.r o .pdata). La lectura de los datos siempre se realiza por bloques. Los datos + leidos son array de 3 dimensiones: - datablockIndex = 9999 + Para Voltajes - perfiles * alturas * canales + + Para Spectra - paresCanalesIguales * alturas * perfiles (Self Spectra) + paresCanalesDiferentes * alturas * perfiles (Cross Spectra) + canales * alturas (DC Channels) + + y son almacenados en su buffer respectivo. + + Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader, + RadarControllerHeader y DataObj. Los tres primeros se usan para almacenar informacion de la + cabecera de datos (metadata), y el cuarto (DataObj) para obtener y almacenar los datos desde + el buffer de datos cada vez que se ejecute el metodo "getData". + """ - #speed of light - c = 3E8 + nReadBlocks = 0 def __init__(self, m_DataObj=None): + raise ValueError, "This class can't be instanced" - - - def hasNotDataInBuffer(self): + raise ValueError, "Not implemented" - - + + def getBlockDimension(self): + + raise ValueError, "No implemented" + def readBlock(self): + + self.nTotalBlocks += 1 + self.nReadBlocks += 1 + raise ValueError, "This method has not been implemented" - + def getData( self ): + raise ValueError, "This method has not been implemented" + def __rdSystemHeader(self, fp=None): @@ -313,9 +309,6 @@ class JRODataReader(DataReader): fp = self.fp self.m_BasicHeader.read(fp) - - def getBlockDimension(self): - raise ValueError, "No implemented" def __readFirstHeader(self): """ @@ -327,7 +320,6 @@ class JRODataReader(DataReader): self.m_RadarControllerHeader self.m_ProcessingHeader self.firstHeaderSize - self.heightList self.dataType self.fileSizeByHeader self.ippSeconds @@ -341,40 +333,116 @@ class JRODataReader(DataReader): self.__rdProcessingHeader() self.firstHeaderSize = self.m_BasicHeader.size - data_type=int(numpy.log2((self.m_ProcessingHeader.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) - if data_type == 0: - tmp = numpy.dtype([('real','= neededSize ): + self.__rdBasicHeader() + return 1 + + #si es OnLine y ademas aun no se han leido un bloque completo entonces se espera por uno valido + if self.online and (self.nReadBlocks < self.m_ProcessingHeader.dataBlocksPerFile): + + fpointer = self.fp.tell() + + for nTries in range( self.nTries ): + #self.fp.close() + + print "\tWaiting %0.2f seconds for the next block, try %03d ..." 
% (self.delay, nTries+1) + time.sleep( self.delay ) + + #self.fp = open( self.filename, 'rb' ) + #self.fp.seek( fpointer ) + + self.fileSize = os.path.getsize( self.filename ) + currentSize = self.fileSize - fpointer + + if ( currentSize >= neededSize ): + self.__rdBasicHeader() + return 1 + + #Setting new file + if not( self.setNextFile() ): + return 0 + + deltaTime = self.m_BasicHeader.utc - self.lastUTTime # check this + + self.flagResetProcessing = 0 + + if deltaTime > self.maxTimeStep: + self.flagResetProcessing = 1 + + return 1 + + def readNextBlock(self): + """ + Establece un nuevo bloque de datos a leer y los lee, si es que no existiese + mas bloques disponibles en el archivo actual salta al siguiente. + + Affected: + self.lastUTTime + Return: None + """ + + if not(self.__setNewBlock()): + return 0 + + if not(self.readBlock()): + return 0 + + return 1 def __setNextFileOnline(self): """ @@ -540,81 +608,14 @@ class JRODataReader(DataReader): newFile = self.__setNextFileOnline() else: newFile = self.__setNextFileOffline() - - if self.flagNoMoreFiles: - sys.exit(0) if not(newFile): return 0 self.__readFirstHeader() - self.nBlocks = 0 + self.nReadBlocks = 0 return 1 - - - def __setNewBlock(self): - """ - Lee el Basic Header y posiciona le file pointer en la posicion inicial del bloque a leer - Affected: - self.m_BasicHeader - self.flagNoContinuousBlock - self.ns - - Return: - 0 : Si el file no tiene un Basic Header que pueda ser leido - 1 : Si se pudo leer el Basic Header - """ - if self.fp == None: - return 0 - - if self.flagIsNewFile: - return 1 - - currentSize = self.fileSize - self.fp.tell() - neededSize = self.m_ProcessingHeader.blockSize + self.basicHeaderSize - - #If there is enough data setting new data block - if ( currentSize >= neededSize ): - self.__rdBasicHeader() - return 1 - - #si es OnLine y ademas aun no se han leido un bloque completo entonces se espera por uno valido - elif (self.nBlocks != self.m_ProcessingHeader.dataBlocksPerFile) and self.online: - for nTries in range( self.nTries ): - - fpointer = self.fp.tell() - self.fp.close() - - print "\tWaiting %0.2f sec for the next block, try %03d ..." 
% (self.delay, nTries+1) - time.sleep( self.delay ) - - self.fp = open( self.filename, 'rb' ) - self.fp.seek( fpointer ) - - self.fileSize = os.path.getsize( self.filename ) - currentSize = self.fileSize - self.fp.tell() - neededSize = self.m_ProcessingHeader.blockSize + self.basicHeaderSize - - if ( currentSize >= neededSize ): - self.__rdBasicHeader() - return 1 - - #Setting new file - if not( self.setNextFile() ): - return 0 - - deltaTime = self.m_BasicHeader.utc - self.lastUTTime # check this - - self.flagResetProcessing = 0 - - if deltaTime > self.maxTimeStep: - self.flagResetProcessing = 1 - #self.nReadBlocks = 0 - - return 1 - - def __searchFilesOnLine(self, path, startDateTime=None, endDateTime=None, expLabel = "", ext = None): """ Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y @@ -818,8 +819,16 @@ class JRODataReader(DataReader): return 0 return 1 - + def updateDataHeader(self): + + self.m_DataObj.m_BasicHeader = self.m_BasicHeader.copy() + self.m_DataObj.m_ProcessingHeader = self.m_ProcessingHeader.copy() + self.m_DataObj.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() + self.m_DataObj.m_SystemHeader = self.m_SystemHeader.copy() + + self.m_DataObj.updateObjFromHeader() + def setup(self, path, startDateTime=None, endDateTime=None, set=0, expLabel = "", ext = None, online = 0): """ setup configura los parametros de lectura de la clase DataReader. @@ -923,88 +932,20 @@ class JRODataReader(DataReader): self.endDoy = endDateTime.timetuple().tm_yday #call fillHeaderValues() - to Data Object - self.m_DataObj.m_BasicHeader = self.m_BasicHeader.copy() - self.m_DataObj.m_ProcessingHeader = self.m_ProcessingHeader.copy() - self.m_DataObj.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() - self.m_DataObj.m_SystemHeader = self.m_SystemHeader.copy() - self.m_DataObj.dataType = self.dataType + self.updateDataHeader() - return 1 - - - def readNextBlock(self): - """ - Establece un nuevo bloque de datos a leer y los lee, si es que no existiese - mas bloques disponibles en el archivo actual salta al siguiente. - - Affected: - self.lastUTTime - - Return: None - """ - if not(self.__setNewBlock()): - return 0 - - if not(self.readBlock()): - return 0 - - self.lastUTTime = self.m_BasicHeader.utc - return 1 - - -class JRODataWriter(DataWriter): + +class JRODataWriter(JRODataIO): """ Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura de los datos siempre se realiza por bloques. 
""" - m_BasicHeader = BasicHeader() - - m_SystemHeader = SystemHeader() - - m_RadarControllerHeader = RadarControllerHeader() - - m_ProcessingHeader = ProcessingHeader() - - fp = None - - blocksCounter = 0 - - flagIsNewFile = 1 - - nWriteBlocks = 0 - - flagIsNewBlock = 0 - - flagNoMoreFiles = 0 - - m_DataObj = None - - fp = None - - blocksCounter = 0 - - flagIsNewFile = 1 - nWriteBlocks = 0 - - flagIsNewBlock = 0 - - flagNoMoreFiles = 0 setFile = None - - dataType = None - - path = None - - noMoreFiles = 0 - - filename = None - - channelList = None def __init__(self, m_DataObj=None): @@ -1120,12 +1061,12 @@ class JRODataWriter(DataWriter): timeTuple = time.localtime( self.m_DataObj.m_BasicHeader.utc ) subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) - tmp = os.path.join( path, subfolder ) - if not( os.path.exists(tmp) ): - os.mkdir(tmp) + doypath = os.path.join( path, subfolder ) + if not( os.path.exists(doypath) ): + os.mkdir(doypath) self.setFile = -1 #inicializo mi contador de seteo else: - filesList = os.listdir( tmp ) + filesList = os.listdir( doypath ) if len( filesList ) > 0: filesList = sorted( filesList, key=str.lower ) filen = filesList[-1] @@ -1152,7 +1093,7 @@ class JRODataWriter(DataWriter): fp = open( filename,'wb' ) - self.blocksCounter = 0 + self.nWriteBlocks = 0 #guardando atributos self.filename = filename @@ -1182,7 +1123,7 @@ class JRODataWriter(DataWriter): if self.flagIsNewFile: return 1 - if self.blocksCounter < self.m_ProcessingHeader.dataBlocksPerFile: + if self.nWriteBlocks < self.m_ProcessingHeader.dataBlocksPerFile: self.__writeBasicHeader() return 1 @@ -1208,7 +1149,7 @@ class JRODataWriter(DataWriter): return 1 - def getHeader(self): + def getDataHeader(self): """ Obtiene una copia del First Header @@ -1222,12 +1163,15 @@ class JRODataWriter(DataWriter): Return: None """ + self.m_DataObj.updateHeaderFromObj() + self.m_BasicHeader = self.m_DataObj.m_BasicHeader.copy() self.m_SystemHeader = self.m_DataObj.m_SystemHeader.copy() self.m_RadarControllerHeader = self.m_DataObj.m_RadarControllerHeader.copy() self.m_ProcessingHeader = self.m_DataObj.m_ProcessingHeader.copy() + self.dataType = self.m_DataObj.dataType - + def setup(self, path, set=0, ext=None): """ @@ -1252,7 +1196,7 @@ class JRODataWriter(DataWriter): self.setFile = set - 1 self.ext = ext #self.format = format - self.getHeader() + self.getDataHeader() self.setBlockDimension() diff --git a/schainpy/IO/SpectraIO.py b/schainpy/IO/SpectraIO.py index 8a82060..905ea8c 100644 --- a/schainpy/IO/SpectraIO.py +++ b/schainpy/IO/SpectraIO.py @@ -18,9 +18,9 @@ sys.path.append(path) from Model.JROHeader import * from Model.Spectra import Spectra -from DataIO import JRODataReader -from DataIO import JRODataWriter -from DataIO import isNumber +from JRODataIO import JRODataReader +from JRODataIO import JRODataWriter +from JRODataIO import isNumber class SpectraReader( JRODataReader ): @@ -160,7 +160,7 @@ class SpectraReader( JRODataReader ): self.nFiles = 3 #number of files for searching - self.nBlocks = 0 + self.nReadBlocks = 0 self.flagIsNewFile = 1 @@ -170,7 +170,7 @@ class SpectraReader( JRODataReader ): self.flagIsNewBlock = 0 - self.nReadBlocks = 0 + self.nTotalBlocks = 0 self.blocksize = 0 @@ -202,14 +202,14 @@ class SpectraReader( JRODataReader ): """ self.nChannels = 0 self.nPairs = 0 - #self.pairList = [] + self.pairList = [] for i in range( 0, self.m_ProcessingHeader.totalSpectra*2, 2 ): if self.m_ProcessingHeader.spectraComb[i] == self.m_ProcessingHeader.spectraComb[i+1]: self.nChannels = 
self.nChannels + 1 #par de canales iguales else: self.nPairs = self.nPairs + 1 #par de canales diferentes - #self.pairList.append( (self.m_ProcessingHeader.spectraComb[i], self.m_ProcessingHeader.spectraComb[i+1]) ) + self.pairList.append( (self.m_ProcessingHeader.spectraComb[i], self.m_ProcessingHeader.spectraComb[i+1]) ) pts2read = self.m_ProcessingHeader.numHeights * self.m_ProcessingHeader.profilesPerBlock @@ -217,13 +217,8 @@ class SpectraReader( JRODataReader ): self.pts2read_CrossSpectra = int( self.nPairs * pts2read ) self.pts2read_DCchannels = int( self.m_SystemHeader.numChannels * self.m_ProcessingHeader.numHeights ) - self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels + self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels - self.m_DataObj.nPoints = self.m_ProcessingHeader.profilesPerBlock - self.m_DataObj.nChannels = self.nChannels - self.m_DataObj.nPairs = self.nPairs - - #self.pairList = tuple( self.pairList ) self.channelList = numpy.arange( self.nChannels ) @@ -240,7 +235,7 @@ class SpectraReader( JRODataReader ): self.datablockIndex self.flagIsNewFile self.flagIsNewBlock - self.nReadBlocks + self.nTotalBlocks self.data_spc self.data_cspc self.data_dc @@ -254,25 +249,6 @@ class SpectraReader( JRODataReader ): spc = numpy.fromfile( self.fp, self.dataType[0], self.pts2read_SelfSpectra ) cspc = numpy.fromfile( self.fp, self.dataType, self.pts2read_CrossSpectra ) dc = numpy.fromfile( self.fp, self.dataType, self.pts2read_DCchannels ) #int(self.m_ProcessingHeader.numHeights*self.m_SystemHeader.numChannels) ) - - if self.online: - if (spc.size + cspc.size + dc.size) != self.blocksize: - for nTries in range( self.nTries ): - print "\tWaiting %0.2f sec for the next block, try %03d ..." 
% (self.delay, nTries+1) - time.sleep( self.delay ) - self.fp.seek( fpointer ) - fpointer = self.fp.tell() - - spc = numpy.fromfile( self.fp, self.dataType[0], self.pts2read_SelfSpectra ) - cspc = numpy.fromfile( self.fp, self.dataType, self.pts2read_CrossSpectra ) - dc = numpy.fromfile( self.fp, self.dataType, self.pts2read_DCchannels ) #int(self.m_ProcessingHeader.numHeights*self.m_SystemHeader.numChannels) ) - - if (spc.size + cspc.size + dc.size) == self.blocksize: - blockOk_flag = True - break - - if not( blockOk_flag ): - return 0 try: spc = spc.reshape( (self.nChannels, self.m_ProcessingHeader.numHeights, self.m_ProcessingHeader.profilesPerBlock) ) #transforma a un arreglo 3D @@ -304,8 +280,8 @@ class SpectraReader( JRODataReader ): self.flagIsNewFile = 0 self.flagIsNewBlock = 1 + self.nTotalBlocks += 1 self.nReadBlocks += 1 - self.nBlocks += 1 return 1 @@ -335,15 +311,9 @@ class SpectraReader( JRODataReader ): if self.__hasNotDataInBuffer(): if not( self.readNextBlock() ): - self.setNextFile() return 0 - self.m_DataObj.m_BasicHeader = self.m_BasicHeader.copy() - self.m_DataObj.m_ProcessingHeader = self.m_ProcessingHeader.copy() - self.m_DataObj.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() - self.m_DataObj.m_SystemHeader = self.m_SystemHeader.copy() - self.m_DataObj.heightList = self.heightList - self.m_DataObj.dataType = self.dataType + self.updateDataHeader() if self.flagNoMoreFiles == 1: print 'Process finished' @@ -420,11 +390,11 @@ class SpectraWriter(JRODataWriter): self.fp = None - self.blocksCounter = 0 + self.nWriteBlocks = 0 self.flagIsNewFile = 1 - self.nWriteBlocks = 0 + self.nTotalBlocks = 0 self.flagIsNewBlock = 0 @@ -486,8 +456,8 @@ class SpectraWriter(JRODataWriter): self.data_dc self.flagIsNewFile self.flagIsNewBlock - self.nWriteBlocks - self.blocksCounter + self.nTotalBlocks + self.nWriteBlocks Return: None """ @@ -522,8 +492,8 @@ class SpectraWriter(JRODataWriter): self.flagIsNewFile = 0 self.flagIsNewBlock = 1 + self.nTotalBlocks += 1 self.nWriteBlocks += 1 - self.blocksCounter += 1 def putData(self): @@ -556,7 +526,7 @@ class SpectraWriter(JRODataWriter): # #self.m_ProcessingHeader.dataBlocksPerFile) if self.hasAllDataInBuffer(): - self.getHeader() + self.getDataHeader() self.writeNextBlock() if self.flagNoMoreFiles: diff --git a/schainpy/IO/TestIO.py b/schainpy/IO/TestIO.py index 2e9b15c..a33bafd 100644 --- a/schainpy/IO/TestIO.py +++ b/schainpy/IO/TestIO.py @@ -48,7 +48,7 @@ class TestIO(): print 'jump' if self.voltageReaderObj.flagIsNewBlock: - print 'Block No %04d, Time: %s'%(self.voltageReaderObj.nReadBlocks, + print 'Block No %04d, Time: %s'%(self.voltageReaderObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.voltageReaderObj.m_BasicHeader.utc)) # self.voltageWriterObj.putData() diff --git a/schainpy/IO/VoltageIO.py b/schainpy/IO/VoltageIO.py index 6a2ef00..ba7da80 100644 --- a/schainpy/IO/VoltageIO.py +++ b/schainpy/IO/VoltageIO.py @@ -17,8 +17,8 @@ sys.path.append(path) from Model.JROHeader import * from Model.Voltage import Voltage -from IO.DataIO import JRODataReader -from IO.DataIO import JRODataWriter +from IO.JRODataIO import JRODataReader +from IO.JRODataIO import JRODataWriter class VoltageReader(JRODataReader): @@ -50,7 +50,7 @@ class VoltageReader(JRODataReader): #to get one profile profile = readerObj.getData() - + #print the profile print profile @@ -63,13 +63,7 @@ class VoltageReader(JRODataReader): """ m_DataObj = None - idProfile = 0 - datablock = None - - pts2read = 0 - - utc = 0 ext = ".r" @@ -101,13 +95,9 @@ class 
VoltageReader(JRODataReader): raise ValueError, "in VoltageReader, m_Voltage must be an Voltage class object" self.m_DataObj = m_Voltage - - self.idProfile = 0 self.datablock = None - self.pts2read = 0 - self.utc = 0 self.ext = ".r" @@ -160,7 +150,7 @@ class VoltageReader(JRODataReader): self.path = None - self.datablockIndex = 9999 + self.profileIndex = 9999 self.delay = 3 #seconds @@ -168,7 +158,7 @@ class VoltageReader(JRODataReader): self.nFiles = 3 #number of files for searching - self.nBlocks = 0 + self.nReadBlocks = 0 self.flagIsNewFile = 1 @@ -178,12 +168,12 @@ class VoltageReader(JRODataReader): self.flagIsNewBlock = 0 - self.nReadBlocks = 0 + self.nTotalBlocks = 0 self.blocksize = 0 def __hasNotDataInBuffer(self): - if self.datablockIndex >= self.m_ProcessingHeader.profilesPerBlock: + if self.profileIndex >= self.m_ProcessingHeader.profilesPerBlock: return 1 return 0 @@ -193,15 +183,13 @@ class VoltageReader(JRODataReader): Obtiene la cantidad de puntos a leer por cada bloque de datos Affected: - self.pts2read self.blocksize Return: None """ - self.pts2read = self.m_ProcessingHeader.profilesPerBlock * self.m_ProcessingHeader.numHeights * self.m_SystemHeader.numChannels - self.blocksize = self.pts2read - self.m_DataObj.nProfiles = self.m_ProcessingHeader.profilesPerBlock + pts2read = self.m_ProcessingHeader.profilesPerBlock * self.m_ProcessingHeader.numHeights * self.m_SystemHeader.numChannels + self.blocksize = pts2read def readBlock(self): @@ -218,54 +206,34 @@ class VoltageReader(JRODataReader): None Affected: - self.datablockIndex + self.profileIndex self.datablock self.flagIsNewFile - self.idProfile self.flagIsNewBlock - self.nReadBlocks + self.nTotalBlocks Exceptions: Si un bloque leido no es un bloque valido """ - blockOk_flag = False - fpointer = self.fp.tell() - - junk = numpy.fromfile( self.fp, self.dataType, self.pts2read ) - - if self.online: - if junk.size != self.blocksize: - for nTries in range( self.nTries ): - print "\tWaiting %0.2f sec for the next block, try %03d ..." 
% (self.delay, nTries+1) - time.sleep( self.delay ) - self.fp.seek( fpointer ) - fpointer = self.fp.tell() - - junk = numpy.fromfile( self.fp, self.dataType, self.pts2read ) - - if junk.size == self.blocksize: - blockOk_flag = True - break - - if not( blockOk_flag ): - return 0 + + junk = numpy.fromfile( self.fp, self.dataType, self.blocksize ) try: junk = junk.reshape( (self.m_ProcessingHeader.profilesPerBlock, self.m_ProcessingHeader.numHeights, self.m_SystemHeader.numChannels) ) except: - print "Data file %s is invalid" % self.filename + print "The read block (%3d) has not enough data" %self.nReadBlocks return 0 junk = numpy.transpose(junk, (2,0,1)) self.datablock = junk['real'] + junk['imag']*1j - self.datablockIndex = 0 + self.profileIndex = 0 + self.flagIsNewFile = 0 - self.idProfile = 0 self.flagIsNewBlock = 1 + self.nTotalBlocks += 1 self.nReadBlocks += 1 - self.nBlocks += 1 return 1 @@ -284,15 +252,13 @@ class VoltageReader(JRODataReader): Variables afectadas: self.m_DataObj - self.datablockIndex - self.idProfile + self.profileIndex Affected: self.m_DataObj - self.datablockIndex + self.profileIndex self.flagResetProcessing self.flagIsNewBlock - self.idProfile """ if self.flagNoMoreFiles: return 0 @@ -302,15 +268,9 @@ class VoltageReader(JRODataReader): if self.__hasNotDataInBuffer(): if not( self.readNextBlock() ): - self.setNextFile() return 0 - self.m_DataObj.m_BasicHeader = self.m_BasicHeader.copy() - self.m_DataObj.m_ProcessingHeader = self.m_ProcessingHeader.copy() - self.m_DataObj.m_RadarControllerHeader = self.m_RadarControllerHeader.copy() - self.m_DataObj.m_SystemHeader = self.m_SystemHeader.copy() - self.m_DataObj.heightList = self.heightList - self.m_DataObj.dataType = self.dataType + self.updateDataHeader() if self.flagNoMoreFiles == 1: print 'Process finished' @@ -322,17 +282,15 @@ class VoltageReader(JRODataReader): self.m_DataObj.flagNoData = True return 0 - time = self.m_BasicHeader.utc + self.datablockIndex * self.ippSeconds + time = self.m_BasicHeader.utc + self.profileIndex * self.ippSeconds self.m_DataObj.m_BasicHeader.utc = time self.m_DataObj.flagNoData = False self.m_DataObj.flagResetProcessing = self.flagResetProcessing - self.m_DataObj.data = self.datablock[:,self.datablockIndex,:] - self.m_DataObj.idProfile = self.idProfile + self.m_DataObj.data = self.datablock[:,self.profileIndex,:] - self.datablockIndex += 1 - self.idProfile += 1 + self.profileIndex += 1 #call setData - to Data Object @@ -354,7 +312,7 @@ class VoltageWriter( JRODataWriter ): datablock = None - datablockIndex = 0 + profileIndex = 0 shapeBuffer = None @@ -378,7 +336,7 @@ class VoltageWriter( JRODataWriter ): def hasAllDataInBuffer(self): - if self.datablockIndex >= self.m_ProcessingHeader.profilesPerBlock: + if self.profileIndex >= self.m_ProcessingHeader.profilesPerBlock: return 1 return 0 @@ -409,11 +367,11 @@ class VoltageWriter( JRODataWriter ): Escribe el buffer en el file designado Affected: - self.datablockIndex + self.profileIndex self.flagIsNewFile self.flagIsNewBlock - self.nWriteBlocks - self.blocksCounter + self.nTotalBlocks + self.nWriteBlocks Return: None """ @@ -429,11 +387,11 @@ class VoltageWriter( JRODataWriter ): data.tofile( self.fp ) self.datablock.fill(0) - self.datablockIndex = 0 + self.profileIndex = 0 self.flagIsNewFile = 0 self.flagIsNewBlock = 1 + self.nTotalBlocks += 1 self.nWriteBlocks += 1 - self.blocksCounter += 1 def putData(self): @@ -442,7 +400,7 @@ class VoltageWriter( JRODataWriter ): Affected: self.flagIsNewBlock - self.datablockIndex + 
self.profileIndex Return: 0 : Si no hay data o no hay mas files que puedan escribirse @@ -456,16 +414,16 @@ class VoltageWriter( JRODataWriter ): if self.m_DataObj.flagResetProcessing: self.datablock.fill(0) - self.datablockIndex = 0 + self.profileIndex = 0 self.setNextFile() - self.datablock[:,self.datablockIndex,:] = self.m_DataObj.data + self.datablock[:,self.profileIndex,:] = self.m_DataObj.data - self.datablockIndex += 1 + self.profileIndex += 1 if self.hasAllDataInBuffer(): #if self.flagIsNewFile: - self.getHeader() + self.getDataHeader() self.writeNextBlock() if self.flagNoMoreFiles: diff --git a/schainpy/Model/Correlation.py b/schainpy/Model/Correlation.py index 4b1e6c8..42b1814 100644 --- a/schainpy/Model/Correlation.py +++ b/schainpy/Model/Correlation.py @@ -4,15 +4,55 @@ Created on Feb 7, 2012 @author $Author$ @version $Id$ ''' +from JROData import JROData, Noise +from JROHeader import RadarControllerHeader, ProcessingHeader, SystemHeader, BasicHeader -class Correlation(Data): +class Correlation(JROData): ''' classdocs ''' - - + + data = None + + nLags = None + + lagsList = None + def __init__(self): ''' Constructor ''' - pass \ No newline at end of file + + self.m_RadarControllerHeader = RadarControllerHeader() + + self.m_ProcessingHeader = ProcessingHeader() + + self.m_SystemHeader = SystemHeader() + + self.m_BasicHeader = BasicHeader() + + self.m_NoiseObj = Noise() + + self.type = "Spectra" + + self.dataType = None + + self.nHeights = 0 + + self.nChannels = 0 + + self.channelList = None + + self.heightList = None + + self.flagNoData = True + + self.flagResetProcessing = False + + + self.data = None + + self.nLags = 0 + + self.lagsList = None + \ No newline at end of file diff --git a/schainpy/Model/JROData.py b/schainpy/Model/JROData.py index cd6eb72..17b6771 100644 --- a/schainpy/Model/JROData.py +++ b/schainpy/Model/JROData.py @@ -11,7 +11,6 @@ class Data: ''' classdocs ''' - type = None def __init__(self): ''' @@ -46,27 +45,59 @@ class JROData(Data): ''' classdocs ''' + m_RadarControllerHeader = RadarControllerHeader() + m_ProcessingHeader = ProcessingHeader() + m_SystemHeader = SystemHeader() + m_BasicHeader = BasicHeader() + m_NoiseObj = Noise() - data = None + type = None + dataType = None - - nProfiles = None + nHeights = None + nChannels = None heightList = None + channelList = None flagNoData = False + flagResetProcessing = False def __init__(self): ''' Constructor ''' - raise ValueError, "This class has not been implemented" \ No newline at end of file + raise ValueError, "This class has not been implemented" + + def updateHeaderFromObj(self): + + xi = self.heightList[0] + step = self.heightList[1] - self.heightList[0] + + self.m_ProcessingHeader.firstHeight = xi + self.m_ProcessingHeader.deltaHeight = step + + self.m_ProcessingHeader.numHeights = self.nHeights + self.m_SystemHeader.numChannels = self.nChannels + + def updateObjFromHeader(self): + + xi = self.m_ProcessingHeader.firstHeight + step = self.m_ProcessingHeader.deltaHeight + xf = xi + self.m_ProcessingHeader.numHeights*step + + self.heightList = numpy.arange(xi, xf, step) + self.channelList = numpy.arange(self.m_SystemHeader.numChannels) + + self.nHeights = len(self.heightList) + self.nChannels = len(self.channelList) + \ No newline at end of file diff --git a/schainpy/Model/Spectra.py b/schainpy/Model/Spectra.py index aeef1f7..a243d7c 100644 --- a/schainpy/Model/Spectra.py +++ b/schainpy/Model/Spectra.py @@ -13,11 +13,18 @@ class Spectra(JROData): classdocs ''' - type = "Spectra" data_spc = None + 
data_cspc = None + data_dc = None + nFFTPoints = None + + nPairs = None + + pairsList = None + def __init__(self): ''' @@ -32,29 +39,38 @@ class Spectra(JROData): self.m_BasicHeader = BasicHeader() - m_NoiseObj = Noise() + self.m_NoiseObj = Noise() - #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) - self.data_spc = None + self.type = "Spectra" - self.data_cspc = None + self.dataType = None - self.data_dc = None - - self.heightList = None + self.nHeights = 0 + + self.nChannels = 0 self.channelList = None + self.heightList = None + self.flagNoData = True - self.nProfiles = None + self.flagResetProcessing = False + - self.nPoints = None + #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) + self.data_spc = None - self.dataType = None + self.data_cspc = None - self.flagResetProcessing = False + self.data_dc = None + + self.nFFTPoints = None self.nPairs = 0 - - self.nChannels = 0 \ No newline at end of file + + self.pairsList = None + + + + \ No newline at end of file diff --git a/schainpy/Model/Voltage.py b/schainpy/Model/Voltage.py index 20d95bf..47bd903 100644 --- a/schainpy/Model/Voltage.py +++ b/schainpy/Model/Voltage.py @@ -13,8 +13,10 @@ class Voltage(JROData): classdocs ''' - type = "Voltage" data = None + + nProfiles = None + profileIndex = None def __init__(self): @@ -22,28 +24,40 @@ class Voltage(JROData): Constructor ''' - self.m_RadarControllerHeader= RadarControllerHeader() + self.m_RadarControllerHeader = RadarControllerHeader() - self.m_ProcessingHeader= ProcessingHeader() + self.m_ProcessingHeader = ProcessingHeader() - self.m_SystemHeader= SystemHeader() + self.m_SystemHeader = SystemHeader() - self.m_BasicHeader= BasicHeader() + self.m_BasicHeader = BasicHeader() - m_NoiseObj = Noise() + self.m_NoiseObj = Noise() + + self.type = "Voltage" #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) self.data = None self.dataType = None + self.nHeights = 0 + + self.nChannels = 0 + + self.channelList = None + self.heightList = None + self.flagNoData = True + + self.flagResetProcessing = False + + + self.profileIndex = None self.nProfiles = None - self.flagNoData = True - self.flagResetProcessing = False \ No newline at end of file diff --git a/schainpy/Processing/SpectraProcessor.py b/schainpy/Processing/SpectraProcessor.py index 03f2bcd..ae92d95 100644 --- a/schainpy/Processing/SpectraProcessor.py +++ b/schainpy/Processing/SpectraProcessor.py @@ -52,7 +52,7 @@ class SpectraProcessor: self.plotterIndex = 0 if nFFTPoints == None: - nFFTPoints = self.dataOutObj.nPoints + nFFTPoints = self.dataOutObj.nFFTPoints self.nFFTPoints = nFFTPoints self.pairList = pairList @@ -91,16 +91,16 @@ class SpectraProcessor: self.dataOutObj.m_ProcessingHeader.shif_fft """ blocksize = 0 - npoints = self.nFFTPoints - nchannels, nheis = self.dataInObj.data.shape + nFFTPoints = self.nFFTPoints + nChannels, nheis = self.dataInObj.data.shape if self.buffer == None: - self.buffer = numpy.zeros((nchannels, npoints, nheis), dtype='complex') + self.buffer = numpy.zeros((nChannels, nFFTPoints, nheis), dtype='complex') self.buffer[:,self.ptsId,:] = self.dataInObj.data self.ptsId += 1 - if self.ptsId < self.dataOutObj.nPoints: + if self.ptsId < self.dataOutObj.nFFTPoints: self.dataOutObj.flagNoData = True return @@ -115,14 +115,14 @@ class SpectraProcessor: blocksize += spc.size cspc = None - npair = 0 + nPair = 0 if self.pairList != None: #calculo de cross-spectra - npairs = len(self.pairList) - cspc = numpy.zeros((npairs, npoints, nheis), dtype='complex') + 
nPairs = len(self.pairList) + cspc = numpy.zeros((nPairs, nFFTPoints, nheis), dtype='complex') for pair in self.pairList: - cspc[npair,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])) - npair += 1 + cspc[nPair,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])) + nPair += 1 blocksize += cspc.size self.dataOutObj.data_spc = spc @@ -141,25 +141,25 @@ class SpectraProcessor: self.dataOutObj.m_SystemHeader = self.dataInObj.m_SystemHeader.copy() self.dataOutObj.dataType = self.dataInObj.dataType - self.dataOutObj.nPairs = npair - self.dataOutObj.nChannels = nchannels - self.dataOutObj.nProfiles = npoints + self.dataOutObj.nPairs = nPair + self.dataOutObj.nChannels = nChannels + self.dataOutObj.nProfiles = nFFTPoints self.dataOutObj.nHeights = nheis - self.dataOutObj.nPoints = npoints + self.dataOutObj.nFFTPoints = nFFTPoints #self.dataOutObj.data = None - self.dataOutObj.m_SystemHeader.numChannels = nchannels - self.dataOutObj.m_SystemHeader.nProfiles = npoints + self.dataOutObj.m_SystemHeader.numChannels = nChannels + self.dataOutObj.m_SystemHeader.nProfiles = nFFTPoints self.dataOutObj.m_ProcessingHeader.blockSize = blocksize - self.dataOutObj.m_ProcessingHeader.totalSpectra = nchannels + npair - self.dataOutObj.m_ProcessingHeader.profilesPerBlock = npoints + self.dataOutObj.m_ProcessingHeader.totalSpectra = nChannels + nPair + self.dataOutObj.m_ProcessingHeader.profilesPerBlock = nFFTPoints self.dataOutObj.m_ProcessingHeader.numHeights = nheis self.dataOutObj.m_ProcessingHeader.shif_fft = True - spectraComb = numpy.zeros( (nchannels+npair)*2,numpy.dtype('u1')) + spectraComb = numpy.zeros( (nChannels+nPair)*2,numpy.dtype('u1')) k = 0 - for i in range( 0,nchannels*2,2 ): + for i in range( 0,nChannels*2,2 ): spectraComb[i] = k spectraComb[i+1] = k k += 1 @@ -285,29 +285,27 @@ class SpectraProcessor: if self.dataOutObj.flagNoData: return 0 - nchannels = 0 - npairs = 0 - profiles = self.dataOutObj.nProfiles + nProfiles = self.dataOutObj.nProfiles dataType = self.dataOutObj.dataType - heights = self.dataOutObj.m_ProcessingHeader.numHeights + nHeights = self.dataOutObj.m_ProcessingHeader.numHeights blocksize = 0 #self spectra - nchannels = len(channelList) - spc = numpy.zeros( (nchannels,profiles,heights), dataType[0] ) + nChannels = len(channelList) + spc = numpy.zeros( (nChannels,nProfiles,nHeights), dataType[0] ) for index, channel in enumerate(channelList): spc[index,:,:] = self.dataOutObj.data_spc[channel,:,:] #DC channel - dc = numpy.zeros( (nchannels,heights), dtype='complex' ) + dc = numpy.zeros( (nChannels,nHeights), dtype='complex' ) for index, channel in enumerate(channelList): dc[index,:] = self.dataOutObj.data_dc[channel,:] blocksize += dc.size blocksize += spc.size - npairs = 0 + nPairs = 0 cspc = None if pairList == None: @@ -315,8 +313,8 @@ class SpectraProcessor: if pairList != None: #cross spectra - npairs = len(pairList) - cspc = numpy.zeros( (npairs,profiles,heights), dtype='complex' ) + nPairs = len(pairList) + cspc = numpy.zeros( (nPairs,nProfiles,nHeights), dtype='complex' ) spectraComb = self.dataOutObj.m_ProcessingHeader.spectraComb totalSpectra = len(spectraComb) @@ -338,7 +336,7 @@ class SpectraProcessor: if cspc != None: blocksize += cspc.size - spectraComb = numpy.zeros( (nchannels+npairs)*2,numpy.dtype('u1')) + spectraComb = numpy.zeros( (nChannels+nPairs)*2,numpy.dtype('u1')) i = 0 for val in channelList: spectraComb[i] = val @@ -354,15 +352,15 @@ class SpectraProcessor: self.dataOutObj.data_spc = spc 
self.dataOutObj.data_cspc = cspc self.dataOutObj.data_dc = dc - self.dataOutObj.nChannels = nchannels - self.dataOutObj.nPairs = npairs + self.dataOutObj.nChannels = nChannels + self.dataOutObj.nPairs = nPairs self.dataOutObj.channelList = channelList self.dataOutObj.m_ProcessingHeader.spectraComb = spectraComb - self.dataOutObj.m_ProcessingHeader.totalSpectra = nchannels + npairs - self.dataOutObj.m_SystemHeader.numChannels = nchannels - self.dataOutObj.nChannels = nchannels + self.dataOutObj.m_ProcessingHeader.totalSpectra = nChannels + nPairs + self.dataOutObj.m_SystemHeader.numChannels = nChannels + self.dataOutObj.nChannels = nChannels self.dataOutObj.m_ProcessingHeader.blockSize = blocksize @@ -432,26 +430,26 @@ class SpectraProcessor: if self.dataOutObj.flagNoData: return 0 - nchannels = self.dataOutObj.nChannels - npairs = self.dataOutObj.nPairs - profiles = self.dataOutObj.nProfiles + nChannels = self.dataOutObj.nChannels + nPairs = self.dataOutObj.nPairs + nProfiles = self.dataOutObj.nProfiles dataType = self.dataOutObj.dataType newheis = maxIndex - minIndex + 1 blockSize = 0 #self spectra - spc = numpy.zeros( (nchannels,profiles,newheis), dataType[0] ) - for i in range(nchannels): + spc = numpy.zeros( (nChannels,nProfiles,newheis), dataType[0] ) + for i in range(nChannels): spc[i,:,:] = self.dataOutObj.data_spc[i,:,minIndex:maxIndex+1] #cross spectra - cspc = numpy.zeros( (npairs,profiles,newheis), dtype='complex') - for i in range(npairs): + cspc = numpy.zeros( (nPairs,nProfiles,newheis), dtype='complex') + for i in range(nPairs): cspc[i,:,:] = self.dataOutObj.data_cspc[i,:,minIndex:maxIndex+1] #DC channel - dc = numpy.zeros( (nchannels,newheis), dtype='complex') - for i in range(nchannels): + dc = numpy.zeros( (nChannels,newheis), dtype='complex') + for i in range(nChannels): dc[i] = self.dataOutObj.data_dc[i,minIndex:maxIndex+1] self.dataOutObj.data_spc = spc diff --git a/schainpy/TestDataSChain.py b/schainpy/TestDataSChain.py index f35febb..8ac8fcf 100644 --- a/schainpy/TestDataSChain.py +++ b/schainpy/TestDataSChain.py @@ -57,8 +57,8 @@ class TestSChain(): if self.readerObj.noMoreFiles: break - if self.readerObj.flagIsNewBlock and self.readerObj.nReadBlocks: - print 'Block No %04d, Time: %s' %(self.readerObj.nReadBlocks, + if self.readerObj.flagIsNewBlock and self.readerObj.nTotalBlocks: + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.m_BasicHeader.utc),) #=============================================================== # fin = time.time() diff --git a/schainpy/TestSChain.py b/schainpy/TestSChain.py index b3e2c42..c7d1f9d 100644 --- a/schainpy/TestSChain.py +++ b/schainpy/TestSChain.py @@ -85,7 +85,7 @@ class TestSChain(): break if self.readerObj.flagIsNewBlock: - print 'Block No %04d, Time: %s' %(self.readerObj.nReadBlocks, + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.m_BasicHeader.utc),) fin = time.time() print 'Tiempo de un bloque leido y escrito: [%6.5f]' %(fin - ini) diff --git a/schainpy/TestSVoltageChain.py b/schainpy/TestSVoltageChain.py index 7483ba0..1f0964c 100644 --- a/schainpy/TestSVoltageChain.py +++ b/schainpy/TestSVoltageChain.py @@ -28,27 +28,27 @@ class TestSChain(): def setValues( self ): self.path = "/home/dsuarez/Projects" #1 - #self.path = "/home/valentin/Tmp/VOLTAGE2" #2 + self.path = "/home/roj-idl71/Data/RAWDATA/IMAGING" # self.startDateTime = datetime.datetime(2007,5,1,15,49,0) # self.endDateTime = 
datetime.datetime(2007,5,1,23,0,0) self.startDateTime = datetime.datetime(2011,10,4,0,0,0) self.endDateTime = datetime.datetime(2011,10,4,0,20,0) - self.N = 2 - self.npts = 4 + self.N = 10 + self.npts = 1024 def createObjects( self ): - self.Obj = Voltage() - self.OutObj = Voltage() - self.readerObj = VoltageReader(self.Obj) - self.procObj = VoltageProcessor(self.Obj, self.OutObj) + self.voltObj1 = Voltage() + self.voltObj2 = Voltage() + self.specObj1 = Spectra() - self.spectraObj = Spectra() - self.specProcObj = SpectraProcessor(self.OutObj, self.spectraObj,self.npts) + self.readerObj = VoltageReader(self.voltObj1) + self.voltProcObj = VoltageProcessor(self.voltObj1, self.voltObj2) + self.specProcObj = SpectraProcessor(self.voltObj2, self.specObj1) - #self.plotObj = Osciloscope(self.Obj) + #self.plotObj = Osciloscope(self.voltObj1) if not(self.readerObj.setup( self.path, self.startDateTime, self.endDateTime, expLabel='', online =0) ): sys.exit(0) @@ -62,33 +62,33 @@ class TestSChain(): while(True): self.readerObj.getData() - self.procObj.init() + self.voltProcObj.init() - self.procObj.plotData(idProfile = 1, type='power',winTitle='figura 1') +# self.voltProcObj.plotData(idProfile = 1, type='iq', ymin=-25000, ymax=25000, winTitle='sin decodificar') - self.procObj.decoder(type=0) + self.voltProcObj.decoder(type=0) -# self.procObj.plotData(idProfile = 1, type='iq', xmin=0, xmax=100,winTitle='figura 2') +# self.voltProcObj.plotData(idProfile = 1, type='iq', ymin=-70000, ymax=70000,winTitle='Decodificado') # -# self.procObj.integrator(self.N) + self.voltProcObj.integrator(self.N) - self.procObj.plotData(idProfile = 1, type='power',winTitle='figura 3') +# self.voltProcObj.plotData(idProfile = 1, type='iq', ymin=-700000, ymax=700000,winTitle='figura 3') - self.specProcObj.init() + self.specProcObj.init(self.npts) self.specProcObj.integrator(2) - self.specProcObj.plotData(winTitle='Spectra 1', index=2) + self.specProcObj.plotData(winTitle='Spectra 1', index=0) # if self.readerObj.getData(): # self.plotObj.plotData(idProfile=0, type='power' ) # # -# if self.readerObj.flagNoMoreFiles: -# break + if self.readerObj.flagNoMoreFiles: + break # if self.readerObj.flagIsNewBlock: - print 'Block No %04d, Time: %s' %(self.readerObj.nReadBlocks, + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.m_BasicHeader.utc),) # fin = time.time() diff --git a/schainpy/TestSpectraSChain.py b/schainpy/TestSpectraSChain.py index c51bc3b..d3b3712 100644 --- a/schainpy/TestSpectraSChain.py +++ b/schainpy/TestSpectraSChain.py @@ -56,8 +56,8 @@ class TestSChain(): if self.readerObj.flagNoMoreFiles: break - if self.readerObj.flagIsNewBlock and self.readerObj.nReadBlocks: - print 'Block No %04d, Time: %s' %(self.readerObj.nReadBlocks, + if self.readerObj.flagIsNewBlock and self.readerObj.nTotalBlocks: + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.m_BasicHeader.utc),) #=============================================================== # fin = time.time() diff --git a/schainpy/TestSpectraWriteChain.py b/schainpy/TestSpectraWriteChain.py index bc3c504..16fe995 100644 --- a/schainpy/TestSpectraWriteChain.py +++ b/schainpy/TestSpectraWriteChain.py @@ -57,8 +57,8 @@ class TestSChain(): if self.readerObj.flagNoMoreFiles: break - if self.readerObj.flagIsNewBlock and self.readerObj.nReadBlocks: - print 'Block No %04d, Time: %s' %(self.readerObj.nReadBlocks, + if self.readerObj.flagIsNewBlock and 
self.readerObj.nTotalBlocks: + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.m_BasicHeader.utc)) #=============================================================== # fin = time.time()
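
Usage sketch (illustrative only, not part of the diff above): the loop below mirrors the updated test scripts in this patch (TestDataSChain.py, TestSVoltageChain.py) after the counter rename to nTotalBlocks; the import paths follow the package layout those scripts assume, and the data path and date range are placeholders.

    import datetime

    from Model.Voltage import Voltage
    from IO.VoltageIO import VoltageReader

    # Placeholder path and acquisition window -- replace with real values.
    path = "/path/to/rawdata"
    startDateTime = datetime.datetime(2011, 10, 4, 0, 0, 0)
    endDateTime = datetime.datetime(2011, 10, 4, 0, 20, 0)

    voltObj = Voltage()
    readerObj = VoltageReader(voltObj)

    # setup() returns 0 when no files match the given path/date range.
    if not readerObj.setup(path, startDateTime, endDateTime, expLabel='', online=0):
        raise SystemExit("No files found")

    while True:
        readerObj.getData()

        if readerObj.flagNoMoreFiles:
            print 'Process finished'
            break

        if readerObj.flagIsNewBlock:
            # nTotalBlocks is the cumulative block counter printed here
            # (the old code printed nReadBlocks instead).
            print 'Block No %04d, Time: %s' % (readerObj.nTotalBlocks,
                datetime.datetime.fromtimestamp(readerObj.m_BasicHeader.utc))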