From 1ab50a21fdd6a2c73af0f08ae8cba6b3527a8b64 2012-09-18 19:31:41
From: Daniel Valdez
Date: 2012-09-18 19:31:41
Subject: [PATCH] In this version, the READ functions for rawdata and pdata work correctly.

---

diff --git a/schainpy2/Data/JROData.py b/schainpy2/Data/JROData.py
index 59ccfb0..83dad0a 100644
--- a/schainpy2/Data/JROData.py
+++ b/schainpy2/Data/JROData.py
@@ -1,57 +1,57 @@
-import os, sys
-import copy
-import numpy
-
-path = os.path.split(os.getcwd())[0]
-sys.path.append(path)
-
-from IO.JROHeader import SystemHeader, RadarControllerHeader
-
-class JROData():
-
-#    m_BasicHeader = BasicHeader()
-#    m_ProcessingHeader = ProcessingHeader()
-
-    systemHeaderObj = SystemHeader()
-
-    radarControllerHeaderObj = RadarControllerHeader()
-
-    data = None
-
-    type = None
-
-    dtype = None
-
-    nChannels = None
-
-    nHeights = None
-
-    nProfiles = None
-
-    heightList = None
-
-    channelList = None
-
-    channelIndexList = None
-
-    flagNoData = False
-
-    flagTimeBlock = False
-
-    dataUtcTime = None
-
-    def __init__(self):
-
-        raise ValueError, "This class has not been implemented"
-
-    def copy(self, inputObj=None):
-
-        if inputObj == None:
-            return copy.deepcopy(self)
-
-        for key in inputObj.__dict__.keys():
-            self.__dict__[key] = inputObj.__dict__[key]
-
-    def deepcopy(self):
-
+import os, sys
+import copy
+import numpy
+
+path = os.path.split(os.getcwd())[0]
+sys.path.append(path)
+
+from IO.JROHeader import SystemHeader, RadarControllerHeader
+
+class JROData:
+
+#    m_BasicHeader = BasicHeader()
+#    m_ProcessingHeader = ProcessingHeader()
+
+    systemHeaderObj = SystemHeader()
+
+    radarControllerHeaderObj = RadarControllerHeader()
+
+#    data = None
+
+    type = None
+
+    dtype = None
+
+    nChannels = None
+
+    nHeights = None
+
+    nProfiles = None
+
+    heightList = None
+
+    channelList = None
+
+    channelIndexList = None
+
+    flagNoData = False
+
+    flagTimeBlock = False
+
+    dataUtcTime = None
+
+    def __init__(self):
+
+        raise ValueError, "This class has not been implemented"
+
+    def copy(self, inputObj=None):
+
+        if inputObj == None:
+            return copy.deepcopy(self)
+
+        for key in inputObj.__dict__.keys():
+            self.__dict__[key] = inputObj.__dict__[key]
+
+    def deepcopy(self):
+        return copy.deepcopy(self)
\ No newline at end of file
diff --git a/schainpy2/Data/Spectra.py b/schainpy2/Data/Spectra.py
index 81500fa..c21574c 100644
--- a/schainpy2/Data/Spectra.py
+++ b/schainpy2/Data/Spectra.py
@@ -1,60 +1,60 @@
-import os, sys
-import numpy
-
-path = os.path.split(os.getcwd())[0]
-sys.path.append(path)
-
-from JROData import JROData
-from IO.JROHeader import SystemHeader, RadarControllerHeader
-
-class Spectra(JROData):
-    data_spc = None
-
-    data_cspc = None
-
-    data_dc = None
-
-    nFFTPoints = None
-
-    nPairs = None
-
-    pairsList = None
-
-    nIncohInt = None
-
-    def __init__(self):
-        '''
-        Constructor
-        '''
-
-        self.m_RadarControllerHeader = RadarControllerHeader()
-
-        self.m_SystemHeader = SystemHeader()
-
-        self.type = "Spectra"
-
-        #data es un numpy array de 2 dmensiones ( canales, alturas)
-#        self.data = None
-
-        self.dtype = None
-
-        self.nChannels = 0
-
-        self.nHeights = 0
-
-        self.nProfiles = None
-
-        self.heightList = None
-
-        self.channelList = None
-
-        self.channelIndexList = None
-
-        self.flagNoData = True
-
-        self.flagTimeBlock = False
-
-        self.dataUtcTime = None
-
-        self.nIncohInt = None
+import os, sys
+import numpy
+
+path = os.path.split(os.getcwd())[0]
+sys.path.append(path)
+
+from JROData import JROData
+from IO.JROHeader import SystemHeader, RadarControllerHeader
+
+class 
Spectra(JROData): + data_spc = None + + data_cspc = None + + data_dc = None + + nFFTPoints = None + + nPairs = None + + pairsList = None + + nIncohInt = None + + def __init__(self): + ''' + Constructor + ''' + + self.m_RadarControllerHeader = RadarControllerHeader() + + self.m_SystemHeader = SystemHeader() + + self.type = "Spectra" + + #data es un numpy array de 2 dmensiones ( canales, alturas) +# self.data = None + + self.dtype = None + + self.nChannels = 0 + + self.nHeights = 0 + + self.nProfiles = None + + self.heightList = None + + self.channelList = None + + self.channelIndexList = None + + self.flagNoData = True + + self.flagTimeBlock = False + + self.dataUtcTime = None + + self.nIncohInt = None \ No newline at end of file diff --git a/schainpy2/Data/Voltage.py b/schainpy2/Data/Voltage.py index 4cd00fe..2cf7ec3 100644 --- a/schainpy2/Data/Voltage.py +++ b/schainpy2/Data/Voltage.py @@ -1,48 +1,50 @@ -import os, sys -import numpy - -path = os.path.split(os.getcwd())[0] -sys.path.append(path) - -from JROData import JROData -from IO.JROHeader import SystemHeader, RadarControllerHeader - -class Voltage(JROData): - - nCohInt = None - - def __init__(self): - ''' - Constructor - ''' - - self.m_RadarControllerHeader = RadarControllerHeader() - - self.m_SystemHeader = SystemHeader() - - self.type = "Voltage" - - #data es un numpy array de 2 dmensiones ( canales, alturas) - self.data = None - - self.dtype = None - - self.nChannels = 0 - - self.nHeights = 0 - - self.nProfiles = None - - self.heightList = None - - self.channelList = None - - self.channelIndexList = None - - self.flagNoData = True - - self.flagTimeBlock = False - - self.dataUtcTime = None - - self.nCohInt = None +import os, sys +import numpy + +path = os.path.split(os.getcwd())[0] +sys.path.append(path) + +from JROData import JROData +from IO.JROHeader import SystemHeader, RadarControllerHeader + +class Voltage(JROData): + + nCohInt = None + + data = None + + def __init__(self): + ''' + Constructor + ''' + + self.m_RadarControllerHeader = RadarControllerHeader() + + self.m_SystemHeader = SystemHeader() + + self.type = "Voltage" + + #data es un numpy array de 2 dmensiones ( canales, alturas) + self.data = None + + self.dtype = None + + self.nChannels = 0 + + self.nHeights = 0 + + self.nProfiles = None + + self.heightList = None + + self.channelList = None + + self.channelIndexList = None + + self.flagNoData = True + + self.flagTimeBlock = False + + self.dataUtcTime = None + + self.nCohInt = None diff --git a/schainpy2/IO/JRODataIO.py b/schainpy2/IO/JRODataIO.py index 1c65d5a..98d7860 100644 --- a/schainpy2/IO/JRODataIO.py +++ b/schainpy2/IO/JRODataIO.py @@ -1,727 +1,738 @@ -import os, sys -import glob -import time -import numpy -import fnmatch -import time, datetime - -path = os.path.split(os.getcwd())[0] -sys.path.append(path) - -from JROHeader import * -from Data.JROData import JROData - -def isNumber(str): - """ - Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero. - - Excepciones: - Si un determinado string no puede ser convertido a numero - Input: - str, string al cual se le analiza para determinar si convertible a un numero o no - - Return: - True : si el string es uno numerico - False : no es un string numerico - """ - try: - float( str ) - return True - except: - return False - -def isThisFileinRange(filename, startUTSeconds, endUTSeconds): - """ - Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado. 
- - Inputs: - filename : nombre completo del archivo de datos en formato Jicamarca (.r) - - startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en - segundos contados desde 01/01/1970. - endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en - segundos contados desde 01/01/1970. - - Return: - Boolean : Retorna True si el archivo de datos contiene datos en el rango de - fecha especificado, de lo contrario retorna False. - - Excepciones: - Si el archivo no existe o no puede ser abierto - Si la cabecera no puede ser leida. - - """ - basicHeaderObj = BasicHeader() - - try: - fp = open(filename,'rb') - except: - raise IOError, "The file %s can't be opened" %(filename) - - sts = basicHeaderObj.read(fp) - fp.close() - - if not(sts): - print "Skipping the file %s because it has not a valid header" %(filename) - return 0 - - if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): - return 0 - - return 1 - - - - -class JRODataIO: - - c = 3E8 - - basicHeaderObj = BasicHeader() - - systemHeaderObj = SystemHeader() - - radarControllerHeaderObj = RadarControllerHeader() - - processingHeaderObj = ProcessingHeader() - - online = 0 - - dtype = None - - pathList = [] - - filenameList = [] - - filename = None - - ext = None - - flagNoMoreFiles = 0 - - flagIsNewFile = 1 - - flagTimeBlock = 0 - - flagIsNewBlock = 0 - - fp = None - - firstHeaderSize = 0 - - basicHeaderSize = 24 - - versionFile = 1103 - - fileSize = None - - ippSeconds = None - - fileSizeByHeader = None - - fileIndex = None - - profileIndex = None - - blockIndex = None - - nTotalBlocks = None - - maxTimeStep = 30 - - lastUTTime = None - - def __init__(self): - pass - -class JRODataReader(JRODataIO): - def __init__(self): - pass - - def createObjByDefault(self): - """ - - """ - raise ValueError, "This method has not been implemented" - - def getBlockDimension(self): - - raise ValueError, "No implemented" - - def __searchFilesOffLine(self, - path, - startDate, - endDate, - startTime=datetime.time(0,0,0), - endTime=datetime.time(23,59,59), - set=None, - expLabel="", - ext=".r"): - dirList = [] - for thisPath in os.listdir(path): - if os.path.isdir(os.path.join(path,thisPath)): - dirList.append(thisPath) - - if not(dirList): - return None, None - - pathList = [] - dateList = [] - - thisDate = startDate - - while(thisDate <= endDate): - year = thisDate.timetuple().tm_year - doy = thisDate.timetuple().tm_yday - - match = fnmatch.filter(dirList, '?' 
+ '%4.4d%3.3d' % (year,doy)) - if len(match) == 0: - thisDate += datetime.timedelta(1) - continue - - pathList.append(os.path.join(path,match[0],expLabel)) - dateList.append(thisDate) - thisDate += datetime.timedelta(1) - - filenameList = [] - for index in range(len(pathList)): - - thisPath = pathList[index] - fileList = glob.glob1(thisPath, "*%s" %ext) - fileList.sort() - - #Busqueda de datos en el rango de horas indicados - thisDate = dateList[index] - startDT = datetime.datetime.combine(thisDate, startTime) - endDT = datetime.datetime.combine(thisDate, endTime) - - startUtSeconds = time.mktime(startDT.timetuple()) - endUtSeconds = time.mktime(endDT.timetuple()) - - for file in fileList: - - filename = os.path.join(thisPath,file) - - if isThisFileinRange(filename, startUtSeconds, endUtSeconds): - filenameList.append(filename) - - if not(filenameList): - return None, None - - self.filenameList = filenameList - - return pathList, filenameList - - def setup(self,dataOutObj=None, - path=None,startDate=None, - endDate=None, - startTime=datetime.time(0,0,0), - endTime=datetime.time(23,59,59), - set=0, - expLabel = "", - ext = None, - online = 0): - - if path == None: - raise ValueError, "The path is not valid" - - if ext == None: - ext = self.ext - - if dataOutObj == None: - dataOutObj = self.createObjByDefault() - - self.dataOutObj = dataOutObj - - if online: - pass - - else: - print "Searching file in offline mode" - pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext) - if not(pathList): - print "No files in range: %s - %s"%(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) - return None - self.fileIndex = -1 - self.pathList = pathList - self.filenameList = filenameList - - self.online = online - ext = ext.lower() - self.ext = ext - - if not(self.setNextFile()): - if (startDate!=None) and (endDate!=None): - print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) - elif startDate != None: - print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) - else: - print "No files" - - return None - -# self.updateDataHeader() - - return self.dataOutObj - - def __setNextFileOffline(self): - idFile = self.fileIndex - - while (True): - idFile += 1 - if not(idFile < len(self.filenameList)): - self.flagNoMoreFiles = 1 - print "No more Files" - return 0 - - filename = self.filenameList[idFile] - - if not(self.__verifyFile(filename)): - continue - - fileSize = os.path.getsize(filename) - fp = open(filename,'rb') - break - - self.flagIsNewFile = 1 - self.fileIndex = idFile - self.filename = filename - self.fileSize = fileSize - self.fp = fp - - print "Setting the file: %s"%self.filename - - return 1 - - - - def setNextFile(self): - if self.fp != None: - self.fp.close() - - if self.online: - newFile = self.__setNextFileOnline() - else: - newFile = self.__setNextFileOffline() - - if not(newFile): - return 0 - - self.__readFirstHeader() - self.nReadBlocks = 0 - return 1 - - def __setNewBlock(self): - if self.fp == None: - return 0 - - if self.flagIsNewFile: - return 1 - - self.lastUTTime = self.basicHeaderObj.utc - currentSize = self.fileSize - self.fp.tell() - neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize - - if (currentSize >= neededSize): - self.__rdBasicHeader() - return 1 - - if not(self.setNextFile()): - return 0 - - deltaTime = 
self.basicHeaderObj.utc - self.lastUTTime # - - self.flagTimeBlock = 0 - - if deltaTime > self.maxTimeStep: - self.flagTimeBlock = 1 - - return 1 - - - def readNextBlock(self): - if not(self.__setNewBlock()): - return 0 - - if not(self.readBlock()): - return 0 - - return 1 - - def __rdProcessingHeader(self, fp=None): - if fp == None: - fp = self.fp - - self.processingHeaderObj.read(fp) - - def __rdRadarControllerHeader(self, fp=None): - if fp == None: - fp = self.fp - - self.radarControllerHeaderObj.read(fp) - - def __rdSystemHeader(self, fp=None): - if fp == None: - fp = self.fp - - self.systemHeaderObj.read(fp) - - def __rdBasicHeader(self, fp=None): - if fp == None: - fp = self.fp - - self.basicHeaderObj.read(fp) - - - def __readFirstHeader(self): - self.__rdBasicHeader() - self.__rdSystemHeader() - self.__rdRadarControllerHeader() - self.__rdProcessingHeader() - - self.firstHeaderSize = self.basicHeaderObj.size - - datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) - if datatype == 0: - datatype_str = numpy.dtype([('real',' 0: - filesList = sorted( filesList, key=str.lower ) - filen = filesList[-1] - # el filename debera tener el siguiente formato - # 0 1234 567 89A BCDE (hex) - # x YYYY DDD SSS .ext - if isNumber( filen[8:11] ): - self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file - else: - self.setFile = -1 - else: - self.setFile = -1 #inicializo mi contador de seteo - - setFile = self.setFile - setFile += 1 - - file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, - timeTuple.tm_year, - timeTuple.tm_yday, - setFile, - ext ) - - filename = os.path.join( path, subfolder, file ) - - fp = open( filename,'wb' ) - - self.blockIndex = 0 - - #guardando atributos - self.filename = filename - self.subfolder = subfolder - self.fp = fp - self.setFile = setFile - self.flagIsNewFile = 1 - - print 'Writing the file: %s'%self.filename - - self.__writeFirstHeader() - - return 1 - - - def __setNewBlock(self): - """ - Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header - - Return: - 0 : si no pudo escribir nada - 1 : Si escribio el Basic el First Header - """ - if self.fp == None: - self.setNextFile() - - if self.flagIsNewFile: - return 1 - - if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: - self.__writeBasicHeader() - return 1 - - if not( self.setNextFile() ): - return 0 - - return 1 - - - def writeNextBlock(self): - """ - Selecciona el bloque siguiente de datos y los escribe en un file - - Return: - 0 : Si no hizo pudo escribir el bloque de datos - 1 : Si no pudo escribir el bloque de datos - """ - if not( self.__setNewBlock() ): - return 0 - - self.writeBlock() - - return 1 - - - def getDataHeader(self): - """ - Obtiene una copia del First Header - - Affected: - self.basicHeaderObj - self.systemHeaderObj - self.radarControllerHeaderObj - self.processingHeaderObj - self.dtype - - Return: - None - """ - - raise ValueError, "No implemented" - - def setup(self, path, set=0, ext=None): - """ - Setea el tipo de formato en la cual sera guardada la data y escribe el First Header - - Inputs: - path : el path destino en el cual se escribiran los files a crear - format : formato en el cual sera salvado un file - set : el setebo del file - - Return: - 0 : Si no realizo un buen seteo - 1 : Si realizo un buen seteo - """ - - if ext == None: - ext = self.ext - - ext = ext.lower() - - self.path = path - self.setFile = set - 1 - self.ext = ext - 
#self.format = format - self.getDataHeader() - - self.setBlockDimension() - - if not( self.setNextFile() ): - print "There isn't a next file" - return 0 - - return 1 - - - - - - +import os, sys +import glob +import time +import numpy +import fnmatch +import time, datetime + +path = os.path.split(os.getcwd())[0] +sys.path.append(path) + +from JROHeader import * +from Data.JROData import JROData + +def isNumber(str): + """ + Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero. + + Excepciones: + Si un determinado string no puede ser convertido a numero + Input: + str, string al cual se le analiza para determinar si convertible a un numero o no + + Return: + True : si el string es uno numerico + False : no es un string numerico + """ + try: + float( str ) + return True + except: + return False + +def isThisFileinRange(filename, startUTSeconds, endUTSeconds): + """ + Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado. + + Inputs: + filename : nombre completo del archivo de datos en formato Jicamarca (.r) + + startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en + segundos contados desde 01/01/1970. + endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en + segundos contados desde 01/01/1970. + + Return: + Boolean : Retorna True si el archivo de datos contiene datos en el rango de + fecha especificado, de lo contrario retorna False. + + Excepciones: + Si el archivo no existe o no puede ser abierto + Si la cabecera no puede ser leida. + + """ + basicHeaderObj = BasicHeader() + + try: + fp = open(filename,'rb') + except: + raise IOError, "The file %s can't be opened" %(filename) + + sts = basicHeaderObj.read(fp) + fp.close() + + if not(sts): + print "Skipping the file %s because it has not a valid header" %(filename) + return 0 + + if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): + return 0 + + return 1 + + + + +class JRODataIO: + + c = 3E8 + + basicHeaderObj = BasicHeader() + + systemHeaderObj = SystemHeader() + + radarControllerHeaderObj = RadarControllerHeader() + + processingHeaderObj = ProcessingHeader() + + online = 0 + + dtype = None + + pathList = [] + + filenameList = [] + + filename = None + + ext = None + + flagNoMoreFiles = 0 + + flagIsNewFile = 1 + + flagTimeBlock = 0 + + flagIsNewBlock = 0 + + fp = None + + firstHeaderSize = 0 + + basicHeaderSize = 24 + + versionFile = 1103 + + fileSize = None + + ippSeconds = None + + fileSizeByHeader = None + + fileIndex = None + + profileIndex = None + + blockIndex = None + + nTotalBlocks = None + + maxTimeStep = 30 + + lastUTTime = None + + datablock = None + + dataOutObj = None + + blocksize = None + + def __init__(self): + pass + +class JRODataReader(JRODataIO): + + nReadBlocks = 0 + + def __init__(self): + + pass + + def createObjByDefault(self): + """ + + """ + raise ValueError, "This method has not been implemented" + + def getBlockDimension(self): + + raise ValueError, "No implemented" + + def __searchFilesOffLine(self, + path, + startDate, + endDate, + startTime=datetime.time(0,0,0), + endTime=datetime.time(23,59,59), + set=None, + expLabel="", + ext=".r"): + dirList = [] + for thisPath in os.listdir(path): + if os.path.isdir(os.path.join(path,thisPath)): + dirList.append(thisPath) + + if not(dirList): + return None, None + + pathList = [] + dateList = [] + + thisDate = startDate + + while(thisDate <= endDate): + year = thisDate.timetuple().tm_year + doy = 
thisDate.timetuple().tm_yday + + match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy)) + if len(match) == 0: + thisDate += datetime.timedelta(1) + continue + + pathList.append(os.path.join(path,match[0],expLabel)) + dateList.append(thisDate) + thisDate += datetime.timedelta(1) + + filenameList = [] + for index in range(len(pathList)): + + thisPath = pathList[index] + fileList = glob.glob1(thisPath, "*%s" %ext) + fileList.sort() + + #Busqueda de datos en el rango de horas indicados + thisDate = dateList[index] + startDT = datetime.datetime.combine(thisDate, startTime) + endDT = datetime.datetime.combine(thisDate, endTime) + + startUtSeconds = time.mktime(startDT.timetuple()) + endUtSeconds = time.mktime(endDT.timetuple()) + + for file in fileList: + + filename = os.path.join(thisPath,file) + + if isThisFileinRange(filename, startUtSeconds, endUtSeconds): + filenameList.append(filename) + + if not(filenameList): + return None, None + + self.filenameList = filenameList + + return pathList, filenameList + + def setup(self,dataOutObj=None, + path=None, + startDate=None, + endDate=None, + startTime=datetime.time(0,0,0), + endTime=datetime.time(23,59,59), + set=0, + expLabel = "", + ext = None, + online = 0): + + if path == None: + raise ValueError, "The path is not valid" + + if ext == None: + ext = self.ext + + if dataOutObj == None: + dataOutObj = self.createObjByDefault() + + self.dataOutObj = dataOutObj + + if online: + pass + + else: + print "Searching file in offline mode" + pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext) + if not(pathList): + print "No files in range: %s - %s"%(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) + return None + self.fileIndex = -1 + self.pathList = pathList + self.filenameList = filenameList + + self.online = online + ext = ext.lower() + self.ext = ext + + if not(self.setNextFile()): + if (startDate!=None) and (endDate!=None): + print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) + elif startDate != None: + print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) + else: + print "No files" + + return None + +# self.updateDataHeader() + + return self.dataOutObj + + def __setNextFileOffline(self): + idFile = self.fileIndex + + while (True): + idFile += 1 + if not(idFile < len(self.filenameList)): + self.flagNoMoreFiles = 1 + print "No more Files" + return 0 + + filename = self.filenameList[idFile] + + if not(self.__verifyFile(filename)): + continue + + fileSize = os.path.getsize(filename) + fp = open(filename,'rb') + break + + self.flagIsNewFile = 1 + self.fileIndex = idFile + self.filename = filename + self.fileSize = fileSize + self.fp = fp + + print "Setting the file: %s"%self.filename + + return 1 + + + + def setNextFile(self): + if self.fp != None: + self.fp.close() + + if self.online: + newFile = self.__setNextFileOnline() + else: + newFile = self.__setNextFileOffline() + + if not(newFile): + return 0 + + self.__readFirstHeader() + self.nReadBlocks = 0 + return 1 + + def __setNewBlock(self): + if self.fp == None: + return 0 + + if self.flagIsNewFile: + return 1 + + self.lastUTTime = self.basicHeaderObj.utc + currentSize = self.fileSize - self.fp.tell() + neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize + + if (currentSize >= neededSize): + self.__rdBasicHeader() + return 1 + 
+ if not(self.setNextFile()): + return 0 + + deltaTime = self.basicHeaderObj.utc - self.lastUTTime # + + self.flagTimeBlock = 0 + + if deltaTime > self.maxTimeStep: + self.flagTimeBlock = 1 + + return 1 + + + def readNextBlock(self): + if not(self.__setNewBlock()): + return 0 + + if not(self.readBlock()): + return 0 + + return 1 + + def __rdProcessingHeader(self, fp=None): + if fp == None: + fp = self.fp + + self.processingHeaderObj.read(fp) + + def __rdRadarControllerHeader(self, fp=None): + if fp == None: + fp = self.fp + + self.radarControllerHeaderObj.read(fp) + + def __rdSystemHeader(self, fp=None): + if fp == None: + fp = self.fp + + self.systemHeaderObj.read(fp) + + def __rdBasicHeader(self, fp=None): + if fp == None: + fp = self.fp + + self.basicHeaderObj.read(fp) + + + def __readFirstHeader(self): + self.__rdBasicHeader() + self.__rdSystemHeader() + self.__rdRadarControllerHeader() + self.__rdProcessingHeader() + + self.firstHeaderSize = self.basicHeaderObj.size + + datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) + if datatype == 0: + datatype_str = numpy.dtype([('real',' 0: + filesList = sorted( filesList, key=str.lower ) + filen = filesList[-1] + # el filename debera tener el siguiente formato + # 0 1234 567 89A BCDE (hex) + # x YYYY DDD SSS .ext + if isNumber( filen[8:11] ): + self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file + else: + self.setFile = -1 + else: + self.setFile = -1 #inicializo mi contador de seteo + + setFile = self.setFile + setFile += 1 + + file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, + timeTuple.tm_year, + timeTuple.tm_yday, + setFile, + ext ) + + filename = os.path.join( path, subfolder, file ) + + fp = open( filename,'wb' ) + + self.blockIndex = 0 + + #guardando atributos + self.filename = filename + self.subfolder = subfolder + self.fp = fp + self.setFile = setFile + self.flagIsNewFile = 1 + + print 'Writing the file: %s'%self.filename + + self.__writeFirstHeader() + + return 1 + + + def __setNewBlock(self): + """ + Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header + + Return: + 0 : si no pudo escribir nada + 1 : Si escribio el Basic el First Header + """ + if self.fp == None: + self.setNextFile() + + if self.flagIsNewFile: + return 1 + + if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: + self.__writeBasicHeader() + return 1 + + if not( self.setNextFile() ): + return 0 + + return 1 + + + def writeNextBlock(self): + """ + Selecciona el bloque siguiente de datos y los escribe en un file + + Return: + 0 : Si no hizo pudo escribir el bloque de datos + 1 : Si no pudo escribir el bloque de datos + """ + if not( self.__setNewBlock() ): + return 0 + + self.writeBlock() + + return 1 + + + def getDataHeader(self): + """Obtiene una copia del First Header Affected: self.basicHeaderObj self. + systemHeaderObj self.radarControllerHeaderObj self.processingHeaderObj self. 
+ dtype Return: None + """ + + raise ValueError, "No implemented" + + def setup(self, path, set=0, ext=None): + """ + Setea el tipo de formato en la cual sera guardada la data y escribe el First Header + + Inputs: + path : el path destino en el cual se escribiran los files a crear + format : formato en el cual sera salvado un file + set : el setebo del file + + Return: + 0 : Si no realizo un buen seteo + 1 : Si realizo un buen seteo + """ + + if ext == None: + ext = self.ext + + ext = ext.lower() + + self.path = path + self.setFile = set - 1 + self.ext = ext + #self.format = format + self.getDataHeader() + + self.setBlockDimension() + + if not( self.setNextFile() ): + print "There isn't a next file" + return 0 + + return 1 + + + + + + diff --git a/schainpy2/IO/JROHeader.py b/schainpy2/IO/JROHeader.py index 48b784f..36125cc 100644 --- a/schainpy2/IO/JROHeader.py +++ b/schainpy2/IO/JROHeader.py @@ -1,506 +1,506 @@ -''' -Created on 23/01/2012 - -@author $Author: vsarmiento $ -@version $Id: HeaderIO.py 37 2012-03-26 22:55:13Z vsarmiento $ -''' - -import numpy -import copy - -class Header: - - def __init__(self): - raise - - def copy(self): - return copy.deepcopy(self) - - def read(): - pass - - def write(): - pass - -class BasicHeader(Header): - - size = None - version = None - dataBlock = None - utc = None - miliSecond = None - timeZone = None - dstFlag = None - errorCount = None - struct = None - - def __init__(self): - - self.size = 0 - self.version = 0 - self.dataBlock = 0 - self.utc = 0 - self.miliSecond = 0 - self.timeZone = 0 - self.dstFlag = 0 - self.errorCount = 0 - self.struct = numpy.dtype([ - ('nSize',' 0: - fp.seek(jumpFp) - - except: - return 0 - - return 1 - - def write(self, fp): - headerTuple = (self.size, - self.expType, - self.nTx, - self.ipp, - self.txA, - self.txB, - self.nWindows, - self.numTaus, - self.codeType, - self.line6Function, - self.line5Function, - self.fClock, - self.prePulseBefore, - self.prePulserAfter, - self.rangeIpp, - self.rangeTxA, - self.rangeTxB) - - header = numpy.array(headerTuple,self.struct) - header.tofile(fp) - - dynamic = self.dynamic - dynamic.tofile(fp) - - return 1 - - - -class ProcessingHeader(Header): - - size = None - dtype = None - blockSize = None - profilesPerBlock = None - dataBlocksPerFile = None - nWindows = None - processFlags = None - nCohInt = None - nIncohInt = None - totalSpectra = None - struct = None - flag_dc = None - flag_cspc = None - - def __init__(self): - self.size = 0 - self.dataType = 0 - self.blockSize = 0 - self.profilesPerBlock = 0 - self.dataBlocksPerFile = 0 - self.nWindows = 0 - self.processFlags = 0 - self.nCohInt = 0 - self.nIncohInt = 0 - self.totalSpectra = 0 - self.struct = numpy.dtype([ - ('nSize',' 0: - self.flag_cspc = True - - except: - return 0 - - return 1 - - def write(self, fp): - headerTuple = (self.size, - self.dataType, - self.blockSize, - self.profilesPerBlock, - self.dataBlocksPerFile, - self.nWindows, - self.processFlags, - self.nCohInt, - self.nIncohInt, - self.totalSpectra) - - header = numpy.array(headerTuple,self.struct) - header.tofile(fp) - - if self.nWindows != 0: - sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin) - samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow) - samplingWindow.tofile(fp) - - - if self.totalSpectra != 0: - spectraComb = numpy.array([],numpy.dtype('u1')) - spectraComb = self.spectraComb - spectraComb.tofile(fp) - - - if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE: - numCode = 
self.numCode - numCode.tofile(fp) - - numBaud = self.numBaud - numBaud.tofile(fp) - - code = self.code.reshape(numCode*numBaud) - code.tofile(fp) - - return 1 - -class RCfunction: - NONE=0 - FLIP=1 - CODE=2 - SAMPLING=3 - LIN6DIV256=4 - SYNCHRO=5 - -class nCodeType: - NONE=0 - USERDEFINE=1 - BARKER2=2 - BARKER3=3 - BARKER4=4 - BARKER5=5 - BARKER7=6 - BARKER11=7 - BARKER13=8 - AC128=9 - COMPLEMENTARYCODE2=10 - COMPLEMENTARYCODE4=11 - COMPLEMENTARYCODE8=12 - COMPLEMENTARYCODE16=13 - COMPLEMENTARYCODE32=14 - COMPLEMENTARYCODE64=15 - COMPLEMENTARYCODE128=16 - CODE_BINARY28=17 - -class PROCFLAG: - COHERENT_INTEGRATION = numpy.uint32(0x00000001) - DECODE_DATA = numpy.uint32(0x00000002) - SPECTRA_CALC = numpy.uint32(0x00000004) - INCOHERENT_INTEGRATION = numpy.uint32(0x00000008) - POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010) - SHIFT_FFT_DATA = numpy.uint32(0x00000020) - - DATATYPE_CHAR = numpy.uint32(0x00000040) - DATATYPE_SHORT = numpy.uint32(0x00000080) - DATATYPE_LONG = numpy.uint32(0x00000100) - DATATYPE_INT64 = numpy.uint32(0x00000200) - DATATYPE_FLOAT = numpy.uint32(0x00000400) - DATATYPE_DOUBLE = numpy.uint32(0x00000800) - - DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000) - DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000) - DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000) - - SAVE_CHANNELS_DC = numpy.uint32(0x00008000) - DEFLIP_DATA = numpy.uint32(0x00010000) - DEFINE_PROCESS_CODE = numpy.uint32(0x00020000) - - ACQ_SYS_NATALIA = numpy.uint32(0x00040000) - ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000) - ACQ_SYS_ADRXD = numpy.uint32(0x000C0000) - ACQ_SYS_JULIA = numpy.uint32(0x00100000) - ACQ_SYS_XXXXXX = numpy.uint32(0x00140000) - - EXP_NAME_ESP = numpy.uint32(0x00200000) - CHANNEL_NAMES_ESP = numpy.uint32(0x00400000) - - OPERATION_MASK = numpy.uint32(0x0000003F) - DATATYPE_MASK = numpy.uint32(0x00000FC0) - DATAARRANGE_MASK = numpy.uint32(0x00007000) +''' +Created on 23/01/2012 + +@author $Author: vsarmiento $ +@version $Id: HeaderIO.py 37 2012-03-26 22:55:13Z vsarmiento $ +''' + +import numpy +import copy + +class Header: + + def __init__(self): + raise + + def copy(self): + return copy.deepcopy(self) + + def read(): + pass + + def write(): + pass + +class BasicHeader(Header): + + size = None + version = None + dataBlock = None + utc = None + miliSecond = None + timeZone = None + dstFlag = None + errorCount = None + struct = None + + def __init__(self): + + self.size = 0 + self.version = 0 + self.dataBlock = 0 + self.utc = 0 + self.miliSecond = 0 + self.timeZone = 0 + self.dstFlag = 0 + self.errorCount = 0 + self.struct = numpy.dtype([ + ('nSize',' 0: + fp.seek(jumpFp) + + except: + return 0 + + return 1 + + def write(self, fp): + headerTuple = (self.size, + self.expType, + self.nTx, + self.ipp, + self.txA, + self.txB, + self.nWindows, + self.numTaus, + self.codeType, + self.line6Function, + self.line5Function, + self.fClock, + self.prePulseBefore, + self.prePulserAfter, + self.rangeIpp, + self.rangeTxA, + self.rangeTxB) + + header = numpy.array(headerTuple,self.struct) + header.tofile(fp) + + dynamic = self.dynamic + dynamic.tofile(fp) + + return 1 + + + +class ProcessingHeader(Header): + + size = None + dtype = None + blockSize = None + profilesPerBlock = None + dataBlocksPerFile = None + nWindows = None + processFlags = None + nCohInt = None + nIncohInt = None + totalSpectra = None + struct = None + flag_dc = None + flag_cspc = None + + def __init__(self): + self.size = 0 + self.dataType = 0 + self.blockSize = 0 + self.profilesPerBlock = 0 + self.dataBlocksPerFile = 0 + 
self.nWindows = 0 + self.processFlags = 0 + self.nCohInt = 0 + self.nIncohInt = 0 + self.totalSpectra = 0 + self.struct = numpy.dtype([ + ('nSize',' 0: + self.flag_cspc = True + + except: + return 0 + + return 1 + + def write(self, fp): + headerTuple = (self.size, + self.dataType, + self.blockSize, + self.profilesPerBlock, + self.dataBlocksPerFile, + self.nWindows, + self.processFlags, + self.nCohInt, + self.nIncohInt, + self.totalSpectra) + + header = numpy.array(headerTuple,self.struct) + header.tofile(fp) + + if self.nWindows != 0: + sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin) + samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow) + samplingWindow.tofile(fp) + + + if self.totalSpectra != 0: + spectraComb = numpy.array([],numpy.dtype('u1')) + spectraComb = self.spectraComb + spectraComb.tofile(fp) + + + if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE: + numCode = self.numCode + numCode.tofile(fp) + + numBaud = self.numBaud + numBaud.tofile(fp) + + code = self.code.reshape(numCode*numBaud) + code.tofile(fp) + + return 1 + +class RCfunction: + NONE=0 + FLIP=1 + CODE=2 + SAMPLING=3 + LIN6DIV256=4 + SYNCHRO=5 + +class nCodeType: + NONE=0 + USERDEFINE=1 + BARKER2=2 + BARKER3=3 + BARKER4=4 + BARKER5=5 + BARKER7=6 + BARKER11=7 + BARKER13=8 + AC128=9 + COMPLEMENTARYCODE2=10 + COMPLEMENTARYCODE4=11 + COMPLEMENTARYCODE8=12 + COMPLEMENTARYCODE16=13 + COMPLEMENTARYCODE32=14 + COMPLEMENTARYCODE64=15 + COMPLEMENTARYCODE128=16 + CODE_BINARY28=17 + +class PROCFLAG: + COHERENT_INTEGRATION = numpy.uint32(0x00000001) + DECODE_DATA = numpy.uint32(0x00000002) + SPECTRA_CALC = numpy.uint32(0x00000004) + INCOHERENT_INTEGRATION = numpy.uint32(0x00000008) + POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010) + SHIFT_FFT_DATA = numpy.uint32(0x00000020) + + DATATYPE_CHAR = numpy.uint32(0x00000040) + DATATYPE_SHORT = numpy.uint32(0x00000080) + DATATYPE_LONG = numpy.uint32(0x00000100) + DATATYPE_INT64 = numpy.uint32(0x00000200) + DATATYPE_FLOAT = numpy.uint32(0x00000400) + DATATYPE_DOUBLE = numpy.uint32(0x00000800) + + DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000) + DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000) + DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000) + + SAVE_CHANNELS_DC = numpy.uint32(0x00008000) + DEFLIP_DATA = numpy.uint32(0x00010000) + DEFINE_PROCESS_CODE = numpy.uint32(0x00020000) + + ACQ_SYS_NATALIA = numpy.uint32(0x00040000) + ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000) + ACQ_SYS_ADRXD = numpy.uint32(0x000C0000) + ACQ_SYS_JULIA = numpy.uint32(0x00100000) + ACQ_SYS_XXXXXX = numpy.uint32(0x00140000) + + EXP_NAME_ESP = numpy.uint32(0x00200000) + CHANNEL_NAMES_ESP = numpy.uint32(0x00400000) + + OPERATION_MASK = numpy.uint32(0x0000003F) + DATATYPE_MASK = numpy.uint32(0x00000FC0) + DATAARRANGE_MASK = numpy.uint32(0x00007000) ACQ_SYS_MASK = numpy.uint32(0x001C0000) \ No newline at end of file diff --git a/schainpy2/IO/SpectraIO.py b/schainpy2/IO/SpectraIO.py index fc7e3f8..3fe513f 100644 --- a/schainpy2/IO/SpectraIO.py +++ b/schainpy2/IO/SpectraIO.py @@ -15,8 +15,8 @@ import time, datetime path = os.path.split(os.getcwd())[0] sys.path.append(path) -from Model.JROHeader import * -from Model.Spectra import Spectra +from IO.JROHeader import * +from Data.Spectra import Spectra from JRODataIO import JRODataReader from JRODataIO import JRODataWriter @@ -59,9 +59,6 @@ class SpectraReader(JRODataReader): break """ - dataOutObj = None - - datablock = None pts2read_SelfSpectra = 0 @@ -73,7 +70,14 @@ class 
SpectraReader(JRODataReader): optchar = "P" - flag_cspc = False + dataOutObj = None + + nRdChannels = None + + nRdPairs = None + + rdPairList = [] + def __init__(self, dataOutObj=None): """ @@ -97,7 +101,7 @@ class SpectraReader(JRODataReader): self.pts2read_CrossSpectra = 0 - self.pts2read_DCs = 0 + self.pts2read_DCchannels = 0 self.datablock = None @@ -183,8 +187,8 @@ class SpectraReader(JRODataReader): Obtiene la cantidad de puntos a leer por cada bloque de datos Affected: - self.nChannels - self.nPairs + self.nRdChannels + self.nRdPairs self.pts2read_SelfSpectra self.pts2read_CrossSpectra self.pts2read_DCchannels @@ -195,28 +199,28 @@ class SpectraReader(JRODataReader): Return: None """ - self.nChannels = 0 - self.nPairs = 0 - self.pairList = [] + self.nRdChannels = 0 + self.nRdPairs = 0 + self.rdPairList = [] for i in range( 0, self.processingHeaderObj.totalSpectra*2, 2 ): if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]: - self.nChannels = self.nChannels + 1 #par de canales iguales + self.nRdChannels = self.nRdChannels + 1 #par de canales iguales else: - self.nPairs = self.nPairs + 1 #par de canales diferentes - self.pairList.append( (self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]) ) + self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes + self.rdPairList.append( (self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]) ) - pts2read = self.processingHeaderObj.numHeights * self.processingHeaderObj.profilesPerBlock + pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock - self.pts2read_SelfSpectra = int(self.nChannels * pts2read) + self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read) self.blocksize = self.pts2read_SelfSpectra if self.processingHeaderObj.flag_cspc: - self.pts2read_CrossSpectra = int(self.nPairs * pts2read) + self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read) self.blocksize += self.pts2read_CrossSpectra if self.processingHeaderObj.flag_dc: - self.pts2read_DCchannels = int(self.systemHeaderObj.numChannels * self.processingHeaderObj.numHeights) + self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights) self.blocksize += self.pts2read_DCchannels # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels @@ -232,7 +236,7 @@ class SpectraReader(JRODataReader): Return: None Variables afectadas: - self.datablockIndex + self.flagIsNewFile self.flagIsNewBlock self.nTotalBlocks @@ -246,29 +250,29 @@ class SpectraReader(JRODataReader): blockOk_flag = False fpointer = self.fp.tell() - spc = numpy.fromfile( self.fp, self.dataType[0], self.pts2read_SelfSpectra ) - spc = spc.reshape( (self.nChannels, self.processingHeaderObj.numHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D + spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra ) + spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D - if self.flag_cspc: - cspc = numpy.fromfile( self.fp, self.dataType, self.pts2read_CrossSpectra ) - cspc = cspc.reshape( (self.nPairs, self.processingHeaderObj.numHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D + if self.processingHeaderObj.flag_cspc: + cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra ) + cspc = cspc.reshape( (self.nRdPairs, 
self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D if self.processingHeaderObj.flag_dc: - dc = numpy.fromfile( self.fp, self.dataType, self.pts2read_DCchannels ) #int(self.processingHeaderObj.numHeights*self.systemHeaderObj.numChannels) ) - dc = dc.reshape( (self.systemHeaderObj.numChannels, self.processingHeaderObj.numHeights) ) #transforma a un arreglo 2D + dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) ) + dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D if not(self.processingHeaderObj.shif_fft): spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones - if self.flag_cspc: + if self.processingHeaderObj.flag_cspc: cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones spc = numpy.transpose( spc, (0,2,1) ) self.data_spc = spc - if self.flag_cspc: + if self.processingHeaderObj.flag_cspc: cspc = numpy.transpose( cspc, (0,2,1) ) self.data_cspc = cspc['real'] + cspc['imag']*1j else: @@ -279,7 +283,6 @@ class SpectraReader(JRODataReader): else: self.data_dc = None - self.datablockIndex = 0 self.flagIsNewFile = 0 self.flagIsNewBlock = 1 @@ -301,7 +304,7 @@ class SpectraReader(JRODataReader): Affected: self.dataOutObj - self.datablockIndex + self.flagTimeBlock self.flagIsNewBlock """ @@ -316,7 +319,7 @@ class SpectraReader(JRODataReader): if not( self.readNextBlock() ): return 0 - self.updateDataHeader() +# self.updateDataHeader() if self.flagNoMoreFiles == 1: print 'Process finished' @@ -328,12 +331,49 @@ class SpectraReader(JRODataReader): self.dataOutObj.flagNoData = True return 0 - self.dataOutObj.flagNoData = False - self.dataOutObj.flagTimeBlock = self.flagTimeBlock - + self.dataOutObj.data_spc = self.data_spc + self.dataOutObj.data_cspc = self.data_cspc + self.dataOutObj.data_dc = self.data_dc + + self.dataOutObj.flagTimeBlock = self.flagTimeBlock + + self.dataOutObj.flagNoData = False + + self.dataOutObj.dtype = self.dtype + + self.dataOutObj.nChannels = self.nRdChannels + + self.dataOutObj.nPairs = self.nRdPairs + + self.dataOutObj.pairsList = self.rdPairList + + self.dataOutObj.nHeights = self.processingHeaderObj.nHeights + + self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock + + self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock + + self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt + + + xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight + + self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) + + self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels) + + self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels) + + self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc #+ self.profileIndex * self.ippSeconds + +# self.profileIndex += 1 + + self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy() + + self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() return 1 @@ -345,16 +385,11 @@ class SpectraWriter(JRODataWriter): de los datos siempre se realiza por bloques. 
""" - dataOutObj = None shape_spc_Buffer = None shape_cspc_Buffer = None shape_dc_Buffer = None - - data_spc = None - data_cspc = None - data_dc = None - + dataOutObj = None def __init__(self, dataOutObj=None): """ @@ -405,7 +440,7 @@ class SpectraWriter(JRODataWriter): self.setFile = None - self.dataType = None + self.dtype = None self.path = None @@ -438,15 +473,15 @@ class SpectraWriter(JRODataWriter): Return: None """ self.shape_spc_Buffer = (self.dataOutObj.nChannels, - self.processingHeaderObj.numHeights, + self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) self.shape_cspc_Buffer = (self.dataOutObj.nPairs, - self.processingHeaderObj.numHeights, + self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) - self.shape_dc_Buffer = (self.systemHeaderObj.numChannels, - self.processingHeaderObj.numHeights) + self.shape_dc_Buffer = (self.systemHeaderObj.nChannels, + self.processingHeaderObj.nHeights) def writeBlock(self): @@ -472,7 +507,7 @@ class SpectraWriter(JRODataWriter): data.tofile(self.fp) if self.data_cspc != None: - data = numpy.zeros( self.shape_cspc_Buffer, self.dataType ) + data = numpy.zeros( self.shape_cspc_Buffer, self.dtype ) cspc = numpy.transpose( self.data_cspc, (0,2,1) ) if not( self.processingHeaderObj.shif_fft ): cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones @@ -481,7 +516,7 @@ class SpectraWriter(JRODataWriter): data = data.reshape((-1)) data.tofile(self.fp) - data = numpy.zeros( self.shape_dc_Buffer, self.dataType ) + data = numpy.zeros( self.shape_dc_Buffer, self.dtype ) dc = self.data_dc data['real'] = dc.real data['imag'] = dc.imag diff --git a/schainpy2/IO/VoltageIO.py b/schainpy2/IO/VoltageIO.py index 1522d64..e032970 100644 --- a/schainpy2/IO/VoltageIO.py +++ b/schainpy2/IO/VoltageIO.py @@ -1,490 +1,481 @@ -''' -Created on 23/01/2012 - -@author $Author: dsuarez $ -@version $Id: VoltageIO.py 110 2012-07-19 15:18:18Z dsuarez $ -''' - -import os, sys -import numpy -import glob -import fnmatch -import time, datetime - -path = os.path.split(os.getcwd())[0] -sys.path.append(path) - -from JROHeader import * -from JRODataIO import JRODataReader -from JRODataIO import JRODataWriter - -from Data.Voltage import Voltage - -class VoltageReader(JRODataReader): - """ - Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura - de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones: - perfiles*alturas*canales) son almacenados en la variable "buffer". - - perfiles * alturas * canales - - Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader, - RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la - cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de - datos desde el "buffer" cada vez que se ejecute el metodo "getData". 
- - Example: - - dpath = "/home/myuser/data" - - startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) - - endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) - - readerObj = VoltageReader() - - readerObj.setup(dpath, startTime, endTime) - - while(True): - - #to get one profile - profile = readerObj.getData() - - #print the profile - print profile - - #If you want to see all datablock - print readerObj.datablock - - if readerObj.flagNoMoreFiles: - break - - """ - dataOutObj = None - - datablock = None - - ext = ".r" - - optchar = "D" - - - def __init__(self, dataOutObj=None): - """ - Inicializador de la clase VoltageReader para la lectura de datos de voltage. - - Input: - dataOutObj : Objeto de la clase Voltage. Este objeto sera utilizado para - almacenar un perfil de datos cada vez que se haga un requerimiento - (getData). El perfil sera obtenido a partir del buffer de datos, - si el buffer esta vacio se hara un nuevo proceso de lectura de un - bloque de datos. - Si este parametro no es pasado se creara uno internamente. - - Variables afectadas: - self.dataOutObj - - Return: - None - """ - - self.datablock = None - - self.utc = 0 - - self.ext = ".r" - - self.optchar = "D" - - self.basicHeaderObj = BasicHeader() - - self.systemHeaderObj = SystemHeader() - - self.radarControllerHeaderObj = RadarControllerHeader() - - self.processingHeaderObj = ProcessingHeader() - - self.online = 0 - - self.fp = None - - self.idFile = None - - self.dtype = None - - self.fileSizeByHeader = None - - self.filenameList = [] - - self.filename = None - - self.fileSize = None - - self.firstHeaderSize = 0 - - self.basicHeaderSize = 24 - - self.pathList = [] - - self.filenameList = [] - - self.lastUTTime = 0 - - self.maxTimeStep = 30 - - self.flagNoMoreFiles = 0 - - self.set = 0 - - self.path = None - - self.profileIndex = 9999 - - self.delay = 3 #seconds - - self.nTries = 3 #quantity tries - - self.nFiles = 3 #number of files for searching - - self.nReadBlocks = 0 - - self.flagIsNewFile = 1 - - self.ippSeconds = 0 - - self.flagTimeBlock = 0 - - self.flagIsNewBlock = 0 - - self.nTotalBlocks = 0 - - self.blocksize = 0 - - def createObjByDefault(self): - - dataObj = Voltage() - - return dataObj - - def __hasNotDataInBuffer(self): - if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: - return 1 - return 0 - - - def getBlockDimension(self): - """ - Obtiene la cantidad de puntos a leer por cada bloque de datos - - Affected: - self.blocksize - - Return: - None - """ - pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels - self.blocksize = pts2read - - - def readBlock(self): - """ - readBlock lee el bloque de datos desde la posicion actual del puntero del archivo - (self.fp) y actualiza todos los parametros relacionados al bloque de datos - (metadata + data). 
La data leida es almacenada en el buffer y el contador del buffer - es seteado a 0 - - Inputs: - None - - Return: - None - - Affected: - self.profileIndex - self.datablock - self.flagIsNewFile - self.flagIsNewBlock - self.nTotalBlocks - - Exceptions: - Si un bloque leido no es un bloque valido - """ - - junk = numpy.fromfile( self.fp, self.dtype, self.blocksize ) - - try: - junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) ) - except: - print "The read block (%3d) has not enough data" %self.nReadBlocks - return 0 - - junk = numpy.transpose(junk, (2,0,1)) - self.datablock = junk['real'] + junk['imag']*1j - - self.profileIndex = 0 - - self.flagIsNewFile = 0 - self.flagIsNewBlock = 1 - - self.nTotalBlocks += 1 - self.nReadBlocks += 1 - - return 1 - - - def getData(self): - """ - getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage" - con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de - lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock" - - Ademas incrementa el contador del buffer en 1. - - Return: - data : retorna un perfil de voltages (alturas * canales) copiados desde el - buffer. Si no hay mas archivos a leer retorna None. - - Variables afectadas: - self.dataOutObj - self.profileIndex - - Affected: - self.dataOutObj - self.profileIndex - self.flagTimeBlock - self.flagIsNewBlock - """ - if self.flagNoMoreFiles: return 0 - - self.flagTimeBlock = 0 - self.flagIsNewBlock = 0 - - if self.__hasNotDataInBuffer(): - - if not( self.readNextBlock() ): - return 0 - -# self.updateDataHeader() - - if self.flagNoMoreFiles == 1: - print 'Process finished' - return 0 - - #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) - - if self.datablock == None: - self.dataOutObj.flagNoData = True - return 0 - - self.dataOutObj.data = self.datablock[:,self.profileIndex,:] - - self.dataOutObj.dtype = self.dtype - - self.dataOutObj.nChannels = self.systemHeaderObj.nChannels - - self.dataOutObj.nHeights = self.processingHeaderObj.nHeights - - self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock - - xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight - - self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) - - self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels) - - self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels) - - self.dataOutObj.flagNoData = True - - self.dataOutObj.flagTimeBlock = self.flagTimeBlock - - self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.profileIndex * self.ippSeconds - - self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt - - self.profileIndex += 1 - - self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy() - - self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() - - return 1 - - -class VoltageWriter(JRODataWriter): - """ - Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura - de los datos siempre se realiza por bloques. - """ - __configHeaderFile = 'wrSetHeadet.txt' - - dataOutObj = None - - ext = ".r" - - optchar = "D" - - datablock = None - - profileIndex = 0 - - shapeBuffer = None - - - def __init__(self, dataOutObj=None): - """ - Inicializador de la clase VoltageWriter para la escritura de datos de espectros. 
- - Affected: - self.dataOutObj - - Return: None - """ - if dataOutObj == None: - dataOutObj = Voltage() - - if not( isinstance(dataOutObj, Voltage) ): - raise ValueError, "in VoltageReader, dataOutObj must be an Spectra class object" - - self.dataOutObj = dataOutObj - - - def hasAllDataInBuffer(self): - if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: - return 1 - return 0 - - - def setBlockDimension(self): - """ - Obtiene las formas dimensionales del los subbloques de datos que componen un bloque - - Affected: - self.shape_spc_Buffer - self.shape_cspc_Buffer - self.shape_dc_Buffer - - Return: None - """ - self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock, - self.processingHeaderObj.nHeights, - self.systemHeaderObj.nChannels ) - - self.datablock = numpy.zeros((self.systemHeaderObj.nChannels, - self.processingHeaderObj.profilesPerBlock, - self.processingHeaderObj.nHeights), - dtype=numpy.dtype('complex')) - - - def writeBlock(self): - """ - Escribe el buffer en el file designado - - Affected: - self.profileIndex - self.flagIsNewFile - self.flagIsNewBlock - self.nTotalBlocks - self.blockIndex - - Return: None - """ - data = numpy.zeros( self.shapeBuffer, self.dtype ) - - junk = numpy.transpose(self.datablock, (1,2,0)) - - data['real'] = junk.real - data['imag'] = junk.imag - - data = data.reshape( (-1) ) - - data.tofile( self.fp ) - - self.datablock.fill(0) - - self.profileIndex = 0 - self.flagIsNewFile = 0 - self.flagIsNewBlock = 1 - - self.blockIndex += 1 - self.nTotalBlocks += 1 - - def putData(self): - """ - Setea un bloque de datos y luego los escribe en un file - - Affected: - self.flagIsNewBlock - self.profileIndex - - Return: - 0 : Si no hay data o no hay mas files que puedan escribirse - 1 : Si se escribio la data de un bloque en un file - """ - self.flagIsNewBlock = 0 - - if self.dataOutObj.flagNoData: - return 0 - - if self.dataOutObj.flagTimeBlock: - - self.datablock.fill(0) - self.profileIndex = 0 - self.setNextFile() - - self.datablock[:,self.profileIndex,:] = self.dataOutObj.data - - self.profileIndex += 1 - - if self.hasAllDataInBuffer(): - #if self.flagIsNewFile: - self.getDataHeader() - self.writeNextBlock() - - if self.flagNoMoreFiles: - #print 'Process finished' - return 0 - - return 1 - - def getDataHeader(self): - - """ - Obtiene una copia del First Header - - Affected: - self.systemHeaderObj - self.radarControllerHeaderObj - self.dtype - - Return: - None - """ - -# CALCULAR PARAMETROS - - self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy() - self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy() - - self.basicHeaderObj.size = self.basicHeaderSize - self.basicHeaderObj.version = self.versionFile - self.basicHeaderObj.dataBlock = self.nTotalBlocks - self.basicHeaderObj.utc = self.dataOutObj.dataUtcTime - self.basicHeaderObj.miliSecond = 0 - self.basicHeaderObj.timeZone = 0 - self.basicHeaderObj.dstFlag = 0 - self.basicHeaderObj.errorCount = 0 - - self.processingHeaderObj.size = 0 - self.processingHeaderObj.dtype = self.dataOutObj.dtype - self.processingHeaderObj.blockSize = 0 - self.processingHeaderObj.profilesPerBlock = 0 - self.processingHeaderObj.dataBlocksPerFile = 0 - self.processingHeaderObj.numWindows = 0 - self.processingHeaderObj.processFlags = 0 - self.processingHeaderObj.coherentInt = 0 - self.processingHeaderObj.incoherentInt = 0 - self.processingHeaderObj.totalSpectra = 0 - - self.dtype = self.dataOutObj.dtype +''' +Created on 23/01/2012 + +@author $Author: dsuarez $ +@version $Id: 
VoltageIO.py 110 2012-07-19 15:18:18Z dsuarez $ +''' + +import os, sys +import numpy +import glob +import fnmatch +import time, datetime + +path = os.path.split(os.getcwd())[0] +sys.path.append(path) + +from JROHeader import * +from JRODataIO import JRODataReader +from JRODataIO import JRODataWriter + +from Data.Voltage import Voltage + +class VoltageReader(JRODataReader): + """ + Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura + de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones: + perfiles*alturas*canales) son almacenados en la variable "buffer". + + perfiles * alturas * canales + + Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader, + RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la + cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de + datos desde el "buffer" cada vez que se ejecute el metodo "getData". + + Example: + + dpath = "/home/myuser/data" + + startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) + + endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) + + readerObj = VoltageReader() + + readerObj.setup(dpath, startTime, endTime) + + while(True): + + #to get one profile + profile = readerObj.getData() + + #print the profile + print profile + + #If you want to see all datablock + print readerObj.datablock + + if readerObj.flagNoMoreFiles: + break + + """ + + ext = ".r" + + optchar = "D" + dataOutObj = None + + + def __init__(self, dataOutObj=None): + """ + Inicializador de la clase VoltageReader para la lectura de datos de voltage. + + Input: + dataOutObj : Objeto de la clase Voltage. Este objeto sera utilizado para + almacenar un perfil de datos cada vez que se haga un requerimiento + (getData). El perfil sera obtenido a partir del buffer de datos, + si el buffer esta vacio se hara un nuevo proceso de lectura de un + bloque de datos. + Si este parametro no es pasado se creara uno internamente. 
+ + Variables afectadas: + self.dataOutObj + + Return: + None + """ + + self.datablock = None + + self.utc = 0 + + self.ext = ".r" + + self.optchar = "D" + + self.basicHeaderObj = BasicHeader() + + self.systemHeaderObj = SystemHeader() + + self.radarControllerHeaderObj = RadarControllerHeader() + + self.processingHeaderObj = ProcessingHeader() + + self.online = 0 + + self.fp = None + + self.idFile = None + + self.dtype = None + + self.fileSizeByHeader = None + + self.filenameList = [] + + self.filename = None + + self.fileSize = None + + self.firstHeaderSize = 0 + + self.basicHeaderSize = 24 + + self.pathList = [] + + self.filenameList = [] + + self.lastUTTime = 0 + + self.maxTimeStep = 30 + + self.flagNoMoreFiles = 0 + + self.set = 0 + + self.path = None + + self.profileIndex = 9999 + + self.delay = 3 #seconds + + self.nTries = 3 #quantity tries + + self.nFiles = 3 #number of files for searching + + self.nReadBlocks = 0 + + self.flagIsNewFile = 1 + + self.ippSeconds = 0 + + self.flagTimeBlock = 0 + + self.flagIsNewBlock = 0 + + self.nTotalBlocks = 0 + + self.blocksize = 0 + + def createObjByDefault(self): + + dataObj = Voltage() + + return dataObj + + def __hasNotDataInBuffer(self): + if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: + return 1 + return 0 + + + def getBlockDimension(self): + """ + Obtiene la cantidad de puntos a leer por cada bloque de datos + + Affected: + self.blocksize + + Return: + None + """ + pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels + self.blocksize = pts2read + + + def readBlock(self): + """ + readBlock lee el bloque de datos desde la posicion actual del puntero del archivo + (self.fp) y actualiza todos los parametros relacionados al bloque de datos + (metadata + data). La data leida es almacenada en el buffer y el contador del buffer + es seteado a 0 + + Inputs: + None + + Return: + None + + Affected: + self.profileIndex + self.datablock + self.flagIsNewFile + self.flagIsNewBlock + self.nTotalBlocks + + Exceptions: + Si un bloque leido no es un bloque valido + """ + + junk = numpy.fromfile( self.fp, self.dtype, self.blocksize ) + + try: + junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) ) + except: + print "The read block (%3d) has not enough data" %self.nReadBlocks + return 0 + + junk = numpy.transpose(junk, (2,0,1)) + self.datablock = junk['real'] + junk['imag']*1j + + self.profileIndex = 0 + + self.flagIsNewFile = 0 + self.flagIsNewBlock = 1 + + self.nTotalBlocks += 1 + self.nReadBlocks += 1 + + return 1 + + + def getData(self): + """ + getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage" + con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de + lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock" + + Ademas incrementa el contador del buffer en 1. + + Return: + data : retorna un perfil de voltages (alturas * canales) copiados desde el + buffer. Si no hay mas archivos a leer retorna None. 
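Note: readBlock above reads self.blocksize raw samples, reshapes them to (profiles, heights, channels), reorders the axes to (channels, profiles, heights) and combines the 'real'/'imag' fields into a complex datablock. The following is a minimal standalone sketch of that decode; the structured int16 dtype and the block dimensions are illustrative assumptions, not values read from a ProcessingHeader.

import numpy

profilesPerBlock = 128                                   # assumed
nHeights = 200                                           # assumed
nChannels = 4                                            # assumed
dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])  # assumed sample format

blocksize = profilesPerBlock * nHeights * nChannels
raw = numpy.zeros(blocksize, dtype=dtype)   # stand-in for numpy.fromfile(fp, dtype, blocksize)

# samples are stored profile-major: (profiles, heights, channels)
junk = raw.reshape(profilesPerBlock, nHeights, nChannels)

# reorder to (channels, profiles, heights) and build the complex datablock
junk = numpy.transpose(junk, (2, 0, 1))
datablock = junk['real'] + junk['imag'] * 1j

assert datablock.shape == (nChannels, profilesPerBlock, nHeights)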
+ + Variables afectadas: + self.dataOutObj + self.profileIndex + + Affected: + self.dataOutObj + self.profileIndex + self.flagTimeBlock + self.flagIsNewBlock + """ + if self.flagNoMoreFiles: return 0 + + self.flagTimeBlock = 0 + self.flagIsNewBlock = 0 + + if self.__hasNotDataInBuffer(): + + if not( self.readNextBlock() ): + return 0 + +# self.updateDataHeader() + + if self.flagNoMoreFiles == 1: + print 'Process finished' + return 0 + + #data es un numpy array de 3 dmensiones (perfiles, alturas y canales) + + if self.datablock == None: + self.dataOutObj.flagNoData = True + return 0 + + self.dataOutObj.data = self.datablock[:,self.profileIndex,:] + + self.dataOutObj.dtype = self.dtype + + self.dataOutObj.nChannels = self.systemHeaderObj.nChannels + + self.dataOutObj.nHeights = self.processingHeaderObj.nHeights + + self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock + + xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight + + self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) + + self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels) + + self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels) + + self.dataOutObj.flagTimeBlock = self.flagTimeBlock + + self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.profileIndex * self.ippSeconds + + self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt + + self.profileIndex += 1 + + self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy() + + self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() + + self.dataOutObj.flagNoData = False + + return 1 + + +class VoltageWriter(JRODataWriter): + """ + Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura + de los datos siempre se realiza por bloques. + """ + + ext = ".r" + + optchar = "D" + + shapeBuffer = None + + + def __init__(self, dataOutObj=None): + """ + Inicializador de la clase VoltageWriter para la escritura de datos de espectros. 
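Note: for each profile, getData derives the height axis and the timestamp from the processing and basic headers. A minimal sketch of those two computations follows; firstHeight, deltaHeight, nHeights, utc and ippSeconds are illustrative assumptions rather than values taken from a real header.

import numpy

firstHeight = 90.0      # km, assumed
deltaHeight = 0.15      # km, assumed
nHeights = 200          # assumed
utc = 1332201600        # block UTC in seconds, assumed
ippSeconds = 0.0004     # assumed
profileIndex = 10       # profile position inside the block

xf = firstHeight + nHeights * deltaHeight
heightList = numpy.arange(firstHeight, xf, deltaHeight)

# each profile inside a block is stamped relative to the block UTC
dataUtcTime = utc + profileIndex * ippSeconds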
+ + Affected: + self.dataOutObj + + Return: None + """ + if dataOutObj == None: + dataOutObj = Voltage() + + if not( isinstance(dataOutObj, Voltage) ): + raise ValueError, "in VoltageReader, dataOutObj must be an Spectra class object" + + self.dataOutObj = dataOutObj + + + def hasAllDataInBuffer(self): + if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: + return 1 + return 0 + + + def setBlockDimension(self): + """ + Obtiene las formas dimensionales del los subbloques de datos que componen un bloque + + Affected: + self.shape_spc_Buffer + self.shape_cspc_Buffer + self.shape_dc_Buffer + + Return: None + """ + self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock, + self.processingHeaderObj.nHeights, + self.systemHeaderObj.nChannels ) + + self.datablock = numpy.zeros((self.systemHeaderObj.nChannels, + self.processingHeaderObj.profilesPerBlock, + self.processingHeaderObj.nHeights), + dtype=numpy.dtype('complex')) + + + def writeBlock(self): + """ + Escribe el buffer en el file designado + + Affected: + self.profileIndex + self.flagIsNewFile + self.flagIsNewBlock + self.nTotalBlocks + self.blockIndex + + Return: None + """ + data = numpy.zeros( self.shapeBuffer, self.dtype ) + + junk = numpy.transpose(self.datablock, (1,2,0)) + + data['real'] = junk.real + data['imag'] = junk.imag + + data = data.reshape( (-1) ) + + data.tofile( self.fp ) + + self.datablock.fill(0) + + self.profileIndex = 0 + self.flagIsNewFile = 0 + self.flagIsNewBlock = 1 + + self.blockIndex += 1 + self.nTotalBlocks += 1 + + def putData(self): + """ + Setea un bloque de datos y luego los escribe en un file + + Affected: + self.flagIsNewBlock + self.profileIndex + + Return: + 0 : Si no hay data o no hay mas files que puedan escribirse + 1 : Si se escribio la data de un bloque en un file + """ + self.flagIsNewBlock = 0 + + if self.dataOutObj.flagNoData: + return 0 + + if self.dataOutObj.flagTimeBlock: + + self.datablock.fill(0) + self.profileIndex = 0 + self.setNextFile() + + self.datablock[:,self.profileIndex,:] = self.dataOutObj.data + + self.profileIndex += 1 + + if self.hasAllDataInBuffer(): + #if self.flagIsNewFile: + self.getDataHeader() + self.writeNextBlock() + + if self.flagNoMoreFiles: + #print 'Process finished' + return 0 + + return 1 + + def getDataHeader(self): + + """ + Obtiene una copia del First Header + + Affected: + self.systemHeaderObj + self.radarControllerHeaderObj + self.dtype + + Return: + None + """ + +# CALCULAR PARAMETROS + + self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy() + self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy() + + self.basicHeaderObj.size = self.basicHeaderSize + self.basicHeaderObj.version = self.versionFile + self.basicHeaderObj.dataBlock = self.nTotalBlocks + self.basicHeaderObj.utc = self.dataOutObj.dataUtcTime + self.basicHeaderObj.miliSecond = 0 + self.basicHeaderObj.timeZone = 0 + self.basicHeaderObj.dstFlag = 0 + self.basicHeaderObj.errorCount = 0 + + self.processingHeaderObj.size = 0 + self.processingHeaderObj.dtype = self.dataOutObj.dtype + self.processingHeaderObj.blockSize = 0 + self.processingHeaderObj.profilesPerBlock = 0 + self.processingHeaderObj.dataBlocksPerFile = 0 + self.processingHeaderObj.numWindows = 0 + self.processingHeaderObj.processFlags = 0 + self.processingHeaderObj.coherentInt = 0 + self.processingHeaderObj.incoherentInt = 0 + self.processingHeaderObj.totalSpectra = 0 + + self.dtype = self.dataOutObj.dtype \ No newline at end of file diff --git a/schainpy2/Processing/VoltageProcessor.py 
b/schainpy2/Processing/VoltageProcessor.py index 19e10fe..98b08e0 100644 --- a/schainpy2/Processing/VoltageProcessor.py +++ b/schainpy2/Processing/VoltageProcessor.py @@ -1,133 +1,133 @@ -import os -import sys -import numpy - -path = os.path.split(os.getcwd())[0] -sys.path.append(path) - -from Data.Voltage import Voltage - - -class VoltageProcessor: - dataInObj = None - dataOutObj = None - integratorObjIndex = None - writerObjIndex = None - integratorObjList = None - writerObjList = None - - def __init__(self): - self.integratorObjIndex = None - self.writerObjIndex = None - self.integratorObjList = [] - self.writerObjList = [] - - def setup(self,dataInObj=None,dataOutObj=None): - self.dataInObj = dataInObj - - if self.dataOutObj == None: - dataOutObj = Voltage() - - self.dataOutObj = dataOutObj - - return self.dataOutObj - - def init(self): - self.integratorObjIndex = 0 - self.writerObjIndex = 0 - # No necesita copiar en cada init() los atributos de dataInObj - # la copia deberia hacerse por cada nuevo bloque de datos - - def addIntegrator(self,N,timeInterval): - objCohInt = CoherentIntegrator(N,timeInterval) - self.integratorObjList.append(objCohInt) - - def addWriter(self): - pass - - def integrator(self, N=None, timeInterval=None): - if self.dataOutObj.flagNoData: - return 0 - if len(self.integratorObjList) <= self.integratorObjIndex: - self.addIntegrator(N,timeInterval) - - myCohIntObj = self.integratorObjList[self.integratorObjIndex] - myCohIntObj.exe(data=self.dataOutObj.data,timeOfData=None) - - pass - - def writeData(self): - pass - -class CoherentIntegrator: - - integ_counter = None - data = None - navg = None - buffer = None - nCohInt = None - - def __init__(self, N=None,timeInterval=None): - - self.data = None - self.navg = None - self.buffer = None - self.timeOut = None - self.exitCondition = False - self.isReady = False - self.nCohInt = N - self.integ_counter = 0 - if timeInterval!=None: - self.timeIntervalInSeconds = timeInterval * 60. 
#if (type(timeInterval)!=integer) -> change this line - - if ((timeInterval==None) and (N==None)): - raise ValueError, "N = None ; timeInterval = None" - - if timeInterval == None: - self.timeFlag = False - else: - self.timeFlag = True - - def exe(self, data, timeOfData): - - if self.timeFlag: - if self.timeOut == None: - self.timeOut = timeOfData + self.timeIntervalInSeconds - - if timeOfData < self.timeOut: - if self.buffer == None: - self.buffer = data - else: - self.buffer = self.buffer + data - self.integ_counter += 1 - else: - self.exitCondition = True - - else: - if self.integ_counter < self.nCohInt: - if self.buffer == None: - self.buffer = data - else: - self.buffer = self.buffer + data - - self.integ_counter += 1 - - if self.integ_counter == self.nCohInt: - self.exitCondition = True - - if self.exitCondition: - self.data = self.buffer - self.navg = self.integ_counter - self.isReady = True - self.buffer = None - self.timeOut = None - self.integ_counter = 0 - self.exitCondition = False - - if self.timeFlag: - self.buffer = data - self.timeOut = timeOfData + self.timeIntervalInSeconds - else: - self.isReady = False - - +import os +import sys +import numpy + +path = os.path.split(os.getcwd())[0] +sys.path.append(path) + +from Data.Voltage import Voltage + + +class VoltageProcessor: + dataInObj = None + dataOutObj = None + integratorObjIndex = None + writerObjIndex = None + integratorObjList = None + writerObjList = None + + def __init__(self): + self.integratorObjIndex = None + self.writerObjIndex = None + self.integratorObjList = [] + self.writerObjList = [] + + def setup(self,dataInObj=None,dataOutObj=None): + self.dataInObj = dataInObj + + if self.dataOutObj == None: + dataOutObj = Voltage() + + self.dataOutObj = dataOutObj + + return self.dataOutObj + + def init(self): + self.integratorObjIndex = 0 + self.writerObjIndex = 0 + # No necesita copiar en cada init() los atributos de dataInObj + # la copia deberia hacerse por cada nuevo bloque de datos + + def addIntegrator(self,N,timeInterval): + objCohInt = CoherentIntegrator(N,timeInterval) + self.integratorObjList.append(objCohInt) + + def addWriter(self): + pass + + def integrator(self, N=None, timeInterval=None): + if self.dataOutObj.flagNoData: + return 0 + if len(self.integratorObjList) <= self.integratorObjIndex: + self.addIntegrator(N,timeInterval) + + myCohIntObj = self.integratorObjList[self.integratorObjIndex] + myCohIntObj.exe(data=self.dataOutObj.data,timeOfData=None) + + pass + + def writeData(self): + pass + +class CoherentIntegrator: + + integ_counter = None + data = None + navg = None + buffer = None + nCohInt = None + + def __init__(self, N=None,timeInterval=None): + + self.data = None + self.navg = None + self.buffer = None + self.timeOut = None + self.exitCondition = False + self.isReady = False + self.nCohInt = N + self.integ_counter = 0 + if timeInterval!=None: + self.timeIntervalInSeconds = timeInterval * 60. 
#if (type(timeInterval)!=integer) -> change this line + + if ((timeInterval==None) and (N==None)): + raise ValueError, "N = None ; timeInterval = None" + + if timeInterval == None: + self.timeFlag = False + else: + self.timeFlag = True + + def exe(self, data, timeOfData): + + if self.timeFlag: + if self.timeOut == None: + self.timeOut = timeOfData + self.timeIntervalInSeconds + + if timeOfData < self.timeOut: + if self.buffer == None: + self.buffer = data + else: + self.buffer = self.buffer + data + self.integ_counter += 1 + else: + self.exitCondition = True + + else: + if self.integ_counter < self.nCohInt: + if self.buffer == None: + self.buffer = data + else: + self.buffer = self.buffer + data + + self.integ_counter += 1 + + if self.integ_counter == self.nCohInt: + self.exitCondition = True + + if self.exitCondition: + self.data = self.buffer + self.navg = self.integ_counter + self.isReady = True + self.buffer = None + self.timeOut = None + self.integ_counter = 0 + self.exitCondition = False + + if self.timeFlag: + self.buffer = data + self.timeOut = timeOfData + self.timeIntervalInSeconds + else: + self.isReady = False + + diff --git a/schainpy2/__init__.py b/schainpy2/__init__.py new file mode 100644 index 0000000..d3f5a12 --- /dev/null +++ b/schainpy2/__init__.py @@ -0,0 +1 @@ + diff --git a/schainpy2/testSchainSpecExp.py b/schainpy2/testSchainSpecExp.py new file mode 100644 index 0000000..c71eb25 --- /dev/null +++ b/schainpy2/testSchainSpecExp.py @@ -0,0 +1,68 @@ + +import os, sys +import time, datetime + +path = os.path.split(os.getcwd())[0] +sys.path.append(path) + +from Data.Voltage import Voltage +from Data.Spectra import Spectra +from IO.VoltageIO import * +from IO.SpectraIO import * +from Processing.VoltageProcessor import * + + + +class TestSChain: + + def __init__(self): + self.setValues() + self.createObjects() + self.testSChain() + + def setValues(self): + self.path = "/Users/jro/Documents/RadarData/MST_ISR/MST" +# self.path = "/home/roj-idl71/Data/RAWDATA/IMAGING" + self.path = "/Users/danielangelsuarezmunoz/Data/EW_Drifts" + self.path = "/Users/danielangelsuarezmunoz/Data/IMAGING" + + self.wrpath = "/Users/jro/Documents/RadarData/wr_data" + + self.startDate = datetime.date(2012,3,1) + self.endDate = datetime.date(2012,3,30) + + self.startTime = datetime.time(0,0,0) + self.endTime = datetime.time(14,1,1) + + def createObjects(self): + + self.readerObj = SpectraReader() + + self.voltObj1 = self.readerObj.setup( + path = self.path, + startDate = self.startDate, + endDate = self.endDate, + startTime = self.startTime, + endTime = self.endTime, + expLabel = '', + online = 0) + + + + def testSChain(self): + + ini = time.time() + + while(True): + self.readerObj.getData() + + if self.readerObj.flagNoMoreFiles: + break + + if self.readerObj.flagIsNewBlock: + print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, + datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),) + + +if __name__ == '__main__': + TestSChain() \ No newline at end of file
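Note: testSchainSpecExp.py above exercises the spectra chain; a rawdata (voltage) chain built from the classes added in this patch would look roughly like the sketch below. It assumes VoltageReader.setup() accepts the same keyword arguments as the SpectraReader.setup() call shown above, and it drives CoherentIntegrator directly through exe(); the path, dates and integration count are placeholders.

import datetime

from IO.VoltageIO import VoltageReader
from Processing.VoltageProcessor import CoherentIntegrator

readerObj = VoltageReader()

readerObj.setup(path = "/path/to/rawdata",              # placeholder
                startDate = datetime.date(2012, 3, 1),
                endDate = datetime.date(2012, 3, 30),
                startTime = datetime.time(0, 0, 0),
                endTime = datetime.time(23, 59, 59),
                expLabel = '',
                online = 0)

cohIntObj = CoherentIntegrator(N = 4)   # integrate every 4 profiles

while(True):

    readerObj.getData()

    if readerObj.flagNoMoreFiles:
        break

    if readerObj.dataOutObj.flagNoData:
        continue

    cohIntObj.exe(data = readerObj.dataOutObj.data, timeOfData = None)

    if cohIntObj.isReady:
        # cohIntObj.data holds the sum of the last cohIntObj.navg profiles
        print('Integrated %d profiles' % cohIntObj.navg)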