'''
$Author: murco $
$Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
'''
import os, sys
import glob
import time
import datetime
import numpy
import fnmatch

import h5py

from xml.etree.ElementTree import Element, SubElement, ElementTree

try:
    import pyfits
except:
    print "The pyfits module could not be imported; it must be installed to save files in FITS format"

from jrodata import *
from jroheaderIO import *
from jroprocessing import *

LOCALTIME = True #-18000

def isNumber(str):
    """
    Checks whether the characters of a string can be converted to a number.

    Input:
        str : the string to analyze

    Return:
        True  : the string is numeric
        False : the string is not numeric
    """
    try:
        float(str)
        return True
    except:
        return False
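
# Illustrative usage sketch (not part of the original module): isNumber accepts
# anything the float() builtin can parse.
#
#     isNumber("367")            # -> True
#     isNumber("D2009307367.r")  # -> False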

def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determines whether a data file falls within the specified date range.

    Inputs:
        filename       : full name of the data file in Jicamarca format (.r)
        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970.
        endUTSeconds   : end of the selected range, given in seconds
                         counted from 01/01/1970.

    Return:
        Boolean : True if the data file contains data within the specified
                  date range, False otherwise.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read
    """

    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except:
        raise IOError, "The file %s can't be opened" % (filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it does not have a valid header" % (filename)
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
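
# Illustrative usage sketch (not part of the original module; the path is
# hypothetical): the UT bounds are plain epoch seconds, so they can be built
# with time.mktime().
#
#     start = time.mktime(datetime.datetime(2012, 11, 19, 0, 0, 0).timetuple())
#     end   = time.mktime(datetime.datetime(2012, 11, 20, 0, 0, 0).timetuple())
#     if isThisFileinRange("/data/d2012324/D2012324000.r", start, end):
#         print "file has data inside the range"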

def isFileinThisTime(filename, startTime, endTime):
    """
    Determines whether a data file falls within the specified time-of-day range.

    Inputs:
        filename  : full name of the data file in Jicamarca format (.r)
        startTime : start of the selected range, as a datetime.time
        endTime   : end of the selected range, as a datetime.time

    Return:
        The datetime of the first block if the file contains data within the
        specified time range, None otherwise.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read
    """
    try:
        fp = open(filename, 'rb')
    except:
        raise IOError, "The file %s can't be opened" % (filename)

    basicHeaderObj = BasicHeader(LOCALTIME)

    sts = basicHeaderObj.read(fp)
    fp.close()

    thisDatetime = basicHeaderObj.datatime
    thisTime = basicHeaderObj.datatime.time()

    if not(sts):
        print "Skipping the file %s because it does not have a valid header" % (filename)
        return None

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime

def getFileFromSet(path, ext, set):

    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    for file in fileList:
        try:
            year = int(file[1:5])
            doy = int(file[5:8])
        except:
            continue

        if (os.path.splitext(file)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(file)

    if not validFilelist:   #no candidate files at all; year/doy below would be undefined
        return None

    #year/doy keep the values parsed from the last valid file in the listing
    myfile = fnmatch.filter(validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]
    else:
        filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
        print 'the filename %s does not exist' % filename
        print '...going to the last file: '

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
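
# Illustrative sketch (not part of the original module): Jicamarca file names
# encode prefix, year, day-of-year and set number at fixed offsets,
#
#     0 1234 567 89A BCDE
#     H YYYY DDD SSS .ext
#
# so 'D2009307367.r' parses as:
#
#     name = 'D2009307367.r'
#     year = int(name[1:5])   # 2009
#     doy  = int(name[5:8])   # 307
#     set  = int(name[8:11])  # 367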

def getlastFileFromPath(path, ext):
    """
    Lists the files inside "path", keeps only those matching the
    "PYYYYDDDSSS.ext" format, and returns the last file of the filtered list.

    Input:
        path : folder containing the data files
        ext  : extension of the files contained in the folder

    Return:
        The last file of the given folder (without the path), or None.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    for file in fileList:
        try:
            year = int(file[1:5])
            doy = int(file[5:8])
        except:
            continue

        if (os.path.splitext(file)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(file)

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None

def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Because Linux is case sensitive, checkForRealPath finds the correct name
    of a path by trying several upper/lowercase combinations until it resolves
    the exact location of a given file.

    Example:
        If the correct file name is .../.../D2009307/P2009307367.ext
        then the function tries the following combinations:
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last combination being, in this case, identical to the wanted file.

    Return:
        If it finds the right combination it returns the full path and the
        file name; otherwise it returns None as the path and the last
        uppercase name combination as the filename.
    """
    fullfilename = None
    find_flag = False
    filename = None

    prefixDirList = [None, 'd', 'D']

    if ext.lower() == ".r": #voltage
        prefixFileList = ['d', 'D']
    elif ext.lower() == ".pdata": #spectra
        prefixFileList = ['p', 'P']
    else:
        return None, filename

    #sweep over the possible combinations
    for prefixDir in prefixDirList:
        thispath = path
        if prefixDir != None:
            #build the directory name xYYYYDDD (x=d or x=D)
            if foldercounter == 0:
                thispath = os.path.join(path, "%s%04d%03d" % (prefixDir, year, doy))
            else:
                thispath = os.path.join(path, "%s%04d%03d_%02d" % (prefixDir, year, doy, foldercounter))

        for prefixFile in prefixFileList: #sweep over the two possible cases of the file prefix
            filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext) #build the file name xYYYYDDDSSS.ext
            fullfilename = os.path.join(thispath, filename) #build the full path

            if os.path.exists(fullfilename): #check that it exists
                find_flag = True
                break
        if find_flag:
            break

    if not(find_flag):
        return None, filename

    return fullfilename, filename
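
# Illustrative usage sketch (not part of the original module; the path is
# hypothetical):
#
#     fullfilename, filename = checkForRealPath("/data", 0, 2009, 307, 367, ".r")
#     if fullfilename is None:
#         print "no case combination matched; last tried name was %s" % filename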

def isDoyFolder(folder):
    try:
        year = int(folder[1:5])
    except:
        return 0

    try:
        doy = int(folder[5:8])
    except:
        return 0

    return 1
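
# Illustrative sketch (not part of the original module): a day-of-year folder
# is any name whose characters [1:5] and [5:8] parse as integers.
#
#     isDoyFolder("d2009307")     # -> 1
#     isDoyFolder("D2009307_01")  # -> 1
#     isDoyFolder("rawdata")      # -> 0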

class JRODataIO:

    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    online = 0
    dtype = None
    pathList = []
    filenameList = []
    filename = None
    ext = None
    flagIsNewFile = 1
    flagTimeBlock = 0
    flagIsNewBlock = 0
    fp = None
    firstHeaderSize = 0
    basicHeaderSize = 24
    versionFile = 1103
    fileSize = None
    ippSeconds = None
    fileSizeByHeader = None
    fileIndex = None
    profileIndex = None
    blockIndex = None
    nTotalBlocks = None
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    blocksize = None

    def __init__(self):
        raise ValueError, "Not implemented"

    def run(self):
        raise ValueError, "Not implemented"

    def getOutput(self):
        return self.dataOut

class JRODataReader(JRODataIO, ProcessingUnit):

    nReadBlocks = 0

    delay = 10   #number of seconds to wait for a new file

    nTries = 3   #quantity of tries

    nFiles = 3   #number of files for searching

    path = None

    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    def __init__(self):
        """
        """
        raise ValueError, "This method has not been implemented"

    def createObjByDefault(self):
        """
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        raise ValueError, "Not implemented"

    def __searchFilesOffLine(self,
                             path,
                             startDate,
                             endDate,
                             startTime=datetime.time(0,0,0),
                             endTime=datetime.time(23,59,59),
                             set=None,
                             expLabel='',
                             ext='.r',
                             walk=True):

        pathList = []

        if not walk:
            #pathList.append(path)
            multi_path = path.split(',')
            for single_path in multi_path:
                pathList.append(single_path)

        else:
            #dirList = []
            multi_path = path.split(',')
            for single_path in multi_path:
                dirList = []
                for thisPath in os.listdir(single_path):
                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue
                    if not isDoyFolder(thisPath):
                        continue

                    dirList.append(thisPath)

                if not(dirList):
                    return None, None

                thisDate = startDate

                while(thisDate <= endDate):
                    year = thisDate.timetuple().tm_year
                    doy = thisDate.timetuple().tm_yday

                    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')
                    if len(matchlist) == 0:
                        thisDate += datetime.timedelta(1)
                        continue
                    for match in matchlist:
                        pathList.append(os.path.join(single_path, match, expLabel))

                    thisDate += datetime.timedelta(1)

        if pathList == []:
            print "No folders were found for the date range: %s - %s" % (startDate, endDate)
            return None, None

        print "%d folder(s) found for the date range: %s - %s" % (len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []
        pathDict = {}
        filenameList_to_sort = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" % ext)
            fileList.sort()

            if not fileList:   #skip folders without files of this extension
                continue

            pathDict.setdefault(fileList[0])
            pathDict[fileList[0]] = i
            filenameList_to_sort.append(fileList[0])

        filenameList_to_sort.sort()

        for file in filenameList_to_sort:
            thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" % ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath, file)
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "No files were found for the time range %s - %s" % (startTime, endTime)
            return None, None

        print "%d file(s) found for the time range: %s - %s" % (len(filenameList), startTime, endTime)

        for i in range(len(filenameList)):
            print "%s -> [%s]" % (filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
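
    # Illustrative sketch (not part of the original module; the tree is
    # hypothetical): with walk=True the search expects one day-of-year folder
    # per day under each path,
    #
    #     /data/
    #         d2012324/
    #             D2012324000.r
    #             D2012324001.r
    #         d2012325/
    #             D2012325000.r
    #
    # so __searchFilesOffLine("/data", startDate, endDate, ...) returns the
    # matching folders and, for each, the files whose first block falls inside
    # [startTime, endTime).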

    def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True, set=None):
        """
        Looks for the last file of the last folder and returns the file found,
        along with related data.

        Input:
            path     : folder containing the data files
            expLabel : name of the sub-experiment (subfolder)
            ext      : extension of the files
            walk     : when enabled, the search descends into the day-of-year
                       subdirectories (doypath)

        Return:
            fullpath      : the directory where the file was found
            foldercounter : the folder sequence number (suffix _NN), 0 if none
            filename      : the last file of the given folder
            year          : the year
            doy           : the day of the year
            set           : the file set number
        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            #keep only the directories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path, thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted(dirList, key=str.lower)

            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_')) > 1 else 0
            fullpath = os.path.join(path, doypath, expLabel)

        print "%s folder was found: " % (fullpath)

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" % (filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        year = int(filename[1:5])
        doy = int(filename[5:8])
        set = int(filename[8:11])

        return fullpath, foldercounter, filename, year, doy, set

    def __setNextFileOffline(self):

        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename, 'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s" % self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Looks for the next file with enough data to be read inside a specific
        folder; if no valid file is found it waits a given time and then
        searches through the next n possible files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : if after the search the next valid file could not be found
            1 : if the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        #look for the first available file
        fullfilename, filename = checkForRealPath(self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        #if no file is found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles + 1):   #search through the next self.nFiles+1 possible files

                if firstTime_flag:   #on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1   #afterwards try only once

                for nTries in range(tries):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1)
                        time.sleep(self.delay)
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath(self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" because it does not exist" % filename
                self.set += 1

                if nFiles == (self.nFiles - 1):   #if the wanted file is not found, move on to the next folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag

    def setNextFile(self):

        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Returns 1 if a new data block was found, 0 otherwise.
        If the reading mode is offline it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.__rdBasicHeader()
                return 1

            if self.fileSize == self.fileSizeByHeader:
#                self.flagEoF = True
                return 0

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
            time.sleep(self.delay)

        return 0

    def waitDataBlock(self, pointer_location):

        currentPointer = pointer_location
        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range(self.nTries):
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
            time.sleep(self.delay)

        return 0

    def __jumpToLastBlock(self):

        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        #skip the first data block
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        while True:

            if self.fp.tell() < self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

#        csize = self.fileSize - self.fp.tell()
#        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
#        factor = int(csize/neededsize)
#        if factor > 0:
#            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0

    def __setNewBlock(self):

        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1

    def readNextBlock(self):
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)

    def __readFirstHeader(self):
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize * (self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
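
    # Worked example (illustrative, not part of the original module): the
    # datatype index is the bit position of the datatype flag relative to
    # PROCFLAG.DATATYPE_CHAR. Assuming the PROCFLAG bit layout from
    # jroheaderIO, for a file written as 32-bit integers,
    #
    #     processFlags & DATATYPE_MASK == DATATYPE_LONG
    #     log2(DATATYPE_LONG) - log2(DATATYPE_CHAR) == 2   -> dtype ('<i4','<i4')
    #
    # ippSeconds converts the IPP from km to seconds of round-trip travel time
    # at c = 3e8 m/s:
    #
    #     ipp = 150.0 km  ->  2 * 1000 * 150.0 / 3e8 = 1.0e-3 s (1 ms per profile)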

    def __verifyFile(self, filename, msgFlag=True):
        msg = None

        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not(basicHeaderObj.read(fp)): raise IOError
                if not(systemHeaderObj.read(fp)): raise IOError
                if not(radarControllerHeaderObj.read(fp)): raise IOError
                if not(processingHeaderObj.read(fp)): raise IOError

                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
            except:
                if msgFlag:
                    print "\tThe file %s is empty or doesn't have enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s because it doesn't have enough data" % filename

        fp.close()

        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition

        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=None,
              expLabel="",
              ext=None,
              online=False,
              delay=60,
              walk=True):

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range(self.nTries):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries + 1)
                time.sleep(self.delay)

            if not(fullpath):
                print "There are no valid files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1
            self.path = path
            self.foldercounter = foldercounter
            last_set = None

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files in the folder %s \nfor the range: %s - %s" % (ext, path,
                                                                                  datetime.datetime.combine(startDate, startTime).ctime(),
                                                                                  datetime.datetime.combine(endDate, endTime).ctime())
                sys.exit(-1)

            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

            file_name = os.path.basename(filenameList[-1])
            basename, ext = os.path.splitext(file_name)
            last_set = int(basename[-3:])

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate != None) and (endDate != None):
                print "No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        if last_set != None:
            self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock

        return self.dataOut

    def getBasicHeader(self):

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / 1000. + self.profileIndex * self.ippSeconds

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
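
    # Worked example (illustrative, not part of the original module): utctime
    # places each profile inside its block. With utc = 1353340800 s,
    # miliSecond = 250 and ippSeconds = 1.0e-3, profile 10 is stamped
    #
    #     1353340800 + 250/1000. + 10 * 1.0e-3 = 1353340800.26 s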

    def getFirstHeader(self):
        raise ValueError, "This method has not been implemented"

    def getData(self):
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer(self):
        raise ValueError, "This method has not been implemented"

    def readBlock(self):
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        return self.flagNoMoreFiles

    def printReadBlocks(self):
        print "Number of read blocks per file %04d" % self.nReadBlocks

    def printTotalBlocks(self):
        print "Number of read blocks %04d" % self.nTotalBlocks

    def printNumberOfBlock(self):

        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" % (self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())

            self.dataOut.blocknow = self.basicHeaderObj.dataBlock

    def printInfo(self):

        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        self.__printInfo = False

    def run(self, **kwargs):

        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

class JRODataWriter(JRODataIO, Operation):

    """
    This class writes processed data to files (.r or .pdata). Data is always
    written one block at a time.
    """

    blockIndex = 0

    path = None

    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        raise ValueError, "Not implemented"

    def hasAllDataInBuffer(self):
        raise ValueError, "Not implemented"

    def setBlockDimension(self):
        raise ValueError, "Not implemented"

    def writeBlock(self):
        raise ValueError, "Not implemented"

    def putData(self):
        raise ValueError, "Not implemented"

    def setBasicHeader(self):

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Gets a copy of the First Header.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """
        raise ValueError, "Not implemented"

    def __writeFirstHeader(self):
        """
        Writes the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """
        # COMPUTE PARAMETERS
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size

        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        If this is a new file it writes the First Header, otherwise it writes
        only the Basic Header.

        Return:
            0 : if nothing could be written
            1 : if the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not(self.setNextFile()):
            return 0

        return 1

    def writeNextBlock(self):
        """
        Selects the next data block and writes it to a file.

        Return:
            0 : if the data block could not be written
            1 : if the data block was written
        """
        if not(self.__setNewBlock()):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Determines the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : if the file cannot be written
            1 : if the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

        fullpath = os.path.join(path, subfolder)
        if not(os.path.exists(fullpath)):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir(fullpath)
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # the filename should have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    self.setFile = int(filen[8:11]) #initialize the set counter to the set of the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext)

        filename = os.path.join(path, subfolder, file)

        fp = open(filename, 'wb')

        self.blockIndex = 0

        #save the attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print 'Writing the file: %s' % self.filename

        self.__writeFirstHeader()

        return 1
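
    # Illustrative sketch (not part of the original module): for data taken on
    # 2012-11-19 (day 324) the writer creates, under the destination path,
    #
    #     d2012324/D2012324000.r      (first file, set 000)
    #     d2012324/D2012324001.r      (after blocksPerFile blocks, set 001)
    #
    # the 'D' prefix and '.r' extension being those of VoltageWriter.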

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
        """
        Sets the format in which the data will be saved and writes the First
        Header.

        Inputs:
            path   : destination path where the files will be created
            format : format in which a file will be saved
            set    : file set number

        Return:
            0 : if the setup failed
            1 : if the setup succeeded
        """
        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext
        self.path = path
        self.setFile = set - 1
        self.blocksPerFile = blocksPerFile
        self.profilesPerBlock = profilesPerBlock
        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()

class VoltageReader(JRODataReader):
    """
    This class reads voltage data from files in rawdata format (.r). Data is
    always read one block at a time. The data read (a 3-dimensional array:
    profiles * heights * channels) is stored in the "buffer" variable.

    This class contains instances (objects) of the classes BasicHeader,
    SystemHeader, RadarControllerHeader and Voltage. The first three are used
    to store the data-header information (metadata), and the fourth (Voltage)
    to obtain and store one data profile from the "buffer" each time the
    "getData" method is executed.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #if you want to see the whole datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """
    ext = ".r"

    optchar = "D"

    dataOut = None

    def __init__(self):
        """
        Initializer of the VoltageReader class for reading voltage data.

        Input:
            dataOut : object of the Voltage class. This object is used to
                      store one data profile each time getData is called. The
                      profile is obtained from the data buffer; if the buffer
                      is empty a new block of data is read.
                      If this parameter is not passed, one is created
                      internally.

        Affected:
            self.dataOut

        Return:
            None
        """
        self.isConfig = False

        self.datablock = None
        self.utc = 0
        self.ext = ".r"
        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()

        self.online = 0
        self.fp = None
        self.idFile = None
        self.dtype = None
        self.fileSizeByHeader = None
        self.filenameList = []
        self.filename = None
        self.fileSize = None
        self.firstHeaderSize = 0
        self.basicHeaderSize = 24
        self.pathList = []
        self.lastUTTime = 0
        self.maxTimeStep = 30
        self.flagNoMoreFiles = 0
        self.set = 0
        self.path = None
        self.profileIndex = 2**32-1

        self.delay = 3    #seconds
        self.nTries = 3   #quantity of tries
        self.nFiles = 3   #number of files for searching
        self.nReadBlocks = 0
        self.flagIsNewFile = 1
        self.__isFirstTimeOnline = 1
        self.ippSeconds = 0
        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0
        self.nTotalBlocks = 0
        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):

        dataObj = Voltage()

        return dataObj

    def __hasNotDataInBuffer(self):
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def getBlockDimension(self):
        """
        Gets the number of data points to read for each data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
        self.blocksize = pts2read
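
    # Worked example (illustrative, not part of the original module): with
    # 128 profiles per block, 200 heights and 2 channels,
    #
    #     blocksize = 128 * 200 * 2 = 51200 complex samples per block
    #
    # numpy.fromfile then reads exactly that many (real, imag) pairs.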

    def readBlock(self):
        """
        readBlock reads the data block from the current position of the file
        pointer (self.fp) and updates all parameters related to the data block
        (metadata + data). The data read is stored in the buffer and the
        buffer counter is reset to 0.

        Inputs:
            None

        Return:
            None

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Exceptions:
            If the block read is not a valid data block
        """
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)

        try:
            junk = junk.reshape((self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
        except:
            #print "The read block (%3d) has not enough data" %self.nReadBlocks
            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)
                junk = junk.reshape((self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
#                return 0

        junk = numpy.transpose(junk, (2,0,1))
        self.datablock = junk['real'] + junk['imag'] * 1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
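
    # Illustrative sketch (not part of the original module): the raw block is
    # stored profile-major on disk, so it is read as
    # (profiles, heights, channels) records of the structured dtype and then
    # transposed to the in-memory layout used everywhere else:
    #
    #     junk.shape                             # (profilesPerBlock, nHeights, nChannels)
    #     numpy.transpose(junk, (2,0,1)).shape   # (nChannels, profilesPerBlock, nHeights)
    #
    # before the real/imag fields are combined into one complex array.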

    def getFirstHeader(self):

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.ippSeconds = self.ippSeconds
        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagShiftFFT = False

        if self.radarControllerHeaderObj.code != None:
            self.dataOut.nCode = self.radarControllerHeaderObj.nCode
            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagDecodeData = False #assume the data is not decoded
        self.dataOut.flagDeflipData = False #assume the data is not deflipped
        self.dataOut.flagShiftFFT = False

    def getData(self):
        """
        getData obtains one data unit from the read buffer and copies it into
        the "Voltage" object, together with all its associated parameters
        (metadata). When there is no data left in the read buffer, a new block
        is read with "readNextBlock". It also increments the buffer counter
        by 1.

        Return:
            data : one profile of voltages (heights * channels) copied from
                   the buffer. Returns None if there are no more files to
                   read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not(self.readNextBlock()):
                return 0

        self.getFirstHeader()

        if self.datablock == None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = self.datablock[:, self.profileIndex, :]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.profileIndex += 1

        self.dataOut.realtime = self.online

        return self.dataOut.data

class VoltageWriter(JRODataWriter):
    """
    This class writes voltage data to processed files (.r). Data is always
    written one block at a time.
    """
    ext = ".r"

    optchar = "D"

    shapeBuffer = None

    def __init__(self):
        """
        Initializer of the VoltageWriter class for writing voltage data.

        Affected:
            self.dataOut

        Return: None
        """
        self.nTotalBlocks = 0
        self.profileIndex = 0

        self.isConfig = False

        self.fp = None
        self.flagIsNewFile = 1
        self.flagIsNewBlock = 0
        self.setFile = None
        self.dtype = None
        self.path = None
        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Gets the dimensional shapes of the data sub-blocks that make up a
        block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """
        Writes the buffer to the designated file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Fills a data block and then writes it to a file.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if a data block was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
#            self.setFirstHeader()

        return 1

    def __getProcessFlags(self):

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags
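
    # Illustrative sketch (not part of the original module): the process flags
    # form an additive bitmask, e.g. float data that was decoded and
    # coherently integrated yields
    #
    #     PROCFLAG.DATATYPE_FLOAT + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION
    #
    # which __readFirstHeader later inverts via DATATYPE_MASK to recover the dtype.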

    def __getBlockSize(self):
        '''
        Determines the number of bytes of a Voltage data block.
        '''
        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [1, 2, 4, 8, 4, 8]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)

        return blocksize
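
    # Worked example (illustrative, not part of the original module): for
    # 200 heights, 2 channels, 64 profiles per block and short-int samples
    # (datatypeValue = 2 bytes), with the factor 2 for the real/imag pair,
    #
    #     blocksize = 200 * 2 * 64 * 2 * 2 = 102400 bytes per block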

    def setFirstHeader(self):
        """
        Gets a copy of the First Header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """
        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage

#        if self.dataOut.code != None:
#            self.processingHeaderObj.code = self.dataOut.code
#            self.processingHeaderObj.nCode = self.dataOut.nCode
#            self.processingHeaderObj.nBaud = self.dataOut.nBaud
#            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
#            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize

class SpectraReader(JRODataReader):
    """
    This class reads spectra data from processed files (.pdata). Data is
    always read one block at a time. The data read (3-dimensional arrays) is
    stored in three buffers, for the Self Spectra, the Cross Spectra and the
    DC Channel:

        pairsOfEqualChannels     * heights * profiles   (Self Spectra)
        pairsOfDifferentChannels * heights * profiles   (Cross Spectra)
        channels * heights                              (DC Channels)

    This class contains instances (objects) of the classes BasicHeader,
    SystemHeader, RadarControllerHeader and Spectra. The first three are used
    to store the data-header information (metadata), and the fourth (Spectra)
    to obtain and store a data block from the "buffer" each time the
    "getData" method is executed.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    nRdChannels = None

    nRdPairs = None

    rdPairList = []

    def __init__(self):
        """
        Initializer of the SpectraReader class for reading spectra data.

        Inputs:
            dataOut : object of the Spectra class. This object is used to
                      store one data unit each time getData is called. The
                      data is obtained from the data buffer; if the buffer is
                      empty a new block of data is read.
                      If this parameter is not passed, one is created
                      internally.

        Affected:
            self.dataOut

        Return : None
        """
        self.isConfig = False

        self.pts2read_SelfSpectra = 0
        self.pts2read_CrossSpectra = 0
        self.pts2read_DCchannels = 0

        self.datablock = None
        self.utc = None
        self.ext = ".pdata"
        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()

        self.online = 0
        self.fp = None
        self.idFile = None
        self.dtype = None
        self.fileSizeByHeader = None
        self.filenameList = []
        self.filename = None
        self.fileSize = None
        self.firstHeaderSize = 0
        self.basicHeaderSize = 24
        self.pathList = []
        self.lastUTTime = 0
        self.maxTimeStep = 30
        self.flagNoMoreFiles = 0
        self.set = 0
        self.path = None

        self.delay = 60   #seconds
        self.nTries = 3   #quantity of tries
        self.nFiles = 3   #number of files for searching
        self.nReadBlocks = 0
        self.flagIsNewFile = 1
        self.__isFirstTimeOnline = 1
        self.ippSeconds = 0
        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0
        self.nTotalBlocks = 0
        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1 #Always

    def createObjByDefault(self):

        dataObj = Spectra()

        return dataObj
    def __hasNotDataInBuffer(self):
        return 1

    def getBlockDimension(self):
        """
        Gets the number of points to read in each data block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1  # pair of equal channels
            else:
                self.nRdPairs = self.nRdPairs + 1        # pair of different channels
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

#        self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
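
    # Worked example (added comment, with assumed values): if totalSpectra = 3 and
    # spectraComb = [0,0, 1,1, 0,1], the loop in getBlockDimension() above yields
    # nRdChannels = 2 (self-spectra of channels 0 and 1), nRdPairs = 1 and
    # rdPairList = [(0, 1)] (one cross-spectrum between channels 0 and 1).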
    def readBlock(self):
        """
        Reads a data block from the current position of the file pointer
        (self.fp) and updates every parameter related to that block
        (metadata + data). The data read is kept in the buffer and the buffer
        counter is reset to 0.

        Return: None

        Affected variables:
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If a block read is not a valid block
        """
        blockOk_flag = False
        fpointer = self.fp.tell()

        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) )  # reshape to a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) )  # reshape to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels )  # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) )  # reshape to a 2D array

        if not(self.processingHeaderObj.shif_fft):
            # shift a given number of positions to the right along axis 2
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift, axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # shift a given number of positions to the right along axis 2
                cspc = numpy.roll( cspc, shift, axis=2 )

#            self.processingHeaderObj.shif_fft = True

        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
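    # Minimal sketch (added comment; names and sizes are assumed, not taken from
    # this file) of the two conversions used in readBlock() above: the file stores
    # interleaved real/imag samples that numpy reads as a record array, and
    # spectra saved without the FFT shift are re-centered with numpy.roll:
    #
    #     rec  = numpy.zeros(8, dtype=numpy.dtype([('real','<f4'), ('imag','<f4')]))
    #     cplx = rec['real'] + rec['imag']*1j   # record array -> complex array
    #     cplx = numpy.roll(cplx, len(cplx)/2)  # move the zero frequency to the center
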
    def getFirstHeader(self):

        self.dataOut.dtype = self.dtype
        self.dataOut.nPairs = self.nRdPairs
        self.dataOut.pairsList = self.rdPairList
        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
        self.dataOut.ippSeconds = self.ippSeconds
        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
        self.dataOut.flagDecodeData = False  # assume the data has not been decoded
        self.dataOut.flagDeflipData = True   # assume the data has already been de-flipped

        if self.radarControllerHeaderObj.code is not None:

            self.dataOut.nCode = self.radarControllerHeaderObj.nCode

            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud

            self.dataOut.code = self.radarControllerHeaderObj.code

            self.dataOut.flagDecodeData = True
    def getData(self):
        """
        Copies the read buffer into the "Spectra" object, with all the parameters
        associated to it (metadata). When there is no data left in the read
        buffer, a new block read is performed with "readNextBlock".

        Return:
            0 : if there are no more files available
            1 : if the buffer was copied correctly

        Affected:
            self.dataOut
            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        # data is a 3-dimensional numpy array (profiles, heights and channels)

        if self.data_dc is None:
            self.dataOut.flagNoData = True
            return 0

        self.getBasicHeader()

        self.getFirstHeader()

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagNoData = False

        self.dataOut.realtime = self.online

        return self.dataOut.data_spc
class SpectraWriter(JRODataWriter):
    """
    This class writes spectra data to processed files (.pdata). Data is always
    written by blocks.
    """

    ext = ".pdata"

    optchar = "P"

    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    data_spc = None

    data_cspc = None

    data_dc = None

#    dataOut = None

    def __init__(self):
        """
        Initializer of the SpectraWriter class, used to write spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0
        self.data_spc = None
        self.data_cspc = None
        self.data_dc = None
        self.fp = None
        self.flagIsNewFile = 1
        self.flagIsNewBlock = 0
        self.setFile = None
        self.dtype = None
        self.path = None
        self.noMoreFiles = 0
        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        return 1
    def setBlockDimension(self):
        """
        Gets the dimensional shapes of the data sub-blocks that make up a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)
    def writeBlock(self):
        """
        Writes the buffer to the designated file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )  # shift to the right along axis 2
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )  # shift to the right along axis 2
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)

        if self.data_dc is not None:
            self.data_dc.fill(0)

        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1
    def putData(self):
        """
        Sets a block of data and then writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if a data block was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.setFirstHeader()
            self.writeNextBlock()

        return 1
    def __getProcessFlags(self):

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags
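
    # Note (added comment): the PROCFLAG constants are independent bit masks, so
    # accumulating them with "+=" amounts to OR-ing flags together. For example,
    # with hypothetical masks DATATYPE_SHORT = 0x02 and DECODE_DATA = 0x40, a
    # decoded short-integer stream would yield processFlags = 0x42.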
    def __getBlockSize(self):
        '''
        This method determines the number of bytes in a Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [1,2,4,8,4,8]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize  # * datatypeValue * 2  # FIX THIS

        return blocksize
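    # Worked example (added comment, with assumed dimensions): for 2 channels,
    # 1 pair, 100 heights, 128 FFT points and '<f4' samples (datatypeValue = 4):
    #
    #     pts2write           = 100 * 128       = 12800
    #     self-spectra bytes  = 2 * 12800 * 4   = 102400
    #     cross-spectra bytes = 1 * 12800 * 4*2 = 102400  (complex: real + imag)
    #     DC-channel bytes    = 2 * 100 * 4*2   = 1600
    #     blocksize           = 206400 bytes
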
    def setFirstHeader(self):
        """
        Gets a copy of the First Header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        # the code is packed as 32-bit words: ceil(nBaud/32) words per code, 4 bytes each
        old_code_size = self.dataOut.radarControllerHeaderObj.code_size
        new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
        self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size

        self.setBasicHeader()

        processingHeaderSize = 40  # bytes
        self.processingHeaderObj.dtype = 1  # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt  # required to determine the value of timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:

            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code
#        if self.dataOut.code != None:
#            self.processingHeaderObj.code = self.dataOut.code
#            self.processingHeaderObj.nCode = self.dataOut.nCode
#            self.processingHeaderObj.nBaud = self.dataOut.nBaud
#            nCodeSize = 4  # bytes
#            nBaudSize = 4  # bytes
#            codeSize = 4   # bytes
#            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
#            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
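
# Hedged usage sketch (added for illustration): SpectraWriter inherits its file
# management from JRODataWriter, whose setup() parameters are assumed here (they
# are not defined in this section). The path value is hypothetical.
def _spectra_writer_sketch(dataOut):
    writerObj = SpectraWriter()
    writerObj.setup(dataOut, '/tmp/pdata', 100)  # assumed: (dataOut, path, blocksPerFile)
    writerObj.putData()  # copies the dataOut buffers and writes a block when complete
    return writerObj
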
class SpectraHeisWriter(Operation):

#    set = None

    setFile = None
    idblock = None
    doypath = None
    subfolder = None

    def __init__(self):

        self.wrObj = FITS()
#        self.dataOut = dataOut
        self.nTotalBlocks = 0
#        self.set = None
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False
    @staticmethod
    def isNumber(str):
        """
        Checks whether the set of characters in a string can be converted into a
        number.

        Exceptions:
            If a given string cannot be converted into a number

        Input:
            str, the string to analyze to determine whether it can be converted
            into a number

        Return:
            True  : if the string is numeric
            False : if the string is not numeric
        """
        try:
            float( str )
            return True
        except:
            return False
    def setup(self, dataOut, wrpath):

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
#        self.setFile = 0
        self.dataOut = dataOut

    def putData(self):
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"

        if self.doypath == None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            os.mkdir(self.doypath)

        if self.setFile == None:
#            self.set = self.dataOut.set
            self.setFile = 0
#        if self.set != self.dataOut.set:
##            self.set = self.dataOut.set
#            self.setFile = 0

        # make the filename
        file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath, self.subfolder, file)

        idblock = numpy.array([self.idblock], dtype="int64")
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=time.gmtime(self.dataOut.utctime).tm_year,
                                    month=time.gmtime(self.dataOut.utctime).tm_mon,
                                    day=time.gmtime(self.dataOut.utctime).tm_mday,
                                    hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                    minute=time.gmtime(self.dataOut.utctime).tm_min,
                                    second=time.gmtime(self.dataOut.utctime).tm_sec)

        c = 3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header, data)

        self.wrObj.wFile(filename)

        # update the setFile
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):

        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()

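# Note on the frequency axis built in SpectraHeisWriter.putData() (added comment;
# the numbers are an assumed example): with deltaHeight given in km, each bin of
# the -nHeights/2 .. nHeights/2 ramp is spaced by c/(2*deltaHeight*1000) Hz. For
# deltaHeight = 1.5 km the spacing is 3e8/(2*1500) = 1e5, i.e. 100 kHz per bin.
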
class ParameterConf:

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        self.name = parmElement.get('name')
        self.value = parmElement.get('value')

    def getElementName(self):
        return self.ELEMENTNAME

class Metadata:

    def __init__(self, filename):
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
class FitsWriter(Operation):

    def __init__(self):
        self.isConfig = False
        self.dataBlocksPerFile = None
        self.blockIndex = 0
        self.flagIsNewFile = 1
        self.fitsObj = None
        self.optchar = 'P'
        self.ext = '.fits'
        self.setFile = 0

    def setFitsHeader(self, dataOut, metadatafile):

        header_data = pyfits.PrimaryHDU()

        metadata4fits = Metadata(metadatafile)

        for parameter in metadata4fits.parmConfObjList:
            parm_name = parameter.name
            parm_value = parameter.value

#            if parm_value == 'fromdatadatetime':
#                value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
#            elif parm_value == 'fromdataheights':
#                value = dataOut.nHeights
#            elif parm_value == 'fromdatachannel':
#                value = dataOut.nChannels
#            elif parm_value == 'fromdatasamples':
#                value = dataOut.nFFTPoints
#            else:
#                value = parm_value

            header_data.header[parm_name] = parm_value

        header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
        header_data.header['CHANNELLIST'] = str(dataOut.channelList)
        header_data.header['NCHANNELS'] = dataOut.nChannels
        #header_data.header['HEIGHTS'] = dataOut.heightList
        header_data.header['NHEIGHTS'] = dataOut.nHeights

        header_data.header['IPPSECONDS'] = dataOut.ippSeconds
        header_data.header['NCOHINT'] = dataOut.nCohInt
        header_data.header['NINCOHINT'] = dataOut.nIncohInt
        header_data.header['TIMEZONE'] = dataOut.timeZone
        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)

        self.addExtension(dataOut.heightList, 'HEIGHTLIST')

    def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):

        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        self.fitsObj = pyfits.open(self.filename, mode='update')

    def addExtension(self, data, tagname):
        self.open()
        extension = pyfits.ImageHDU(data=data, name=tagname)
        #extension.header['TAG'] = tagname
        self.fitsObj.append(extension)
        self.write()

    def addData(self, data):
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex
        self.write()

    def write(self):
        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()

    def setNextFile(self):

        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1  # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] )  # initialize the set counter with the set of the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1  # initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        print 'Writing the file: %s'%self.filename

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0

    def __setNewBlock(self):
        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, **kwargs):
        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True
        self.putData()

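# Hedged usage sketch (added for illustration; the path and metadata file are
# hypothetical). FitsWriter is an Operation, so run() configures it on the first
# call and afterwards appends one image extension per data block:
def _fits_writer_sketch(dataOut):
    writerObj = FitsWriter()
    writerObj.run(dataOut,
                  path='/tmp/fits',             # output directory (hypothetical)
                  dataBlocksPerFile=100,        # blocks stored per .fits file
                  metadatafile='metadata.xml')  # ParameterConf XML read by Metadata
    return writerObj
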
class FitsReader(ProcessingUnit):

#    __TIMEZONE = time.timezone

    expName = None
    datetimestr = None
    utc = None
    nChannels = None
    nSamples = None
    dataBlocksPerFile = None
    comments = None
    lastUTTime = None
    header_dict = None
    data = None
    data_header_dict = None

    def __init__(self):
        self.isConfig = False
        self.ext = '.fits'
        self.setFile = 0
        self.flagNoMoreFiles = 0
        self.flagIsNewFile = 1
        self.flagTimeBlock = None
        self.fileIndex = None
        self.filename = None
        self.fileSize = None
        self.fitsObj = None
        self.timeZone = None
        self.nReadBlocks = 0
        self.nTotalBlocks = 0
        self.dataOut = self.createObjByDefault()
        self.maxTimeStep = 10  # should be defined by the user through the setup() method
        self.blockIndex = 1

    def createObjByDefault(self):

        dataObj = Fits()

        return dataObj

    def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
        try:
            fitsObj = pyfits.open(filename, 'readonly')
        except:
            raise IOError, "The file %s can't be opened" %(filename)

        header = fitsObj[0].header
        struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
        utc = time.mktime(struct_time) - time.timezone  # TIMEZONE should be a parameter of the FITS header

        ltc = utc
        if useLocalTime:
            ltc -= time.timezone
        thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
        thisTime = thisDatetime.time()

        if not ((startTime <= thisTime) and (endTime > thisTime)):
            return None

        return thisDatetime
    def __setNextFileOnline(self):
        raise ValueError, "Not implemented"

    def __setNextFileOffline(self):
        idFile = self.fileIndex

        while (True):
            idFile += 1

            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more files"
                return 0

            filename = self.filenameList[idFile]

#            if not(self.__verifyFile(filename)):
#                continue

            fileSize = os.path.getsize(filename)
            fitsObj = pyfits.open(filename, 'readonly')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fitsObj = fitsObj
        self.blockIndex = 0

        print "Setting the file: %s"%self.filename

        return 1

    def readHeader(self):
        headerObj = self.fitsObj[0]

        self.header_dict = headerObj.header
        if 'EXPNAME' in headerObj.header.keys():
            self.expName = headerObj.header['EXPNAME']

        if 'DATATYPE' in headerObj.header.keys():
            self.dataType = headerObj.header['DATATYPE']

        self.datetimestr = headerObj.header['DATETIME']
        channelList = headerObj.header['CHANNELLIST']
        channelList = channelList.split('[')
        channelList = channelList[1].split(']')
        channelList = channelList[0].split(',')
        channelList = [int(ch) for ch in channelList]
        self.channelList = channelList
        self.nChannels = headerObj.header['NCHANNELS']
        self.nHeights = headerObj.header['NHEIGHTS']
        self.ippSeconds = headerObj.header['IPPSECONDS']
        self.nCohInt = headerObj.header['NCOHINT']
        self.nIncohInt = headerObj.header['NINCOHINT']
        self.dataBlocksPerFile = headerObj.header['NBLOCK']
        self.timeZone = headerObj.header['TIMEZONE']

        self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt

        if 'COMMENT' in headerObj.header.keys():
            self.comments = headerObj.header['COMMENT']

        self.readHeightList()

    def readHeightList(self):
        self.blockIndex = self.blockIndex + 1
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def readExtension(self):
        obj = self.fitsObj[self.blockIndex]
        self.heightList = obj.data
        self.blockIndex = self.blockIndex + 1

    def setNextFile(self):

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.readHeader()

        self.nReadBlocks = 0
#        self.blockIndex = 1
        return 1
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.fits',
                            walk=True):

        pathList = []

        if not walk:
            pathList.append(path)

        else:
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                if len(matchlist) == 0:
                    thisDate += datetime.timedelta(1)
                    continue
                for match in matchlist:
                    pathList.append(os.path.join(path,match,expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "No folder was found for the date range: %s - %s" %(startDate, endDate)
            return None, None

        print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                thisDatetime = self.isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "No file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
    def setup(self, path=None,
                    startDate=None,
                    endDate=None,
                    startTime=datetime.time(0,0,0),
                    endTime=datetime.time(23,59,59),
                    set=0,
                    expLabel = "",
                    ext = None,
                    online = False,
                    delay = 60,
                    walk = True):

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if not(online):
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files were found in the folder %s \nfor the range: %s - %s"%(ext, path,
                                datetime.datetime.combine(startDate,startTime).ctime(),
                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)

        self.fileIndex = -1
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)
    def readBlock(self):
        dataObj = self.fitsObj[self.blockIndex]

        self.data = dataObj.data
        self.data_header_dict = dataObj.header
        self.utc = self.data_header_dict['UTCTIME']

        self.flagIsNewFile = 0
        self.blockIndex += 1
        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def __jumpToLastBlock(self):
        raise ValueError, "Not implemented"

    def __waitNewBlock(self):
        """
        Returns 1 if a new data block was found, 0 otherwise.

        If the reading mode is offline it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )

        return 0
    def __setNewBlock(self):

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.utc

        if self.online:
            if self.__waitNewBlock():
                return 1

        if self.nReadBlocks < self.dataBlocksPerFile:
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.utc - self.lastUTTime

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1

    def readNextBlock(self):
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def getData(self):

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if not(self.readNextBlock()):
            return 0

        if self.data is None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = self.data
        self.dataOut.data_header = self.data_header_dict
        self.dataOut.utctime = self.utc
        self.dataOut.header = self.header_dict
        self.dataOut.expName = self.expName
        self.dataOut.nChannels = self.nChannels
        self.dataOut.timeZone = self.timeZone
        self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
        self.dataOut.comments = self.comments
        self.dataOut.timeInterval = self.timeInterval
        self.dataOut.channelList = self.channelList
        self.dataOut.heightList = self.heightList
        self.dataOut.flagNoData = False

        return self.dataOut.data

    def run(self, **kwargs):

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
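
# Hedged usage sketch (added for illustration; path and dates are hypothetical).
# FitsReader.run() configures itself on the first call and then delivers one
# block per call through getData():
def _fits_reader_sketch():
    readerObj = FitsReader()
    readerObj.run(path='/tmp/fits',  # hypothetical data path
                  startDate=datetime.date(2012, 1, 1),
                  endDate=datetime.date(2012, 1, 2),
                  startTime=datetime.time(0, 0, 0),
                  endTime=datetime.time(23, 59, 59),
                  walk=True)
    return readerObj.dataOut
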
class RadacHeader():

    def __init__(self, fp):
        header = 'Raw11/Data/RadacHeader'
        self.beamCode = fp.get(header+'/BeamCode')
        self.code = fp.get(header+'/Code')
        self.frameCount = fp.get(header+'/FrameCount')
        self.modeGroup = fp.get(header+'/ModeGroup')
        self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
        self.pulseCount = fp.get(header+'/PulseCount')
        self.radacTime = fp.get(header+'/RadacTime')
        self.timeCount = fp.get(header+'/TimeCount')
        self.timeStatus = fp.get(header+'/TimeStatus')

        self.nrecords = self.pulseCount.shape[0]  # number of blocks
        self.npulses = self.pulseCount.shape[1]   # number of profiles
        self.nsamples = self.nsamplesPulse[0,0]   # number of heights

    def getIndexRangeToPulse(self, idrecord=0):
        indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
        startPulseCountId = indexToZero[0][0]
        endPulseCountId = startPulseCountId - 1
        range1 = numpy.arange(startPulseCountId,self.npulses,1)
        range2 = numpy.arange(0,startPulseCountId,1)
        return range1, range2
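
# Minimal h5py sketch (added for illustration; it only assumes the dataset paths
# that RadacHeader above already reads). It reports the per-record geometry that
# RadacHeader derives from 'Raw11/Data/RadacHeader/PulseCount':
def _radac_header_sketch(filename):
    fp = h5py.File(filename, 'r')
    headerObj = RadacHeader(fp)
    dims = (headerObj.nrecords, headerObj.npulses, headerObj.nsamples)
    print 'records: %d, pulses: %d, samples: %d' % dims
    fp.close()
    return dims
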
class AMISRReader(ProcessingUnit):

    path = None
    startDate = None
    endDate = None
    startTime = None
    endTime = None
    walk = None
    isConfig = False

    def __init__(self):
        self.set = None
        self.subset = None
        self.extension_file = '.h5'
        self.dtc_str = 'dtc'
        self.dtc_id = 0
        self.status = True
        self.isConfig = False
        self.dirnameList = []
        self.filenameList = []
        self.fileIndex = None
        self.flagNoMoreFiles = False
        self.flagIsNewFile = 0
        self.filename = ''
        self.amisrFilePointer = None
        self.radacHeaderObj = None

        self.dataOut = self.__createObjByDefault()

        self.datablock = None
        self.rest_datablock = None
        self.range = None
        self.idrecord_count = 0
        self.profileIndex = 0
        self.idpulse_range1 = None
        self.idpulse_range2 = None
        self.beamCodeByFrame = None
        self.radacTimeByFrame = None

        # original attributes, exactly as they appear in the data file
        self.beamCodesFromFile = None
        self.radacTimeFromFile = None
        self.rangeFromFile = None
        self.dataByFrame = None
        self.dataset = None

    def __createObjByDefault(self):

        dataObj = AMISR()

        return dataObj
    def __setParameters(self,path,startDate,endDate,startTime,endTime,walk):
        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.walk = walk

    def __checkPath(self):
        if os.path.exists(self.path):
            self.status = 1
        else:
            self.status = 0
            print 'Path:%s does not exist'%self.path

        return

    def __selDates(self, amisr_dirname_format):
        year = int(amisr_dirname_format[0:4])
        month = int(amisr_dirname_format[4:6])
        dom = int(amisr_dirname_format[6:8])
        thisDate = datetime.date(year,month,dom)

        if (thisDate>=self.startDate and thisDate <= self.endDate):
            return amisr_dirname_format

    def __findDataForDates(self):
        import re

        if not(self.status):
            return None

        pat = '\d+.\d+'
        dirnameList = [re.search(pat,x).string for x in os.listdir(self.path)]
        dirnameList = [self.__selDates(x) for x in dirnameList]
        dirnameList = filter(lambda x:x!=None,dirnameList)

        if len(dirnameList)>0:
            self.status = 1
            self.dirnameList = dirnameList
            self.dirnameList.sort()
        else:
            self.status = 0

        return None

    def __getTimeFromData(self):
        pass

    def __filterByGlob1(self, dirName):
        filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
        filterDict = {}
        filterDict.setdefault(dirName)
        filterDict[dirName] = filter_files
        return filterDict
    def __getFilenameList(self, fileListInKeys, dirList):
        for value in fileListInKeys:
            dirName = value.keys()[0]
            for file in value[dirName]:
                filename = os.path.join(dirName, file)
                self.filenameList.append(filename)

    def __selectDataForTimes(self):
        # the filter by time is not implemented yet
        if not(self.status):
            return None

        dirList = [os.path.join(self.path,x) for x in self.dirnameList]

        fileListInKeys = [self.__filterByGlob1(x) for x in dirList]

        self.__getFilenameList(fileListInKeys, dirList)

        if len(self.filenameList)>0:
            self.status = 1
            self.filenameList.sort()
        else:
            self.status = 0

        return None

    def __searchFilesOffline(self,
                                path,
                                startDate,
                                endDate,
                                startTime=datetime.time(0,0,0),
                                endTime=datetime.time(23,59,59),
                                walk=True):

        self.__setParameters(path, startDate, endDate, startTime, endTime, walk)

        self.__checkPath()

        self.__findDataForDates()

        self.__selectDataForTimes()

        for i in range(len(self.filenameList)):
            print "%s" %(self.filenameList[i])

        return

    def __setNextFileOffline(self):
        idFile = self.fileIndex

        while (True):
            idFile += 1

            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more files"
                return 0

            filename = self.filenameList[idFile]

            amisrFilePointer = h5py.File(filename,'r')

            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename

        self.amisrFilePointer = amisrFilePointer

        print "Setting the file: %s"%self.filename

        return 1

    def __readHeader(self):
        self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
        self.flagIsNewFile = 1

    def __setNextFile(self):
        newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readHeader()
        self.readDataBlock()
    def setup(self,path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                walk=True):

        # offline file search
        self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)

        if not(self.filenameList):
            print "There are no files in the folder: %s"%(path)
            sys.exit(-1)

        self.fileIndex = -1

        self.__setNextFile()

    def readRanges(self):
        dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')

        #self.rangeFromFile = dataset.value
        self.rangeFromFile = numpy.reshape(dataset.value,(-1))
        return self.rangeFromFile
    def readRadacTime(self,idrecord, range1, range2):
        self.radacTimeFromFile = self.radacHeaderObj.radacTime.value

        radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
        #radacTimeByFrame = dataset[idrecord - 1,range1]
        #radacTimeByFrame = dataset[idrecord,range2]

        return radacTimeByFrame

    def readBeamCode(self, idrecord, range1, range2):
        dataset = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode')
        beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
        self.beamCodesFromFile = dataset.value

        #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
        #beamcodeByFrame[range2] = dataset[idrecord, range2]
        beamcodeByFrame[range1] = dataset[idrecord, range1]
        beamcodeByFrame[range2] = dataset[idrecord, range2]

        return beamcodeByFrame

    def __setDataByFrame(self):
        ndata = 2  # because the samples are complex
        dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
        return dataByFrame

    def __readDataSet(self):
        dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
        return dataset

    def __setDataBlock(self,):
        real = self.dataByFrame[:,:,0]  # assume that index 0 is the real part
        imag = self.dataByFrame[:,:,1]  # assume that index 1 is the imaginary part
        datablock = real + imag*1j      # build the complex array
        return datablock
    def readSamples_version1(self,idrecord):
        # these first three lines should run only once
        if self.flagIsNewFile:
            self.idpulse_range1, self.idpulse_range2 = self.radacHeaderObj.getIndexRangeToPulse(0)
            self.dataByFrame = self.__setDataByFrame()
            self.beamCodeByFrame = self.readBeamCode(idrecord, self.idpulse_range1, self.idpulse_range2)
            self.radacTimeByFrame = self.readRadacTime(idrecord, self.idpulse_range1, self.idpulse_range2)

        # reading dataset
        self.dataset = self.__readDataSet()

        if idrecord == 0:
            if len(numpy.where(self.dataByFrame!=0.0)[0]) or len(numpy.where(self.dataByFrame!=0.0)[1]) or len(numpy.where(self.dataByFrame!=0.0)[2]):
                # a condition for discontinuous data is still missing;
                # by default the data of the previous record is joined in
                self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
                # time pulse
                self.radacTimeByFrame[self.idpulse_range2] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range2]
            else:
                self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]
                self.radacTimeByFrame[self.idpulse_range1] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range1]

            datablock = self.__setDataBlock()

            return datablock

        self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord - 1,self.idpulse_range1, :, :]
        self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
        datablock = self.__setDataBlock()

        self.flagIsNewFile = 0

        self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]

        return datablock
    def readSamples(self,idrecord):
        if self.flagIsNewFile:
            self.dataByFrame = self.__setDataByFrame()
            self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]

            # reading ranges
            self.readRanges()

            # reading dataset
            self.dataset = self.__readDataSet()

        self.flagIsNewFile = 0
        self.radacTimeByFrame = self.radacHeaderObj.radacTime.value[idrecord, :]
        self.dataByFrame = self.dataset[idrecord, :, :, :]
        datablock = self.__setDataBlock()
        return datablock

    def readDataBlock(self):

        self.datablock = self.readSamples(self.idrecord_count)
        #print 'record:', self.idrecord_count

        self.idrecord_count += 1
        self.profileIndex = 0

        if self.idrecord_count >= self.radacHeaderObj.nrecords:
            self.idrecord_count = 0
            self.flagIsNewFile = 1

    def readNextBlock(self):

        self.readDataBlock()

        if self.flagIsNewFile:
            self.__setNextFile()
        pass

    def __hasNotDataInBuffer(self):
        # self.radacHeaderObj.npulses should be a separate variable that accounts
        # for the number of pulses to take in the first and last record
        if self.profileIndex >= self.radacHeaderObj.npulses:
            return 1
        return 0
    def setObjProperties(self):
        self.dataOut.heightList = self.rangeFromFile/1000.0  # km
        self.dataOut.nProfiles = self.radacHeaderObj.npulses
        self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
        self.dataOut.nBaud = None
        self.dataOut.nCode = None
        self.dataOut.code = None

    def getData(self):

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        if self.__hasNotDataInBuffer():
            self.readNextBlock()
#            if not( self.readNextBlock() ):
#                return 0
#            self.getFirstHeader()

        if self.datablock is None:  # set this condition when there is no data left to read
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))

        self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
        self.dataOut.flagNoData = False

        self.profileIndex += 1

        return self.dataOut.data

    def run(self, **kwargs):
        if not(self.isConfig):
            self.setup(**kwargs)
            self.setObjProperties()
            self.isConfig = True

        self.getData()
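
# Hedged usage sketch (added for illustration; path and dates are hypothetical).
# AMISRReader.run() performs the offline file search on the first call and then
# returns one profile of shape (1, nsamples) per call:
def _amisr_reader_sketch():
    readerObj = AMISRReader()
    while not readerObj.flagNoMoreFiles:
        readerObj.run(path='/data/amisr',  # hypothetical data path
                      startDate=datetime.date(2012, 1, 1),
                      endDate=datetime.date(2012, 1, 2),
                      startTime=datetime.time(0, 0, 0),
                      endTime=datetime.time(23, 59, 59),
                      walk=True)
        if not readerObj.dataOut.flagNoData:
            print readerObj.dataOut.data.shape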