jroIO_param.py

import numpy
import time
import os
import h5py
import re
import datetime

from schainpy.model.data.jrodata import *
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
# from jroIO_base import *
from schainpy.model.io.jroIO_base import *
import schainpy

class ParamReader(ProcessingUnit):
    '''
    Reads HDF5 format files

    path      : directory where the data files are searched for

    startDate : start date of the selected range (datetime.date)

    endDate   : end date of the selected range (datetime.date)

    startTime : start time of the selected range (datetime.time)

    endTime   : end time of the selected range (datetime.time)
    '''
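
    # A minimal usage sketch (the path and date range are assumed, hypothetical values):
    #
    #   reader = ParamReader()
    #   reader.setup(path='/data/hdf5',
    #                startDate=datetime.date(2016, 1, 1),
    #                endDate=datetime.date(2016, 1, 2),
    #                startTime=datetime.time(0, 0, 0),
    #                endTime=datetime.time(23, 59, 59),
    #                walk=True, ext='.hdf5', timezone='lt')
    #   reader.getData()    # loads the first block into reader.dataOut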

    ext = ".hdf5"
    optchar = "D"
    timezone = None
    startTime = None
    endTime = None
    fileIndex = None
    utcList = None          #To select data in the utctime list
    blockList = None        #List of blocks to be read from the file
    blocksPerFile = None    #Number of blocks to be read
    blockIndex = None
    path = None

    #List of Files
    filenameList = None
    datetimeList = None

    #Hdf5 File
    listMetaname = None
    listMeta = None
    listDataname = None
    listData = None
    listShapes = None
    fp = None

    #dataOut reconstruction
    dataOut = None

    def __init__(self, **kwargs):
        ProcessingUnit.__init__(self, **kwargs)

        self.dataOut = Parameters()
        return

    def setup(self, **kwargs):

        path = kwargs['path']
        startDate = kwargs['startDate']
        endDate = kwargs['endDate']
        startTime = kwargs['startTime']
        endTime = kwargs['endTime']
        walk = kwargs['walk']

        if kwargs.has_key('ext'):
            ext = kwargs['ext']
        else:
            ext = '.hdf5'

        if kwargs.has_key('timezone'):
            self.timezone = kwargs['timezone']
        else:
            self.timezone = 'lt'

        print "[Reading] Searching files in offline mode ..."
        pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                         startTime=startTime, endTime=endTime,
                                                         ext=ext, walk=walk)

        if not(filenameList):
            print "[Reading] There are no files in the folder: %s" %(path)
            sys.exit(-1)

        self.fileIndex = -1
        self.startTime = startTime
        self.endTime = endTime

        self.__readMetadata()

        self.__setNextFileOffline()

        return

    def searchFilesOffLine(self,
                           path,
                           startDate=None,
                           endDate=None,
                           startTime=datetime.time(0,0,0),
                           endTime=datetime.time(23,59,59),
                           ext='.hdf5',
                           walk=True):

        expLabel = ''
        self.filenameList = []
        self.datetimeList = []

        pathList = []

        JRODataObj = JRODataReader()
        dateList, pathList = JRODataObj.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

        if dateList == []:
            print "[Reading] No *%s files in %s from %s to %s" %(ext, path,
                                                                 datetime.datetime.combine(startDate,startTime).ctime(),
                                                                 datetime.datetime.combine(endDate,endTime).ctime())
            return None, None

        if len(dateList) > 1:
            print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)
        else:
            print "[Reading] data was found for the date %s" %(dateList[0])

        filenameList = []
        datetimeList = []

        #----------------------------------------------------------------------------------

        for thisPath in pathList:
            # thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath, file)

                if not isFileInDateRange(filename, startDate, endDate):
                    continue

                thisDatetime = self.__isFileInTimeRange(filename, startDate, endDate, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "[Reading] No file was found in the time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(),
                                                                            datetime.datetime.combine(endDate,endTime).ctime())
            return None, None

        print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)

        # for i in range(len(filenameList)):
        #     print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __isFileInTimeRange(self, filename, startDate, endDate, startTime, endTime):
        """
        Returns the datetime of the file's first block if the data file contains data
        within the specified time range, otherwise returns None.

        Inputs:
            filename  : full path of the HDF5 data file

            startDate : start date of the selected range (datetime.date)

            endDate   : end date of the selected range (datetime.date)

            startTime : start time of the selected range (datetime.time)

            endTime   : end time of the selected range (datetime.time)

        Return:
            thisDatetime : datetime of the file if it contains data within the
                           specified range, otherwise None.

        Exceptions:
            If the file does not exist or cannot be opened.
            If the header cannot be read.
        """

        try:
            fp = h5py.File(filename, 'r')
            grp1 = fp['Data']
        except IOError:
            traceback.print_exc()
            raise IOError("The file %s can't be opened" %(filename))

        #In case it has a utctime attribute
        grp2 = grp1['utctime']
        # thisUtcTime = grp2.value[0] - 5*3600 #To convert to local time
        thisUtcTime = grp2.value[0]
        fp.close()

        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)
        # thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0])
        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        #General case
        #           o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
        #-----------o----------------------------o-----------
        #       startTime                      endTime

        if endTime >= startTime:
            thisUtcLog = numpy.logical_and(thisUtcTime > startUtcTime, thisUtcTime < endUtcTime)
            if numpy.any(thisUtcLog):    #If there is at least one block between the given hours
                return thisDatetime
            return None

        #If endTime < startTime then endTime belongs to the next day
        #<<<<<<<<<<<o                            o>>>>>>>>>>>
        #-----------o----------------------------o-----------
        #         endTime                     startTime

        if (thisDate == startDate) and numpy.all(thisUtcTime < startUtcTime):
            return None

        if (thisDate == endDate) and numpy.all(thisUtcTime > endUtcTime):
            return None

        if numpy.all(thisUtcTime < startUtcTime) and numpy.all(thisUtcTime > endUtcTime):
            return None

        return thisDatetime

    def __setNextFileOffline(self):

        self.fileIndex += 1
        idFile = self.fileIndex

        if not(idFile < len(self.filenameList)):
            print "No more files"
            return 0

        filename = self.filenameList[idFile]
        filePointer = h5py.File(filename, 'r')

        self.filename = filename
        self.fp = filePointer
        print "Setting the file: %s" %self.filename

        # self.__readMetadata()
        self.__setBlockList()
        self.__readData()
        # self.nRecords = self.fp['Data'].attrs['blocksPerFile']
        # self.nRecords = self.fp['Data'].attrs['nRecords']
        self.blockIndex = 0
        return 1

    def __setBlockList(self):
        '''
        Selects the data blocks within the defined time range.

        Input:
            self.fp
            self.startTime
            self.endTime

        Affected:
            self.blockList
            self.blocksPerFile
        '''
        fp = self.fp
        startTime = self.startTime
        endTime = self.endTime

        grp = fp['Data']
        thisUtcTime = grp['utctime'].value.astype(numpy.float)[0]

        #ERROOOOR
        if self.timezone == 'lt':
            thisUtcTime -= 5*3600

        thisDatetime = datetime.datetime.fromtimestamp(thisUtcTime[0] + 5*3600)

        thisDate = thisDatetime.date()
        thisTime = thisDatetime.time()

        startUtcTime = (datetime.datetime.combine(thisDate,startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
        endUtcTime = (datetime.datetime.combine(thisDate,endTime) - datetime.datetime(1970, 1, 1)).total_seconds()

        ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]

        self.blockList = ind
        self.blocksPerFile = len(ind)

        return

    def __readMetadata(self):
        '''
        Reads Metadata

        self.pathMeta
        self.listShapes
        self.listMetaname
        self.listMeta
        '''

        # grp = self.fp['Data']
        # pathMeta = os.path.join(self.path, grp.attrs['metadata'])
        #
        # if pathMeta == self.pathMeta:
        #     return
        # else:
        #     self.pathMeta = pathMeta
        #
        # filePointer = h5py.File(self.pathMeta,'r')
        # groupPointer = filePointer['Metadata']

        filename = self.filenameList[0]
        fp = h5py.File(filename, 'r')

        gp = fp['Metadata']

        listMetaname = []
        listMetadata = []
        for item in gp.items():
            name = item[0]

            if name == 'array dimensions':
                table = gp[name][:]
                listShapes = {}
                for shapes in table:
                    listShapes[shapes[0]] = numpy.array([shapes[1], shapes[2], shapes[3], shapes[4], shapes[5]])
            else:
                data = gp[name].value
                listMetaname.append(name)
                listMetadata.append(data)

            # if name=='type':
            #     self.__initDataOut(data)

        self.listShapes = listShapes
        self.listMetaname = listMetaname
        self.listMeta = listMetadata

        fp.close()
        return

    def __readData(self):
        grp = self.fp['Data']
        listdataname = []
        listdata = []

        for item in grp.items():
            name = item[0]
            listdataname.append(name)

            array = self.__setDataArray(grp[name], self.listShapes[name])
            listdata.append(array)

        self.listDataname = listdataname
        self.listData = listdata
        return

    def __setDataArray(self, dataset, shapes):

        nDims = shapes[0]
        nDim2 = shapes[1]   #Dimension 0
        nDim1 = shapes[2]   #Dimension 1, number of Points or Parameters
        nDim0 = shapes[3]   #Dimension 2, number of samples or ranges
        mode = shapes[4]    #Mode of storing

        blockList = self.blockList
        blocksPerFile = self.blocksPerFile

        #Depending on what mode the data was stored
        if mode == 0:   #Divided in channels
            arrayData = dataset.value.astype(numpy.float)[0][blockList]
        if mode == 1:   #Divided in parameters
            strds = 'table'
            nDatas = nDim1
            newShapes = (blocksPerFile, nDim2, nDim0)
        elif mode == 2: #Concatenated in a table
            strds = 'table0'
            arrayData = dataset[strds].value
            #Selecting part of the dataset
            utctime = arrayData[:,0]
            u, indices = numpy.unique(utctime, return_index=True)

            if blockList.size != indices.size:
                indMin = indices[blockList[0]]
                if blockList[1] + 1 >= indices.size:
                    arrayData = arrayData[indMin:,:]
                else:
                    indMax = indices[blockList[1] + 1]
                    arrayData = arrayData[indMin:indMax,:]
            return arrayData

        #One dimension
        if nDims == 0:
            arrayData = dataset.value.astype(numpy.float)[0][blockList]

        #Two dimensions
        elif nDims == 2:
            arrayData = numpy.zeros((blocksPerFile, nDim1, nDim0))
            newShapes = (blocksPerFile, nDim0)
            nDatas = nDim1

            for i in range(nDatas):
                data = dataset[strds + str(i)].value
                arrayData[:,i,:] = data[blockList,:]

        #Three dimensions
        else:
            arrayData = numpy.zeros((blocksPerFile, nDim2, nDim1, nDim0))
            for i in range(nDatas):

                data = dataset[strds + str(i)].value

                for b in range(blockList.size):
                    arrayData[b,:,i,:] = data[:,:,blockList[b]]

        return arrayData
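
    # Summary of the storage modes handled above (derived from the code; shapes are
    # those produced for the selected blocks of one file):
    #   mode 0: a single dataset indexed by block  -> 1-D array of length blocksPerFile
    #   mode 1: one 'table<i>' dataset per parameter/channel -> arrays of shape
    #           (blocksPerFile, nDim1, nDim0) or (blocksPerFile, nDim2, nDim1, nDim0)
    #   mode 2: a single 'table0' dataset with one row per record; rows are selected
    #           through their utctime column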

    def __setDataOut(self):
        listMeta = self.listMeta
        listMetaname = self.listMetaname
        listDataname = self.listDataname
        listData = self.listData
        listShapes = self.listShapes

        blockIndex = self.blockIndex
        # blockList = self.blockList

        for i in range(len(listMeta)):
            setattr(self.dataOut, listMetaname[i], listMeta[i])

        for j in range(len(listData)):
            nShapes = listShapes[listDataname[j]][0]
            mode = listShapes[listDataname[j]][4]
            if nShapes == 1:
                setattr(self.dataOut, listDataname[j], listData[j][blockIndex])
            elif nShapes > 1:
                setattr(self.dataOut, listDataname[j], listData[j][blockIndex,:])
            elif mode == 0:
                setattr(self.dataOut, listDataname[j], listData[j][blockIndex])
            #Mode Meteors
            elif mode == 2:
                selectedData = self.__selectDataMode2(listData[j], blockIndex)
                setattr(self.dataOut, listDataname[j], selectedData)
        return

    def __selectDataMode2(self, data, blockIndex):
        utctime = data[:,0]
        aux, indices = numpy.unique(utctime, return_inverse=True)
        selInd = numpy.where(indices == blockIndex)[0]
        selData = data[selInd,:]

        return selData

    def getData(self):

        # if self.flagNoMoreFiles:
        #     self.dataOut.flagNoData = True
        #     print 'Process finished'
        #     return 0
        #
        if self.blockIndex == self.blocksPerFile:
            if not( self.__setNextFileOffline() ):
                self.dataOut.flagNoData = True
                return 0

        # if self.datablock == None: # set this condition when there is no more data to read
        #     self.dataOut.flagNoData = True
        #     return 0
        # self.__readData()
        self.__setDataOut()
        self.dataOut.flagNoData = False

        self.blockIndex += 1

        return

    def run(self, **kwargs):

        if not(self.isConfig):
            self.setup(**kwargs)
            # self.setObjProperties()
            self.isConfig = True

        self.getData()

        return


class ParamWriter(Operation):
    '''
    HDF5 Writer, stores parameter data in HDF5 format files

    path:          path where the files will be stored

    blocksPerFile: number of blocks that will be saved per HDF5 file

    mode:          data stacking mode: '0' channels, '1' parameters, '2' table (for meteors)

    metadataList:  list of attributes that will be stored as metadata

    dataList:      list of attributes that will be stored as data
    '''
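
    # A minimal usage sketch (the attribute names in metadataList/dataList are
    # illustrative and must exist on the incoming dataOut object; the output path
    # is hypothetical):
    #
    #   writer = ParamWriter()
    #   writer.run(dataOut,
    #              path='/output/hdf5',
    #              blocksPerFile=100,
    #              metadataList=['heightList', 'timeZone'],
    #              dataList=['data_param', 'utctime'],
    #              mode=1)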

    ext = ".hdf5"
    optchar = "D"
    metaoptchar = "M"
    metaFile = None
    filename = None
    path = None
    setFile = None
    setType = None      #None: sequential set number; otherwise the set number is taken from the time of day
    fp = None
    grp = None
    ds = None
    firsttime = True

    #Configurations
    blocksPerFile = None
    blockIndex = None
    dataOut = None

    #Data Arrays
    dataList = None
    metadataList = None
    # arrayDim = None
    dsList = None       #List of dictionaries with dataset properties
    tableDim = None
    # dtype = [('arrayName', 'S20'),('nChannels', 'i'), ('nPoints', 'i'), ('nSamples', 'i'),('mode', 'b')]
    dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
    currentDay = None
    lastTime = None
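
    # Each row of tableDim (written to the 'array dimensions' metadata dataset) follows
    # self.dtype; an illustrative row for a 3-D variable stored in mode 1 would be
    # ('data_param', 3, nChannels, nParameters, nHeights, 1), i.e. one 'table<i>'
    # dataset per parameter.
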
    def __init__(self, **kwargs):
        Operation.__init__(self, **kwargs)

        self.isConfig = False
        return

    def setup(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, **kwargs):
        self.path = path
        self.blocksPerFile = blocksPerFile
        self.metadataList = metadataList
        self.dataList = dataList
        self.dataOut = dataOut
        self.mode = mode

        if self.mode is not None:
            self.mode = numpy.zeros(len(self.dataList)) + mode
        else:
            self.mode = numpy.ones(len(self.dataList))

        arrayDim = numpy.zeros((len(self.dataList), 5))

        #Table dimensions
        dtype0 = self.dtype
        tableList = []

        #Dictionary and list of tables
        dsList = []

        for i in range(len(self.dataList)):
            dsDict = {}
            dataAux = getattr(self.dataOut, self.dataList[i])
            dsDict['variable'] = self.dataList[i]

            #--------------------- Conditionals ------------------------
            #There is no data
            if dataAux is None:
                return 0

            #Not array, just a number
            #Mode 0
            if type(dataAux) == float or type(dataAux) == int:
                dsDict['mode'] = 0
                dsDict['nDim'] = 0
                arrayDim[i,0] = 0
                dsList.append(dsDict)

            #Mode 2: meteors
            elif mode[i] == 2:
                # dsDict['nDim'] = 0
                dsDict['dsName'] = 'table0'
                dsDict['mode'] = 2              #Mode meteors
                dsDict['shape'] = dataAux.shape[-1]
                dsDict['nDim'] = 0
                dsDict['dsNumber'] = 1

                arrayDim[i,3] = dataAux.shape[-1]
                arrayDim[i,4] = mode[i]         #Mode the data was stored

                dsList.append(dsDict)

            #Mode 1
            else:
                arrayDim0 = dataAux.shape       #Data dimensions
                arrayDim[i,0] = len(arrayDim0)  #Number of array dimensions
                arrayDim[i,4] = mode[i]         #Mode the data was stored

                strtable = 'table'
                dsDict['mode'] = 1              #Mode parameters

                # Three-dimension arrays
                if len(arrayDim0) == 3:
                    arrayDim[i,1:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,2:4]
                    dsDict['nDim'] = 3

                    for j in range(nTables):
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # Two-dimension arrays
                elif len(arrayDim0) == 2:
                    arrayDim[i,2:-1] = numpy.array(arrayDim0)
                    nTables = int(arrayDim[i,2])
                    dsDict['dsNumber'] = nTables
                    dsDict['shape'] = arrayDim[i,3]
                    dsDict['nDim'] = 2

                    for j in range(nTables):
                        dsDict = dsDict.copy()
                        dsDict['dsName'] = strtable + str(j)
                        dsList.append(dsDict)

                # One-dimension arrays
                elif len(arrayDim0) == 1:
                    arrayDim[i,3] = arrayDim0[0]
                    dsDict['shape'] = arrayDim0[0]
                    dsDict['dsNumber'] = 1
                    dsDict['dsName'] = strtable + str(0)
                    dsDict['nDim'] = 1
                    dsList.append(dsDict)

            table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]), dtype=dtype0)
            tableList.append(table)

        # self.arrayDim = arrayDim
        self.dsList = dsList
        self.tableDim = numpy.array(tableList, dtype=dtype0)
        self.blockIndex = 0

        timeTuple = time.localtime(dataOut.utctime)
        self.currentDay = timeTuple.tm_yday
        return 1
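
    # An illustrative dsList entry produced above for a 2-D variable of shape
    # (nChannels, nHeights) stored in mode 1 (the values are hypothetical):
    #   {'variable': 'data_param', 'mode': 1, 'nDim': 2, 'dsNumber': nChannels,
    #    'shape': nHeights, 'dsName': 'table0'}
    # One copy of the dictionary is appended per channel, with 'dsName' running
    # from 'table0' to 'table<nChannels-1>'.
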
    def putMetadata(self):

        fp = self.createMetadataFile()
        self.writeMetadata(fp)
        fp.close()
        return

    def createMetadataFile(self):
        ext = self.ext
        path = self.path
        setFile = self.setFile

        timeTuple = time.localtime(self.dataOut.utctime)

        subfolder = ''
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1    #initialize the set counter

        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
        fullpath = os.path.join( path, subfolder )

        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            setFile = -1    #initialize the set counter

        else:
            filesList = os.listdir( fullpath )
            filesList = sorted( filesList, key=str.lower )
            if len( filesList ) > 0:
                filesList = [k for k in filesList if 'M' in k]
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] )    #resume the set counter from the last file
                else:
                    setFile = -1
            else:
                setFile = -1    #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            setFile = timeTuple.tm_hour*60 + timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        filename = os.path.join( path, subfolder, file )
        self.metaFile = file
        #Setting HDF5 File
        fp = h5py.File(filename, 'w')
        return fp

    def writeMetadata(self, fp):

        grp = fp.create_group("Metadata")
        grp.create_dataset('array dimensions', data=self.tableDim, dtype=self.dtype)

        for i in range(len(self.metadataList)):
            grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
        return

    def timeFlag(self):
        currentTime = self.dataOut.utctime

        if self.lastTime is None:
            self.lastTime = currentTime

        #Day
        timeTuple = time.localtime(currentTime)
        dataDay = timeTuple.tm_yday

        #Time
        timeDiff = currentTime - self.lastTime

        #If the day changed, or the gap between consecutive records exceeds the limit (3 hours)
        if dataDay != self.currentDay:
            self.currentDay = dataDay
            return True
        elif timeDiff > 3*60*60:
            self.lastTime = currentTime
            return True
        else:
            self.lastTime = currentTime
            return False
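
    # For example (assumed timestamps): two consecutive dataOut.utctime values four
    # hours apart make timeFlag() return True, so putData() rolls over to a new
    # output file even within the same day.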

    def setNextFile(self):

        ext = self.ext
        path = self.path
        setFile = self.setFile
        mode = self.mode

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
        fullpath = os.path.join( path, subfolder )

        if os.path.exists(fullpath):
            filesList = os.listdir( fullpath )
            filesList = [k for k in filesList if 'M' in k]
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    setFile = int( filen[8:11] )    #resume the set counter from the last file
                else:
                    setFile = -1
            else:
                setFile = -1    #initialize the set counter
        else:
            os.makedirs(fullpath)
            setFile = -1        #initialize the set counter

        if self.setType is None:
            setFile += 1
            file = '%s%4.4d%3.3d%03d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )
        else:
            setFile = timeTuple.tm_hour*60 + timeTuple.tm_min
            file = '%s%4.4d%3.3d%04d%s' % (self.metaoptchar,
                                           timeTuple.tm_year,
                                           timeTuple.tm_yday,
                                           setFile,
                                           ext )

        filename = os.path.join( path, subfolder, file )
        #Setting HDF5 File
        fp = h5py.File(filename, 'w')
        #Write metadata
        self.writeMetadata(fp)
        #Write data
        grp = fp.create_group("Data")
        # grp.attrs['metadata'] = self.metaFile

        # grp.attrs['blocksPerFile'] = 0
        ds = []
        data = []
        dsList = self.dsList
        i = 0
        while i < len(dsList):
            dsInfo = dsList[i]
            #One-dimension data
            if dsInfo['mode'] == 0:
                # ds0 = grp.create_dataset(self.dataList[i], (1,1), maxshape=(1,self.blocksPerFile), chunks=True, dtype='S20')
                ds0 = grp.create_dataset(dsInfo['variable'], (1,1), maxshape=(1,self.blocksPerFile), chunks=True, dtype=numpy.float64)
                ds.append(ds0)
                data.append([])
                i += 1
                continue
            # nDimsForDs.append(nDims[i])
            elif dsInfo['mode'] == 2:
                grp0 = grp.create_group(dsInfo['variable'])
                ds0 = grp0.create_dataset(dsInfo['dsName'], (1,dsInfo['shape']), data=numpy.zeros((1,dsInfo['shape'])), maxshape=(None,dsInfo['shape']), chunks=True)
                ds.append(ds0)
                data.append([])
                i += 1
                continue

            elif dsInfo['mode'] == 1:
                grp0 = grp.create_group(dsInfo['variable'])

                for j in range(dsInfo['dsNumber']):
                    dsInfo = dsList[i]
                    tableName = dsInfo['dsName']

                    if dsInfo['nDim'] == 3:
                        shape = dsInfo['shape'].astype(int)    #for 3-D variables 'shape' holds two dimensions
                        ds0 = grp0.create_dataset(tableName, (shape[0],shape[1],1), data=numpy.zeros((shape[0],shape[1],1)), maxshape=(None,shape[1],None), chunks=True)
                    else:
                        shape = int(dsInfo['shape'])
                        ds0 = grp0.create_dataset(tableName, (1,shape), data=numpy.zeros((1,shape)), maxshape=(None,shape), chunks=True)

                    ds.append(ds0)
                    data.append([])
                    i += 1
                    # nDimsForDs.append(nDims[i])

        fp.flush()
        fp.close()

        # self.nDatas = nDatas
        # self.nDims = nDims
        # self.nDimsForDs = nDimsForDs

        #Saving variables
        print 'Writing the file: %s' %filename
        self.filename = filename
        # self.fp = fp
        # self.grp = grp
        # self.grp.attrs.modify('nRecords', 1)
        self.ds = ds
        self.data = data
        # self.setFile = setFile
        self.firsttime = True
        self.blockIndex = 0
        return

    def putData(self):

        if self.blockIndex == self.blocksPerFile or self.timeFlag():
            self.setNextFile()

        # if not self.firsttime:
        self.readBlock()
        self.setBlock()     #Prepare data to be written
        self.writeBlock()   #Write data

        return

    def readBlock(self):
        '''
        Reopens the current output file and reloads the dataset handles into

        self.ds
        '''
        dsList = self.dsList
        ds = self.ds
        #Setting HDF5 File
        fp = h5py.File(self.filename, 'r+')
        grp = fp["Data"]
        ind = 0

        # grp.attrs['blocksPerFile'] = 0
        while ind < len(dsList):
            dsInfo = dsList[ind]

            if dsInfo['mode'] == 0:
                ds0 = grp[dsInfo['variable']]
                ds[ind] = ds0
                ind += 1
            else:
                grp0 = grp[dsInfo['variable']]

                for j in range(dsInfo['dsNumber']):
                    dsInfo = dsList[ind]
                    ds0 = grp0[dsInfo['dsName']]
                    ds[ind] = ds0
                    ind += 1

        self.fp = fp
        self.grp = grp
        self.ds = ds

        return

    def setBlock(self):
        '''
        Arranges the dataOut attributes into the data arrays to be written

        self.data
        '''
        #Creating Arrays
        dsList = self.dsList
        data = self.data
        ind = 0

        while ind < len(dsList):
            dsInfo = dsList[ind]
            dataAux = getattr(self.dataOut, dsInfo['variable'])

            mode = dsInfo['mode']
            nDim = dsInfo['nDim']

            if mode == 0 or mode == 2 or nDim == 1:
                data[ind] = dataAux
                ind += 1
            # elif nDim == 1:
            #     data[ind] = numpy.reshape(dataAux,(numpy.size(dataAux),1))
            #     ind += 1
            elif nDim == 2:
                for j in range(dsInfo['dsNumber']):
                    data[ind] = dataAux[j,:]
                    ind += 1
            elif nDim == 3:
                for j in range(dsInfo['dsNumber']):
                    data[ind] = dataAux[:,j,:]
                    ind += 1

        self.data = data
        return

    def writeBlock(self):
        '''
        Saves the block in the HDF5 file
        '''
        dsList = self.dsList

        for i in range(len(self.ds)):
            dsInfo = dsList[i]
            nDim = dsInfo['nDim']
            mode = dsInfo['mode']

            # First time
            if self.firsttime:
                # self.ds[i].resize(self.data[i].shape)
                # self.ds[i][self.blockIndex,:] = self.data[i]
                if type(self.data[i]) == numpy.ndarray:

                    if nDim == 3:
                        self.data[i] = self.data[i].reshape((self.data[i].shape[0], self.data[i].shape[1], 1))
                        self.ds[i].resize(self.data[i].shape)
                    if mode == 2:
                        self.ds[i].resize(self.data[i].shape)
                self.ds[i][:] = self.data[i]
            else:

                # From second time
                # Meteors!
                if mode == 2:
                    dataShape = self.data[i].shape
                    dsShape = self.ds[i].shape
                    self.ds[i].resize((self.ds[i].shape[0] + dataShape[0], self.ds[i].shape[1]))
                    self.ds[i][dsShape[0]:,:] = self.data[i]
                # No dimension
                elif mode == 0:
                    self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
                    self.ds[i][0,-1] = self.data[i]
                # One dimension
                elif nDim == 1:
                    self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
                    self.ds[i][-1,:] = self.data[i]
                # Two dimensions
                elif nDim == 2:
                    self.ds[i].resize((self.ds[i].shape[0] + 1, self.ds[i].shape[1]))
                    self.ds[i][self.blockIndex,:] = self.data[i]
                # Three dimensions
                elif nDim == 3:
                    self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1], self.ds[i].shape[2] + 1))
                    self.ds[i][:,:,-1] = self.data[i]

        self.firsttime = False
        self.blockIndex += 1

        #Close to save changes
        self.fp.flush()
        self.fp.close()
        return
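
    # A minimal sketch of the resize-and-append pattern used above (assumed names:
    # 'fp' is a writable h5py.File and 'block' a 1-D numpy array of length N):
    #
    #   ds = fp.create_dataset('table0', (1, N), maxshape=(None, N), chunks=True)
    #   ds[:] = block                          # first block
    #   ds.resize((ds.shape[0] + 1, N))        # grow along the block axis
    #   ds[-1, :] = block                      # append the next block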

    def run(self, dataOut, path=None, blocksPerFile=10, metadataList=None, dataList=None, mode=None, **kwargs):

        if not(self.isConfig):
            flagdata = self.setup(dataOut, path=path, blocksPerFile=blocksPerFile,
                                  metadataList=metadataList, dataList=dataList, mode=mode, **kwargs)

            if not(flagdata):
                return

            self.isConfig = True
            # self.putMetadata()
            self.setNextFile()

        self.putData()
        return