diff --git a/schainpy/model/graphics/jroplot_parameters.py b/schainpy/model/graphics/jroplot_parameters.py index 3f022a0..1effcca 100644 --- a/schainpy/model/graphics/jroplot_parameters.py +++ b/schainpy/model/graphics/jroplot_parameters.py @@ -5,6 +5,7 @@ import numpy from schainpy.model.graphics.jroplot_base import Plot, plt from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot from schainpy.utils import log +import wradlib as wrl EARTH_RADIUS = 6.3710e3 @@ -50,7 +51,7 @@ class SnrPlot(RTIPlot): def update(self, dataOut): data = { - 'snr': 10*numpy.log10(dataOut.data_snr) + 'snr': 10*numpy.log10(dataOut.data_snr) } return data, {} @@ -66,7 +67,7 @@ class DopplerPlot(RTIPlot): def update(self, dataOut): data = { - 'dop': 10*numpy.log10(dataOut.data_dop) + 'dop': 10*numpy.log10(dataOut.data_dop) } return data, {} @@ -82,7 +83,7 @@ class PowerPlot(RTIPlot): def update(self, dataOut): data = { - 'pow': 10*numpy.log10(dataOut.data_pow) + 'pow': 10*numpy.log10(dataOut.data_pow) } return data, {} @@ -166,7 +167,7 @@ class GenericRTIPlot(Plot): self.nrows = self.data.shape(self.attr_data)[0] self.nplots = self.nrows self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95}) - + if not self.xlabel: self.xlabel = 'Time' @@ -184,7 +185,7 @@ class GenericRTIPlot(Plot): meta = {} return data, meta - + def plot(self): # self.data.normalize_heights() self.x = self.data.times @@ -356,3 +357,209 @@ class PolarMapPlot(Plot): self.titles = ['{} {}'.format( self.data.parameters[x], title) for x in self.channels] +class WeatherPlot(Plot): + CODE = 'weather' + plot_name = 'weather' + plot_type = 'ppistyle' + buffering = False + + def setup(self): + self.ncols = 1 + self.nrows = 1 + self.nplots= 1 + self.ylabel= 'Range [Km]' + self.titles= ['Weather'] + self.colorbar=False + self.width =8 + self.height =8 + self.ini =0 + self.len_azi =0 + self.buffer_ini = None + self.buffer_azi = None + self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08}) + self.flag =0 + self.indicador= 0 + + def update(self, dataOut): + + data = {} + meta = {} + data['weather'] = 10*numpy.log10(dataOut.data_360/(650**2)) + data['azi'] = dataOut.data_azi + + return data, meta + + def plot(self): + stoprange = float(33*1.5) + rangestep = float(0.15) + r = numpy.arange(0, stoprange, rangestep) + self.y = 2*r + data = self.data[-1] + + tmp_v = data['weather'] + tmp_z = data['azi'] + res = 1 + step = (360/(res*tmp_v.shape[0])) + mode = 1 + if mode==0: + print("self.ini",self.ini) + val = numpy.mean(tmp_v[:,0]) + self.len_azi = len(tmp_z) + ones = numpy.ones([(360-tmp_v.shape[0]),tmp_v.shape[1]])*val + self.buffer_ini = numpy.vstack((tmp_v,ones)) + + n = ((360/res)-len(tmp_z)) + start = tmp_z[-1]+res + end = tmp_z[0]-res + if start>end: + end = end+360 + azi_zeros = numpy.linspace(start,end,int(n)) + azi_zeros = numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros) + self.buffer_ini_azi = numpy.hstack((tmp_z,azi_zeros)) + self.ini = self.ini+1 + + if mode==1: + print("self.ini",self.ini) + if self.ini==0: + res = 1 + step = (360/(res*tmp_v.shape[0])) + val = numpy.mean(tmp_v[:,0]) + self.len_azi = len(tmp_z) + self.buf_tmp = tmp_v + ones = numpy.ones([(360-tmp_v.shape[0]),tmp_v.shape[1]])*val + self.buffer_ini = numpy.vstack((tmp_v,ones)) + + n = ((360/res)-len(tmp_z)) + start = tmp_z[-1]+res + end = tmp_z[0]-res + if start>end: + end =end+360 + azi_zeros = numpy.linspace(start,end,int(n)) + azi_zeros = 
numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros) + self.buf_azi = tmp_z + self.buffer_ini_azi = numpy.hstack((tmp_z,azi_zeros)) + self.ini = self.ini+1 + elif 031: + start= tmp_z[0] + end =tmp_z[-1] + print("start","end",start,end) + if self.ini==32: + tmp_v=tmp_v+20 + if self.ini==33: + tmp_v=tmp_v+10 + if self.ini==34: + tmp_v=tmp_v+20 + if self.ini==35: + tmp_v=tmp_v+20 + ''' + self.buf_tmp= numpy.vstack((self.buf_tmp,tmp_v)) + if self.buf_tmp.shape[0]==360: + self.buffer_ini=self.buf_tmp + else: + val=30.0 + ones = numpy.ones([(360-self.buf_tmp.shape[0]),self.buf_tmp.shape[1]])*val + self.buffer_ini = numpy.vstack((self.buf_tmp,ones)) + + self.buf_azi = numpy.hstack((self.buf_azi,tmp_z)) + n = ((360/res)-len(self.buf_azi)) + if n==0: + self.buffer_ini_azi = self.buf_azi + else: + start = self.buf_azi[-1]+res + end = self.buf_azi[0]-res + if start>end: + end =end+360 + azi_zeros = numpy.linspace(start,end,int(n)) + azi_zeros = numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros) + if tmp_z[0]=2: + if self.flag=tmp_z[-1])[0][0]) + print("tmp_r",tmp_r) + index_f=(self.flag+1)*len(tmp_z)+tmp_r + + if len(tmp_z[index_i:])>len(self.buf_azi[len(tmp_z)*(self.flag+1):index_f]): + final = len(self.buf_azi[len(tmp_z)*(self.flag+1):index_f]) + else: + final= len(tmp_z[index_i:]) + self.buf_azi[len(tmp_z)*(self.flag+1):index_f]=tmp_z[index_i:index_i+final] + self.buf_tmp[len(tmp_z)*(self.flag+1):index_f,:]=tmp_v[index_i:index_i+final,:] + if limit_i=tmp_z[-1])[0][0]) + n_p =index_f-len(tmp_z)*(self.flag+1) + if n_p>0: + self.buf_azi[len(tmp_z)*(self.flag+1):index_f]=tmp_z[-1]*numpy.ones(n_p) + self.buf_tmp[len(tmp_z)*(self.flag+1):index_f,:]=tmp_v[-1,:]*numpy.ones([n_p,tmp_v.shape[1]]) + + ''' + if self.buf_azi[len(tmp_z)]=tmp_z[-1])[0][0]) + #print("index",index_i,index_f) + if len(tmp_z[index_i:])>len(self.buf_azi[len(tmp_z):index_f]): + final = len(self.buf_azi[len(tmp_z):index_f]) + else: + final = len(tmp_z[index_i:]) + self.buf_azi[len(tmp_z):index_f]=tmp_z[index_i:index_i+final] + self.buf_tmp[len(tmp_z):index_f,:]=tmp_v[index_i:index_i+final,:] + ''' + self.buf_tmp[len(tmp_z)*(self.flag):len(tmp_z)*(self.flag+1),:]=tmp_v + self.buf_azi[len(tmp_z)*(self.flag):len(tmp_z)*(self.flag+1)] = tmp_z + self.buffer_ini=self.buf_tmp + self.buffer_ini_azi = self.buf_azi + print("--------salida------------") + start= tmp_z[0] + end = tmp_z[-1] + print("start","end",start,end) + print(self.buffer_ini_azi[:120]) + self.ini= self.ini+1 + self.flag = self.flag +1 + if self.flag==step: + self.flag=0 + + for i,ax in enumerate(self.axes): + if ax.firsttime: + plt.clf() + cgax, pm = wrl.vis.plot_ppi(self.buffer_ini,r=r,az=self.buffer_ini_azi,fig=self.figures[0], proj='cg', vmin=30, vmax=70) + else: + plt.clf() + cgax, pm = wrl.vis.plot_ppi(self.buffer_ini,r=r,az=self.buffer_ini_azi,fig=self.figures[0], proj='cg', vmin=30, vmax=70) + caax = cgax.parasites[0] + paax = cgax.parasites[1] + cbar = plt.gcf().colorbar(pm, pad=0.075) + caax.set_xlabel('x_range [km]') + caax.set_ylabel('y_range [km]') + plt.text(1.0, 1.05, 'azimuth', transform=caax.transAxes, va='bottom',ha='right') diff --git a/schainpy/model/graphics/jroplot_spectra.py b/schainpy/model/graphics/jroplot_spectra.py index 28b72ec..0d18717 100644 --- a/schainpy/model/graphics/jroplot_spectra.py +++ b/schainpy/model/graphics/jroplot_spectra.py @@ -46,9 +46,9 @@ class SpectraPlot(Plot): meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1)) if self.CODE == 'spc_moments': data['moments'] = dataOut.moments - - return 
data, meta - + + return data, meta + def plot(self): if self.xaxis == "frequency": x = self.data.xrange[0] @@ -146,8 +146,8 @@ class CrossSpectraPlot(Plot): data['cspc'] = numpy.array(tmp) - return data, meta - + return data, meta + def plot(self): if self.xaxis == "frequency": @@ -159,7 +159,7 @@ class CrossSpectraPlot(Plot): else: x = self.data.xrange[2] self.xlabel = "Velocity (m/s)" - + self.titles = [] y = self.data.yrange @@ -183,13 +183,13 @@ class CrossSpectraPlot(Plot): ax.plt.set_array(coh.T.ravel()) self.titles.append( 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1])) - + ax = self.axes[2 * n + 1] if ax.firsttime: ax.plt = ax.pcolormesh(x, y, phase.T, vmin=-180, vmax=180, - cmap=plt.get_cmap(self.colormap_phase) + cmap=plt.get_cmap(self.colormap_phase) ) else: ax.plt.set_array(phase.T.ravel()) @@ -317,7 +317,7 @@ class PhasePlot(CoherencePlot): class NoisePlot(Plot): ''' - Plot for noise + Plot for noise ''' CODE = 'noise' @@ -362,7 +362,7 @@ class NoisePlot(Plot): y = Y[ch] self.axes[0].lines[ch].set_data(x, y) - + class PowerProfilePlot(Plot): CODE = 'pow_profile' @@ -394,10 +394,10 @@ class PowerProfilePlot(Plot): self.y = y x = self.data[-1][self.CODE] - + if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1 - + if self.axes[0].firsttime: for ch in self.data.channels: self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch)) @@ -559,7 +559,7 @@ class BeaconPhase(Plot): server=None, folder=None, username=None, password=None, ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): - if dataOut.flagNoData: + if dataOut.flagNoData: return dataOut if not isTimeInHourRange(dataOut.datatime, xmin, xmax): @@ -699,4 +699,91 @@ class BeaconPhase(Plot): thisDatetime=thisDatetime, update_figfile=update_figfile) - return dataOut \ No newline at end of file + return dataOut + +class WeatherPlot(Plot): + CODE = 'weather' + plot_name = 'weather' + plot_type = 'ppistyle' + buffering = False + + def setup(self): + self.nplots = len(self.data.channels) + self.ncols = int(numpy.sqrt(self.nplots) + 0.9) + self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9) + self.height = 2.6 * self.nrows + self.cb_label = 'dB' + if self.showprofile: + self.width = 4 * self.ncols + else: + self.width = 3.5 * self.ncols + self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08}) + self.ylabel = 'Range [km]' + + def update(self, dataOut): + + data = {} + meta = {} + spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor) + data['spc'] = spc + data['rti'] = dataOut.getPower() + data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor) + meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1)) + if self.CODE == 'spc_moments': + data['moments'] = dataOut.moments + + return data, meta + + def plot(self): + if self.xaxis == "frequency": + x = self.data.xrange[0] + self.xlabel = "Frequency (kHz)" + elif self.xaxis == "time": + x = self.data.xrange[1] + self.xlabel = "Time (ms)" + else: + x = self.data.xrange[2] + self.xlabel = "Velocity (m/s)" + + if self.CODE == 'spc_moments': + x = self.data.xrange[2] + self.xlabel = "Velocity (m/s)" + + self.titles = [] + + y = self.data.yrange + self.y = y + + data = self.data[-1] + z = data['spc'] + + for n, ax in enumerate(self.axes): + noise = data['noise'][n] + if self.CODE == 'spc_moments': + mean = data['moments'][n, 2] + if ax.firsttime: + self.xmax = self.xmax if self.xmax else numpy.nanmax(x) + self.xmin = self.xmin if 
self.xmin else -self.xmax + self.zmin = self.zmin if self.zmin else numpy.nanmin(z) + self.zmax = self.zmax if self.zmax else numpy.nanmax(z) + ax.plt = ax.pcolormesh(x, y, z[n].T, + vmin=self.zmin, + vmax=self.zmax, + cmap=plt.get_cmap(self.colormap) + ) + + if self.showprofile: + ax.plt_profile = self.pf_axes[n].plot( + data['rti'][n], y)[0] + ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y, + color="k", linestyle="dashed", lw=1)[0] + if self.CODE == 'spc_moments': + ax.plt_mean = ax.plot(mean, y, color='k')[0] + else: + ax.plt.set_array(z[n].T.ravel()) + if self.showprofile: + ax.plt_profile.set_data(data['rti'][n], y) + ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y) + if self.CODE == 'spc_moments': + ax.plt_mean.set_data(mean, y) + self.titles.append('CH {}: {:3.2f}dB'.format(n, noise)) diff --git a/schainpy/model/io/jroIO_base.py b/schainpy/model/io/jroIO_base.py index 7d17366..c01cc63 100644 --- a/schainpy/model/io/jroIO_base.py +++ b/schainpy/model/io/jroIO_base.py @@ -384,7 +384,7 @@ def isRadarFolder(folder): def isRadarFile(file): - try: + try: year = int(file[1:5]) doy = int(file[5:8]) set = int(file[8:11]) @@ -395,10 +395,10 @@ def isRadarFile(file): def getDateFromRadarFile(file): - try: + try: year = int(file[1:5]) doy = int(file[5:8]) - set = int(file[8:11]) + set = int(file[8:11]) except: return None @@ -417,7 +417,7 @@ def getDateFromRadarFolder(folder): return thisDate def parse_format(s, fmt): - + for i in range(fmt.count('%')): x = fmt.index('%') d = DT_DIRECTIVES[fmt[x:x+2]] @@ -484,7 +484,7 @@ class Reader(object): def run(self): - raise NotImplementedError + raise NotImplementedError def getAllowedArgs(self): if hasattr(self, '__attrs__'): @@ -496,19 +496,19 @@ class Reader(object): for key, value in kwargs.items(): setattr(self, key, value) - + def find_folders(self, path, startDate, endDate, folderfmt, last=False): - folders = [x for f in path.split(',') + folders = [x for f in path.split(',') for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))] folders.sort() if last: folders = [folders[-1]] - for folder in folders: - try: - dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date() + for folder in folders: + try: + dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date() if dt >= startDate and dt <= endDate: yield os.path.join(path, folder) else: @@ -517,38 +517,38 @@ class Reader(object): log.log('Skiping folder {}'.format(folder), self.name) continue return - - def find_files(self, folders, ext, filefmt, startDate=None, endDate=None, + + def find_files(self, folders, ext, filefmt, startDate=None, endDate=None, expLabel='', last=False): - - for path in folders: + + for path in folders: files = glob.glob1(path, '*{}'.format(ext)) files.sort() if last: - if files: + if files: fo = files[-1] - try: + try: dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date() - yield os.path.join(path, expLabel, fo) - except Exception as e: + yield os.path.join(path, expLabel, fo) + except Exception as e: pass return else: return for fo in files: - try: - dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date() + try: + dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date() if dt >= startDate and dt <= endDate: yield os.path.join(path, expLabel, fo) else: log.log('Skiping file {}'.format(fo), self.name) except Exception as e: log.log('Skiping file {}'.format(fo), self.name) - continue + continue def searchFilesOffLine(self, path, startDate, 
endDate, - expLabel, ext, walk, + expLabel, ext, walk, filefmt, folderfmt): """Search files in offline mode for the given arguments @@ -561,12 +561,12 @@ class Reader(object): path, startDate, endDate, folderfmt) else: folders = path.split(',') - + return self.find_files( - folders, ext, filefmt, startDate, endDate, expLabel) + folders, ext, filefmt, startDate, endDate, expLabel) def searchFilesOnLine(self, path, startDate, endDate, - expLabel, ext, walk, + expLabel, ext, walk, filefmt, folderfmt): """Search for the last file of the last folder @@ -579,13 +579,13 @@ class Reader(object): Return: generator with the full path of last filename """ - + if walk: folders = self.find_folders( path, startDate, endDate, folderfmt, last=True) else: folders = path.split(',') - + return self.find_files( folders, ext, filefmt, startDate, endDate, expLabel, last=True) @@ -594,13 +594,13 @@ class Reader(object): while True: if self.fp != None: - self.fp.close() + self.fp.close() if self.online: newFile = self.setNextFileOnline() else: newFile = self.setNextFileOffline() - + if not(newFile): if self.online: raise schainpy.admin.SchainError('Time to wait for new files reach') @@ -609,10 +609,10 @@ class Reader(object): raise schainpy.admin.SchainWarning('No files found in the given path') else: raise schainpy.admin.SchainWarning('No more files to read') - + if self.verifyFile(self.filename): break - + log.log('Opening file: %s' % self.filename, self.name) self.readFirstHeader() @@ -625,7 +625,7 @@ class Reader(object): self.filename self.fp self.filesize - + Return: boolean @@ -633,7 +633,7 @@ class Reader(object): nextFile = True nextDay = False - for nFiles in range(self.nFiles+1): + for nFiles in range(self.nFiles+1): for nTries in range(self.nTries): fullfilename, filename = self.checkForRealPath(nextFile, nextDay) if fullfilename is not None: @@ -643,18 +643,18 @@ class Reader(object): self.name) time.sleep(self.delay) nextFile = False - continue - + continue + if fullfilename is not None: break - + self.nTries = 1 - nextFile = True + nextFile = True if nFiles == (self.nFiles - 1): log.log('Trying with next day...', self.name) nextDay = True - self.nTries = 3 + self.nTries = 3 if fullfilename: self.fileSize = os.path.getsize(fullfilename) @@ -666,18 +666,18 @@ class Reader(object): self.flagNoMoreFiles = 0 self.fileIndex += 1 return 1 - else: + else: return 0 - + def setNextFileOffline(self): """Open the next file to be readed in offline mode""" - + try: filename = next(self.filenameList) self.fileIndex +=1 except StopIteration: self.flagNoMoreFiles = 1 - return 0 + return 0 self.filename = filename self.fileSize = os.path.getsize(filename) @@ -685,22 +685,22 @@ class Reader(object): self.flagIsNewFile = 1 return 1 - + @staticmethod def isDateTimeInRange(dt, startDate, endDate, startTime, endTime): """Check if the given datetime is in range""" - + if startDate <= dt.date() <= endDate: if startTime <= dt.time() <= endTime: return True return False - + def verifyFile(self, filename): """Check for a valid file - + Arguments: filename -- full path filename - + Return: boolean """ @@ -711,7 +711,7 @@ class Reader(object): """Check if the next file to be readed exists""" raise NotImplementedError - + def readFirstHeader(self): """Parse the file header""" @@ -783,8 +783,8 @@ class JRODataReader(Reader): Return: str -- fullpath of the file """ - - + + if nextFile: self.set += 1 if nextDay: @@ -796,7 +796,7 @@ class JRODataReader(Reader): prefixFileList = ['d', 'D'] elif self.ext.lower() == ".pdata": # spectra 
prefixFileList = ['p', 'P'] - + # barrido por las combinaciones posibles for prefixDir in prefixDirList: thispath = self.path @@ -816,9 +816,9 @@ class JRODataReader(Reader): if os.path.exists(fullfilename): return fullfilename, filename - - return None, filename - + + return None, filename + def __waitNewBlock(self): """ Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma. @@ -860,9 +860,9 @@ class JRODataReader(Reader): def __setNewBlock(self): if self.fp == None: - return 0 - - if self.flagIsNewFile: + return 0 + + if self.flagIsNewFile: self.lastUTTime = self.basicHeaderObj.utc return 1 @@ -875,12 +875,12 @@ class JRODataReader(Reader): currentSize = self.fileSize - self.fp.tell() neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize - + if (currentSize >= neededSize): self.basicHeaderObj.read(self.fp) self.lastUTTime = self.basicHeaderObj.utc return 1 - + if self.__waitNewBlock(): self.lastUTTime = self.basicHeaderObj.utc return 1 @@ -966,10 +966,10 @@ class JRODataReader(Reader): except IOError: log.error("File {} can't be opened".format(filename), self.name) return False - + if self.online and self.waitDataBlock(0): pass - + basicHeaderObj = BasicHeader(LOCALTIME) systemHeaderObj = SystemHeader() radarControllerHeaderObj = RadarControllerHeader() @@ -996,7 +996,7 @@ class JRODataReader(Reader): dt2 = basicHeaderObj.datatime if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \ self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime): - flag = False + flag = False fp.close() return flag @@ -1105,11 +1105,11 @@ class JRODataReader(Reader): return dateList def setup(self, **kwargs): - + self.set_kwargs(**kwargs) if not self.ext.startswith('.'): self.ext = '.{}'.format(self.ext) - + if self.server is not None: if 'tcp://' in self.server: address = server @@ -1131,36 +1131,36 @@ class JRODataReader(Reader): for nTries in range(self.nTries): fullpath = self.searchFilesOnLine(self.path, self.startDate, - self.endDate, self.expLabel, self.ext, self.walk, + self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) try: fullpath = next(fullpath) except: fullpath = None - + if fullpath: break log.warning( 'Waiting {} sec for a valid file in {}: try {} ...'.format( - self.delay, self.path, nTries + 1), + self.delay, self.path, nTries + 1), self.name) time.sleep(self.delay) if not(fullpath): raise schainpy.admin.SchainError( - 'There isn\'t any valid file in {}'.format(self.path)) + 'There isn\'t any valid file in {}'.format(self.path)) pathname, filename = os.path.split(fullpath) self.year = int(filename[1:5]) self.doy = int(filename[5:8]) - self.set = int(filename[8:11]) - 1 + self.set = int(filename[8:11]) - 1 else: log.log("Searching files in {}".format(self.path), self.name) - self.filenameList = self.searchFilesOffLine(self.path, self.startDate, + self.filenameList = self.searchFilesOffLine(self.path, self.startDate, self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) - + self.setNextFile() return @@ -1181,7 +1181,7 @@ class JRODataReader(Reader): self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs - + def getFirstHeader(self): raise NotImplementedError @@ -1214,8 +1214,8 @@ class JRODataReader(Reader): """ Arguments: - path : - startDate : + path : + startDate : endDate : startTime : endTime : @@ -1284,7 +1284,7 @@ class 
JRODataWriter(Reader): dtype_width = get_dtype_width(dtype_index) return dtype_width - + def getProcessFlags(self): processFlags = 0 @@ -1322,9 +1322,9 @@ class JRODataWriter(Reader): self.basicHeaderObj.size = self.basicHeaderSize # bytes self.basicHeaderObj.version = self.versionFile - self.basicHeaderObj.dataBlock = self.nTotalBlocks + self.basicHeaderObj.dataBlock = self.nTotalBlocks utc = numpy.floor(self.dataOut.utctime) - milisecond = (self.dataOut.utctime - utc) * 1000.0 + milisecond = (self.dataOut.utctime - utc) * 1000.0 self.basicHeaderObj.utc = utc self.basicHeaderObj.miliSecond = milisecond self.basicHeaderObj.timeZone = self.dataOut.timeZone @@ -1465,9 +1465,9 @@ class JRODataWriter(Reader): if self.dataOut.datatime.date() > self.fileDate: setFile = 0 self.nTotalBlocks = 0 - + filen = '{}{:04d}{:03d}{:03d}{}'.format( - self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext) + self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext) filename = os.path.join(path, subfolder, filen) @@ -1515,11 +1515,11 @@ class JRODataWriter(Reader): self.ext = ext.lower() self.path = path - + if set is None: self.setFile = -1 else: - self.setFile = set - 1 + self.setFile = set - 1 self.blocksPerFile = blocksPerFile self.profilesPerBlock = profilesPerBlock diff --git a/schainpy/model/io/jroIO_param.py b/schainpy/model/io/jroIO_param.py index 4733785..07bbdb3 100644 --- a/schainpy/model/io/jroIO_param.py +++ b/schainpy/model/io/jroIO_param.py @@ -17,7 +17,7 @@ class HDFReader(Reader, ProcessingUnit): This unit reads HDF5 files created with `HDFWriter` operation contains by default two groups Data and Metadata all variables would be saved as `dataOut` - attributes. + attributes. It is possible to read any HDF5 file by given the structure in the `description` parameter, also you can add extra values to metadata with the parameter `extras`. 
@@ -37,10 +37,10 @@ class HDFReader(Reader, ProcessingUnit): Dictionary with the description of the HDF5 file extras : dict, optional Dictionary with extra metadata to be be added to `dataOut` - + Examples -------- - + desc = { 'Data': { 'data_output': ['u', 'v', 'w'], @@ -64,7 +64,7 @@ class HDFReader(Reader, ProcessingUnit): extras = { 'timeZone': 300 } - + reader = project.addReadUnit( name='HDFReader', path='/path/to/files', @@ -95,45 +95,43 @@ class HDFReader(Reader, ProcessingUnit): self.folderfmt = "*%Y%j" def setup(self, **kwargs): - self.set_kwargs(**kwargs) if not self.ext.startswith('.'): - self.ext = '.{}'.format(self.ext) + self.ext = '.{}'.format(self.ext) if self.online: log.log("Searching files in online mode...", self.name) for nTries in range(self.nTries): fullpath = self.searchFilesOnLine(self.path, self.startDate, - self.endDate, self.expLabel, self.ext, self.walk, + self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) try: fullpath = next(fullpath) except: fullpath = None - + if fullpath: break log.warning( 'Waiting {} sec for a valid file in {}: try {} ...'.format( - self.delay, self.path, nTries + 1), + self.delay, self.path, nTries + 1), self.name) time.sleep(self.delay) if not(fullpath): raise schainpy.admin.SchainError( - 'There isn\'t any valid file in {}'.format(self.path)) + 'There isn\'t any valid file in {}'.format(self.path)) pathname, filename = os.path.split(fullpath) self.year = int(filename[1:5]) self.doy = int(filename[5:8]) - self.set = int(filename[8:11]) - 1 + self.set = int(filename[8:11]) - 1 else: log.log("Searching files in {}".format(self.path), self.name) - self.filenameList = self.searchFilesOffLine(self.path, self.startDate, + self.filenameList = self.searchFilesOffLine(self.path, self.startDate, self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt) - self.setNextFile() return @@ -141,18 +139,18 @@ class HDFReader(Reader, ProcessingUnit): def readFirstHeader(self): '''Read metadata and data''' - self.__readMetadata() + self.__readMetadata() self.__readData() self.__setBlockList() - + if 'type' in self.meta: self.dataOut = eval(self.meta['type'])() - + for attr in self.meta: setattr(self.dataOut, attr, self.meta[attr]) - + self.blockIndex = 0 - + return def __setBlockList(self): @@ -194,13 +192,29 @@ class HDFReader(Reader, ProcessingUnit): meta = {} + desc = { + 'Data': { + 'dataPP_POW': 'Data/dataPP_POW/table00', + 'utctime':'Data/utctime' + }, + 'Metadata': { + 'azimuth' :'Metadata/azimuth', + 'heightList' :'Metadata/heightList', + 'flagDataAsBlock':'Metadata/flagDataAsBlock' + } + } + + self.description = desc if self.description: for key, value in self.description['Metadata'].items(): - meta[key] = self.fp[value].value + try: + meta[key] = self.fp[value].value + except: + meta[key] = self.fp[value][()] else: grp = self.fp['Metadata'] for name in grp: - meta[name] = grp[name].value + meta[name] = grp[name].value if self.extras: for key, value in self.extras.items(): @@ -212,12 +226,19 @@ class HDFReader(Reader, ProcessingUnit): def __readData(self): data = {} - + if self.description: for key, value in self.description['Data'].items(): if isinstance(value, str): if isinstance(self.fp[value], h5py.Dataset): - data[key] = self.fp[value].value + try: + data[key] = self.fp[value].value + except: + ndim= self.fp[value][()].ndim + if ndim==2: + data[key] = numpy.swapaxes(self.fp[value][()],0,1) + if ndim==1: + data[key] = self.fp[value][()] elif isinstance(self.fp[value], h5py.Group): array = 
[] for ch in self.fp[value]: @@ -240,7 +261,7 @@ class HDFReader(Reader, ProcessingUnit): array = numpy.array(array) else: log.warning('Unknown type: {}'.format(name)) - + if name in self.description: key = self.description[name] else: @@ -249,7 +270,7 @@ class HDFReader(Reader, ProcessingUnit): self.data = data return - + def getData(self): for attr in self.data: @@ -288,8 +309,8 @@ class HDFWriter(Operation): The HDF5 file contains by default two groups Data and Metadata where you can save any `dataOut` attribute specified by `dataList` and `metadataList` parameters, data attributes are normaly time dependent where the metadata - are not. - It is possible to customize the structure of the HDF5 file with the + are not. + It is possible to customize the structure of the HDF5 file with the optional description parameter see the examples. Parameters: @@ -306,10 +327,10 @@ class HDFWriter(Operation): If True the name of the files corresponds to the timestamp of the data description : dict, optional Dictionary with the desired description of the HDF5 file - + Examples -------- - + desc = { 'data_output': {'winds': ['z', 'w', 'v']}, 'utctime': 'timestamps', @@ -329,7 +350,7 @@ class HDFWriter(Operation): 'heightList': 'heights' } } - + writer = proc_unit.addOperation(name='HDFWriter') writer.addParameter(name='path', value='/path/to/file') writer.addParameter(name='blocksPerFile', value='32') @@ -357,7 +378,7 @@ class HDFWriter(Operation): lastTime = None def __init__(self): - + Operation.__init__(self) return @@ -393,7 +414,7 @@ class HDFWriter(Operation): dsDict['shape'] = dataAux.shape dsDict['dsNumber'] = dataAux.shape[0] dsDict['dtype'] = dataAux.dtype - + dsList.append(dsDict) self.dsList = dsList @@ -408,7 +429,7 @@ class HDFWriter(Operation): self.lastTime = currentTime self.currentDay = dataDay return False - + timeDiff = currentTime - self.lastTime #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora @@ -427,7 +448,7 @@ class HDFWriter(Operation): self.dataOut = dataOut if not(self.isConfig): - self.setup(path=path, blocksPerFile=blocksPerFile, + self.setup(path=path, blocksPerFile=blocksPerFile, metadataList=metadataList, dataList=dataList, setType=setType, description=description) @@ -436,9 +457,9 @@ class HDFWriter(Operation): self.putData() return - + def setNextFile(self): - + ext = self.ext path = self.path setFile = self.setFile @@ -523,7 +544,7 @@ class HDFWriter(Operation): return 'pair{:02d}'.format(x) else: return 'channel{:02d}'.format(x) - + def writeMetadata(self, fp): if self.description: @@ -548,7 +569,7 @@ class HDFWriter(Operation): return def writeData(self, fp): - + if self.description: if 'Data' in self.description: grp = fp.create_group('Data') @@ -559,13 +580,13 @@ class HDFWriter(Operation): dtsets = [] data = [] - + for dsInfo in self.dsList: if dsInfo['nDim'] == 0: ds = grp.create_dataset( - self.getLabel(dsInfo['variable']), + self.getLabel(dsInfo['variable']), (self.blocksPerFile, ), - chunks=True, + chunks=True, dtype=numpy.float64) dtsets.append(ds) data.append((dsInfo['variable'], -1)) @@ -577,7 +598,7 @@ class HDFWriter(Operation): sgrp = grp for i in range(dsInfo['dsNumber']): ds = sgrp.create_dataset( - self.getLabel(dsInfo['variable'], i), + self.getLabel(dsInfo['variable'], i), (self.blocksPerFile, ) + dsInfo['shape'][1:], chunks=True, dtype=dsInfo['dtype']) @@ -586,7 +607,7 @@ class HDFWriter(Operation): fp.flush() log.log('Creating file: {}'.format(fp.filename), self.name) - + self.ds = dtsets self.data = data 
self.firsttime = True diff --git a/schainpy/model/proc/jroproc_parameters.py b/schainpy/model/proc/jroproc_parameters.py index eecc054..8b401e1 100755 --- a/schainpy/model/proc/jroproc_parameters.py +++ b/schainpy/model/proc/jroproc_parameters.py @@ -109,6 +109,13 @@ class ParametersProc(ProcessingUnit): self.dataOut.flagNoData = False self.dataOut.utctimeInit = self.dataIn.utctime self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds + + if hasattr(self.dataIn, 'flagDataAsBlock'): + self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock + + if hasattr(self.dataIn, 'profileIndex'): + self.dataOut.profileIndex = self.dataIn.profileIndex + if hasattr(self.dataIn, 'dataPP_POW'): self.dataOut.dataPP_POW = self.dataIn.dataPP_POW @@ -3998,3 +4005,134 @@ class SMOperations(): # error[indInvalid1] = 13 # # return heights, error + +class Block360(Operation): + ''' + ''' + isConfig = False + __profIndex = 0 + __initime = None + __lastdatatime = None + __buffer = None + __dataReady = False + n = None + __nch = 0 + __nHeis = 0 + index = 0 + + def __init__(self,**kwargs): + Operation.__init__(self,**kwargs) + + def setup(self, dataOut, n = None): + ''' + n= Numero de PRF's de entrada + ''' + self.__initime = None + self.__lastdatatime = 0 + self.__dataReady = False + self.__buffer = 0 + self.__buffer_1D = 0 + self.__profIndex = 0 + self.index = 0 + + + #print("ELVALOR DE n es:", n) + if n == None: + raise ValueError("n should be specified.") + + if n != None: + if n<2: + raise ValueError("n should be greater than 2") + + self.n = n + #print("nHeights") + self.__buffer = numpy.zeros(( n, dataOut.nHeights)) + self.__buffer2= numpy.zeros(n) + + def putData(self,data): + ''' + Add a profile to he __buffer and increase in one the __profiel Index + ''' + #print("line 4049",data.dataPP_POW.shape,data.dataPP_POW[:10]) + #print("line 4049",data.azimuth.shape,data.azimuth) + self.__buffer[self.__profIndex,:]= data.dataPP_POW + #print("me casi",self.index,data.azimuth[self.index]) + #print(self.__profIndex, self.index , data.azimuth[self.index] ) + #print("magic",data.profileIndex) + #print(data.azimuth[self.index]) + #print("index",self.index) + + self.__buffer2[self.__profIndex] = data.azimuth[self.index] + #print("q pasa") + self.index+=1 + #print("index",self.index,data.azimuth[:10]) + self.__profIndex += 1 + return #················· Remove DC··································· + + def pushData(self,data): + ''' + Return the PULSEPAIR and the profiles used in the operation + Affected : self.__profileIndex + ''' + #print("pushData") + + data_360 = self.__buffer + data_p = self.__buffer2 + n = self.__profIndex + + self.__buffer = numpy.zeros(( self.n,330)) + self.__buffer2 = numpy.zeros(self.n) + self.__profIndex = 0 + #print("pushData") + return data_360,n,data_p + + + def byProfiles(self,dataOut): + + self.__dataReady = False + data_360 = None + data_p = None + #print("dataOu",dataOut.dataPP_POW) + self.putData(data=dataOut) + #print("profIndex",self.__profIndex) + if self.__profIndex == self.n: + data_360,n,data_p = self.pushData(data=dataOut) + self.__dataReady = True + + return data_360,data_p + + + def blockOp(self, dataOut, datatime= None): + if self.__initime == None: + self.__initime = datatime + data_360,data_p = self.byProfiles(dataOut) + self.__lastdatatime = datatime + + if data_360 is None: + return None, None,None + + avgdatatime = self.__initime + deltatime = datatime - self.__lastdatatime + self.__initime = datatime + 
#print(data_360.shape,avgdatatime,data_p.shape) + return data_360,avgdatatime,data_p + + def run(self, dataOut,n = None,**kwargs): + + if not self.isConfig: + self.setup(dataOut = dataOut, n = n , **kwargs) + self.index = 0 + #print("comova",self.isConfig) + self.isConfig = True + if self.index==dataOut.azimuth.shape[0]: + self.index=0 + data_360, avgdatatime,data_p = self.blockOp(dataOut, dataOut.utctime) + dataOut.flagNoData = True + + if self.__dataReady: + dataOut.data_360 = data_360 # S + dataOut.data_azi = data_p + #print("jroproc_parameters",data_p[0],data_p[-1])#,data_360.shape,avgdatatime) + dataOut.utctime = avgdatatime + dataOut.flagNoData = False + return dataOut diff --git a/schainpy/model/proc/jroproc_voltage.py b/schainpy/model/proc/jroproc_voltage.py index 9c71e4d..2584d57 100644 --- a/schainpy/model/proc/jroproc_voltage.py +++ b/schainpy/model/proc/jroproc_voltage.py @@ -1287,7 +1287,7 @@ class CombineProfiles(Operation): return dataOut -class PulsePairVoltage(Operation): +class PulsePair(Operation): ''' Function PulsePair(Signal Power, Velocity) The real component of Lag[0] provides Intensity Information @@ -1318,18 +1318,21 @@ class PulsePairVoltage(Operation): lambda_ = 0 def __init__(self,**kwargs): + Operation.__init__(self,**kwargs) def setup(self, dataOut, n = None, removeDC=False): ''' n= Numero de PRF's de entrada ''' + print("[INICIO]-setup del METODO PULSE PAIR") self.__initime = None self.__lastdatatime = 0 self.__dataReady = False self.__buffer = 0 self.__profIndex = 0 self.noise = None + self.__nch = dataOut.nChannels self.__nHeis = dataOut.nHeights self.removeDC = removeDC @@ -1367,17 +1370,17 @@ class PulsePairVoltage(Operation): Return the PULSEPAIR and the profiles used in the operation Affected : self.__profileIndex ''' - #----------------- Remove DC----------------------------------- + #················· Remove DC··································· if self.removeDC==True: mean = numpy.mean(self.__buffer,1) tmp = mean.reshape(self.__nch,1,self.__nHeis) dc= numpy.tile(tmp,[1,self.__nProf,1]) self.__buffer = self.__buffer - dc - #------------------Calculo de Potencia ------------------------ + #··················Calculo de Potencia ························ pair0 = self.__buffer*numpy.conj(self.__buffer) pair0 = pair0.real lag_0 = numpy.sum(pair0,1) - #------------------Calculo de Ruido x canal-------------------- + #··················Calculo de Ruido x canal···················· self.noise = numpy.zeros(self.__nch) for i in range(self.__nch): daux = numpy.sort(pair0[i,:,:],axis= None) @@ -1387,11 +1390,11 @@ class PulsePairVoltage(Operation): self.noise = numpy.tile(self.noise,[1,self.__nHeis]) noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis) noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1]) - #------------------ Potencia recibida= P , Potencia senal = S , Ruido= N-- - #------------------ P= S+N ,P=lag_0/N --------------------------------- - #-------------------- Power -------------------------------------------------- + #·················· Potencia recibida= P , Potencia senal = S , Ruido= N·· + #·················· P= S+N ,P=lag_0/N ································· + #···················· Power ·················································· data_power = lag_0/(self.n*self.nCohInt) - #------------------ Senal --------------------------------------------------- + #------------------ Senal ··················································· data_intensity = pair0 - noise_buffer data_intensity = 
numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt) #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt) @@ -1400,28 +1403,28 @@ class PulsePairVoltage(Operation): if data_intensity[i][j] < 0: data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j])) - #----------------- Calculo de Frecuencia y Velocidad doppler-------- + #················· Calculo de Frecuencia y Velocidad doppler········ pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:]) lag_1 = numpy.sum(pair1,1) data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1) data_velocity = (self.lambda_/2.0)*data_freq - #---------------- Potencia promedio estimada de la Senal----------- + #················ Potencia promedio estimada de la Senal··········· lag_0 = lag_0/self.n S = lag_0-self.noise - #---------------- Frecuencia Doppler promedio --------------------- + #················ Frecuencia Doppler promedio ····················· lag_1 = lag_1/(self.n-1) R1 = numpy.abs(lag_1) - #---------------- Calculo del SNR---------------------------------- + #················ Calculo del SNR·································· data_snrPP = S/self.noise for i in range(self.__nch): for j in range(self.__nHeis): if data_snrPP[i][j] < 1.e-20: data_snrPP[i][j] = 1.e-20 - #----------------- Calculo del ancho espectral ---------------------- + #················· Calculo del ancho espectral ······················ L = S/R1 L = numpy.where(L<0,1,L) L = numpy.log(L) @@ -1466,10 +1469,10 @@ class PulsePairVoltage(Operation): return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime - def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs): + def run(self, dataOut,n = None,removeDC= False,**kwargs): if not self.isConfig: - self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs) + self.setup( dataOut, n = n , removeDC=removeDC , **kwargs) self.isConfig = True data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime) dataOut.flagNoData = True @@ -1482,12 +1485,12 @@ class PulsePairVoltage(Operation): dataOut.dataPP_SNR = data_snrPP dataOut.dataPP_WIDTH = data_specwidth dataOut.PRFbyAngle = self.n #numero de PRF*cada angulo rotado que equivale a un tiempo. 
+            dataOut.nProfiles = int(dataOut.nProfiles/n)
             dataOut.utctime = avgdatatime
             dataOut.flagNoData = False
         return dataOut
-
 # import collections
 # from scipy.stats import mode
 #
diff --git a/schainpy/scripts/test_sim00011.py b/schainpy/scripts/test_sim00011.py
new file mode 100644
index 0000000..6be9733
--- /dev/null
+++ b/schainpy/scripts/test_sim00011.py
@@ -0,0 +1,37 @@
+import os,sys
+import datetime
+import time
+from schainpy.controller import Project
+#path = '/home/alex/Downloads/hdf5_testPP2'
+#path = '/home/alex/Downloads/hdf5_test'
+#path='/home/alex/Downloads/hdf5_wr'
+path='/home/developer/Downloads/HDF5_WR'
+figpath = path
+desc = "Simulator Test"
+
+controllerObj = Project()
+
+controllerObj.setup(id='10',name='Test Simulator',description=desc)
+
+readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
+                                            path=path,
+                                            startDate="2021/01/01", #"2020/01/01",#today,
+                                            endDate= "2021/12/01", #"2020/12/30",#today,
+                                            startTime='00:00:00',
+                                            endTime='23:59:59',
+                                            delay=0,
+                                            #set=0,
+                                            online=0,
+                                            walk=0)#1
+
+procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
+opObj11 = procUnitConfObjA.addOperation(name='Block360')
+opObj11.addParameter(name='n', value='10', format='int')
+
+opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
+#opObj11.addParameter(name='save', value=figpath)
+#opObj11.addParameter(name='save_period', value=1)
+#opObj11 = procUnitConfObjA.addOperation(name='PowerPlot', optype='other')#PulsepairPowerPlot
+#opObj11 = procUnitConfObjA.addOperation(name='PPSignalPlot', optype='other')
+
+controllerObj.start()
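
The renamed PulsePair operation estimates received power from the lag-0 product of the buffered voltages and the mean Doppler frequency / radial velocity from the phase of the lag-1 product. Below is a minimal, self-contained sketch of those two estimators only; the names are illustrative and not the schainpy API, and it assumes a complex voltage array of shape (n_profiles, n_heights), an inter-pulse period in seconds, and a wavelength in meters.

    import numpy

    def pulse_pair_moments(volts, ipp, wavelength, n_coh_int=1):
        # lag-0 product: mean received power per height (P = S + N)
        lag0 = numpy.mean((volts * numpy.conj(volts)).real, axis=0)
        power = lag0 / n_coh_int
        # lag-1 product: autocorrelation between consecutive profiles
        lag1 = numpy.mean(volts[:-1, :] * numpy.conj(volts[1:, :]), axis=0)
        # mean Doppler frequency and radial velocity (same sign convention as the patch)
        doppler_freq = (-1.0 / (2.0 * numpy.pi * ipp * n_coh_int)) * numpy.angle(lag1)
        velocity = (wavelength / 2.0) * doppler_freq
        return power, velocity

The patch additionally derives a per-channel noise level from the sorted lag-0 samples and uses it for the SNR and spectral-width estimates; that step is omitted here for brevity.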
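
The HDFReader changes wrap dataset access in try/except so files can be read with both old and new h5py releases: the deprecated `.value` accessor was removed in h5py 3.0, where `dataset[()]` is the supported way to load a whole dataset. A small compatibility sketch of that read pattern (function and variable names here are illustrative, not part of schainpy):

    import h5py
    import numpy

    def read_dataset(fp, path):
        """Load a whole HDF5 dataset regardless of the h5py version in use."""
        dset = fp[path]
        try:
            data = dset.value          # h5py < 3.0 (deprecated accessor)
        except AttributeError:
            data = dset[()]            # h5py >= 3.0
        # the patch also swaps the first two axes of 2-D blocks before use
        if isinstance(data, numpy.ndarray) and data.ndim == 2:
            data = numpy.swapaxes(data, 0, 1)
        return data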
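
Block360 buffers n consecutive pulse-pair power profiles together with their azimuth angles and releases them as one block for the PPI-style WeatherPlot. The accumulation idea, stripped of the Operation machinery (class and attribute names below are illustrative only):

    import numpy

    class AzimuthBlock(object):
        """Accumulate n (power profile, azimuth) pairs and emit them as a single block."""

        def __init__(self, n, n_heights):
            self.n = n
            self.power = numpy.zeros((n, n_heights))
            self.azimuth = numpy.zeros(n)
            self.count = 0

        def push(self, profile, azimuth):
            # store one ray; once n rays are collected, return the block and reset
            self.power[self.count, :] = profile
            self.azimuth[self.count] = azimuth
            self.count += 1
            if self.count == self.n:
                block, angles = self.power.copy(), self.azimuth.copy()
                self.count = 0
                return block, angles
            return None

With n chosen so that the buffered rays span a full sweep, the resulting (n, n_heights) array and azimuth vector correspond to the data_360 and data_azi attributes that WeatherPlot hands to wradlib's plot_ppi.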