diff --git a/schainpy/admin.py b/schainpy/admin.py
index 5eb45dc..7a686fe 100644
--- a/schainpy/admin.py
+++ b/schainpy/admin.py
@@ -129,7 +129,7 @@ class SchainConfigure():
__SCHAIN_SENDER_EMAIL = "MAILSERVER_ACCOUNT"
__SCHAIN_SENDER_PASS = "MAILSERVER_PASSWORD"
- def __init__(self, initFile = None):
+ def __init__(self, initFile=None):
# Set configuration file
if (initFile == None):
@@ -251,7 +251,7 @@ class SchainNotify:
Written by "Miguel Urco":mailto:miguel.urco@jro.igp.gob.pe Dec. 1, 2015
"""
- #constants
+ # constants
def __init__(self):
"""__init__ initializes SchainNotify by getting some basic information from SchainDB and SchainSite.
@@ -275,7 +275,7 @@ class SchainNotify:
self.__emailFromAddress = confObj.getSenderEmail()
self.__emailPass = confObj.getSenderPass()
self.__emailToAddress = confObj.getAdminEmail()
- self.__emailServer = confObj.getEmailServer()
+ self.__emailServer = confObj.getEmailServer()
def sendEmail(self, email_from, email_to, subject='Error running ...', message="", subtitle="", filename="", html_format=True):
@@ -297,7 +297,7 @@ class SchainNotify:
msg.preamble = 'SChainPy'
if html_format:
- message = "
%s
" %subject + "" + subtitle.replace("\n", "
\n") + "
" + message.replace("\n", "
\n")
+ message = " %s
" % subject + "" + subtitle.replace("\n", "
\n") + "
" + message.replace("\n", "
\n")
message = "\n" + message + ''
# This is the textual part:
@@ -310,8 +310,8 @@ class SchainNotify:
if filename and os.path.isfile(filename):
# This is the binary part(The Attachment):
- part = MIMEApplication(open(filename,"rb").read())
- part.add_header('Content-Disposition',
+ part = MIMEApplication(open(filename, "rb").read())
+ part.add_header('Content-Disposition',
'attachment',
filename=os.path.basename(filename))
msg.attach(part)
@@ -342,7 +342,7 @@ class SchainNotify:
return 1
- def sendAlert(self, message, subject = "", subtitle="", filename=""):
+ def sendAlert(self, message, subject="", subtitle="", filename=""):
"""sendAlert sends an email with the given message and optional title.
Inputs: message (string), and optional title (string)
@@ -357,14 +357,14 @@ class SchainNotify:
if not self.__emailToAddress:
return 0
- print("***** Sending alert to %s *****" %self.__emailToAddress)
+ print("***** Sending alert to %s *****" % self.__emailToAddress)
# set up message
- sent=self.sendEmail(email_from=self.__emailFromAddress,
+ sent = self.sendEmail(email_from=self.__emailFromAddress,
email_to=self.__emailToAddress,
subject=subject,
message=message,
- subtitle=subtitle,
+ subtitle=subtitle,
filename=filename)
if not sent:
@@ -372,7 +372,7 @@ class SchainNotify:
return 1
- def notify(self, email, message, subject = "", subtitle="", filename=""):
+ def notify(self, email, message, subject="", subtitle="", filename=""):
"""notify sends an email with the given message and title to email.
Inputs: email (string), message (string), and subject (string)
@@ -392,7 +392,7 @@ class SchainNotify:
email_to=email,
subject=subject,
message=message,
- subtitle=subtitle,
+ subtitle=subtitle,
filename=filename
)
@@ -502,4 +502,4 @@ if __name__ == '__main__':
test.sendAlert('This is a message from the python module SchainNotify', 'Test from SchainNotify')
- print('Hopefully message sent - check.')
\ No newline at end of file
+ print('Hopefully message sent - check.')
diff --git a/schainpy/cli/cli.py b/schainpy/cli/cli.py
index 897b198..11e0015 100644
--- a/schainpy/cli/cli.py
+++ b/schainpy/cli/cli.py
@@ -161,7 +161,7 @@ def search(nextcommand):
except Exception as e:
log.error('Module `{}` does not exists'.format(nextcommand), '')
allModules = getAll()
- similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1]>80]
+ similar = [t[0] for t in process.extract(nextcommand, allModules, limit=12) if t[1] > 80]
log.success('Possible modules are: {}'.format(', '.join(similar)), '')
def runschain(nextcommand):
diff --git a/schainpy/controller.py b/schainpy/controller.py
index dc9c5d8..4b80dda 100644
--- a/schainpy/controller.py
+++ b/schainpy/controller.py
@@ -203,7 +203,7 @@ class ProcUnitConf(ConfBase):
def removeOperation(self, id):
- i = [1 if x.id==id else 0 for x in self.operations]
+ i = [1 if x.id == id else 0 for x in self.operations]
self.operations.pop(i.index(1))
def getOperation(self, id):
@@ -293,12 +293,12 @@ class ReadUnitConf(ProcUnitConf):
if name == None:
if 'Reader' in datatype:
name = datatype
- datatype = name.replace('Reader','')
+ datatype = name.replace('Reader', '')
else:
name = '{}Reader'.format(datatype)
if datatype == None:
if 'Reader' in name:
- datatype = name.replace('Reader','')
+ datatype = name.replace('Reader', '')
else:
datatype = name
name = '{}Reader'.format(name)
@@ -572,7 +572,7 @@ class Project(Process):
if '#_start_#' in msg:
procs += 1
elif '#_end_#' in msg:
- procs -=1
+ procs -= 1
else:
err_msg = msg
@@ -617,7 +617,7 @@ class Project(Process):
subtitle += '[End time = %s]\n' % readUnitConfObj.parameters['endTime']
a = Alarm(
- modes=self.alarm,
+ modes=self.alarm,
email=self.email,
message=message,
subject=subject,
@@ -656,4 +656,4 @@ class Project(Process):
self.runProcs()
log.success('{} Done (Time: {:4.2f}s)'.format(
self.name,
- time.time()-self.start_time), '')
+ time.time() - self.start_time), '')
diff --git a/schainpy/model/data/BLTRheaderIO.py b/schainpy/model/data/BLTRheaderIO.py
index 534f109..08f5edb 100644
--- a/schainpy/model/data/BLTRheaderIO.py
+++ b/schainpy/model/data/BLTRheaderIO.py
@@ -12,49 +12,49 @@ import datetime
SPEED_OF_LIGHT = 299792458
SPEED_OF_LIGHT = 3e8
-FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
- ('FileMgcNumber',' vertical)
- ('AntennaCoord',' vertical)
+ ('AntennaCoord', ' endFp:
- sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
+ sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
return 0
if fp.tell() < endFp:
- sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
+ sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" % fp.name)
return 0
return 1
@@ -335,38 +335,38 @@ class RecordHeader(Header):
def write(self, fp):
headerTuple = (self.RecMgcNumber,
- self.RecCounter,
- self.Off2StartNxtRec,
- self.EpTimeStamp,
- self.msCompTimeStamp,
- self.ExpTagName,
- self.ExpComment,
- self.SiteLatDegrees,
- self.SiteLongDegrees,
- self.RTCgpsStatus,
- self.TransmitFrec,
- self.ReceiveFrec,
- self.FirstOsciFrec,
- self.Polarisation,
- self.ReceiverFiltSett,
- self.nModesInUse,
- self.DualModeIndex,
- self.DualModeRange,
+ self.RecCounter,
+ self.Off2StartNxtRec,
+ self.EpTimeStamp,
+ self.msCompTimeStamp,
+ self.ExpTagName,
+ self.ExpComment,
+ self.SiteLatDegrees,
+ self.SiteLongDegrees,
+ self.RTCgpsStatus,
+ self.TransmitFrec,
+ self.ReceiveFrec,
+ self.FirstOsciFrec,
+ self.Polarisation,
+ self.ReceiverFiltSett,
+ self.nModesInUse,
+ self.DualModeIndex,
+ self.DualModeRange,
self.nDigChannels,
- self.SampResolution,
- self.nRangeGatesSamp,
- self.StartRangeSamp,
- self.PRFhz,
- self.Integrations,
- self.nDataPointsTrsf,
- self.nReceiveBeams,
- self.nSpectAverages,
- self.FFTwindowingInd,
- self.BeamAngleAzim,
- self.BeamAngleZen,
- self.AntennaCoord,
- self.RecPhaseCalibr,
- self.RecAmpCalibr,
+ self.SampResolution,
+ self.nRangeGatesSamp,
+ self.StartRangeSamp,
+ self.PRFhz,
+ self.Integrations,
+ self.nDataPointsTrsf,
+ self.nReceiveBeams,
+ self.nSpectAverages,
+ self.FFTwindowingInd,
+ self.BeamAngleAzim,
+ self.BeamAngleZen,
+ self.AntennaCoord,
+ self.RecPhaseCalibr,
+ self.RecAmpCalibr,
self.ReceiverGaindB)
# self.size,self.nSamples,
@@ -375,7 +375,7 @@ class RecordHeader(Header):
# self.adcResolution,
# self.pciDioBusWidth
- header = numpy.array(headerTuple,RECORD_STRUCTURE)
+ header = numpy.array(headerTuple, RECORD_STRUCTURE)
header.tofile(fp)
return 1
@@ -394,11 +394,11 @@ def get_dtype_index(numpy_dtype):
def get_numpy_dtype(index):
- #dtype4 = numpy.dtype([('real','= self.xrange*60*60:
+ if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange * 60 * 60:
self.save_time = tm
self.__plot()
- self.tmin += self.xrange*60*60
+ self.tmin += self.xrange * 60 * 60
self.data.setup()
self.clear_figures()
@@ -666,9 +666,9 @@ class Plot(Operation):
if self.xmin is None:
self.tmin = tm
self.xmin = dt.hour
- minutes = (self.xmin-int(self.xmin)) * 60
+ minutes = (self.xmin - int(self.xmin)) * 60
seconds = (minutes - int(minutes)) * 60
- self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
+ self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
datetime.datetime(1970, 1, 1)).total_seconds()
if self.localtime:
self.tmin += time.timezone
@@ -679,7 +679,7 @@ class Plot(Operation):
if self.throttle == 0:
self.__plot()
else:
- self.__throttle_plot(self.__plot)#, coerce=coerce)
+ self.__throttle_plot(self.__plot) # , coerce=coerce)
def close(self):
diff --git a/schainpy/model/graphics/jroplot_correlation.py b/schainpy/model/graphics/jroplot_correlation.py
index f91aa2d..3a6730b 100644
--- a/schainpy/model/graphics/jroplot_correlation.py
+++ b/schainpy/model/graphics/jroplot_correlation.py
@@ -32,8 +32,8 @@ class CorrelationPlot(Plot):
def getSubplots(self):
- ncol = int(numpy.sqrt(self.nplots)+0.9)
- nrow = int(self.nplots*1./ncol + 0.9)
+ ncol = int(numpy.sqrt(self.nplots) + 0.9)
+ nrow = int(self.nplots * 1. / ncol + 0.9)
return nrow, ncol
@@ -50,10 +50,10 @@ class CorrelationPlot(Plot):
colspan = 2
self.__nsubplots = 2
- self.createFigure(id = id,
- wintitle = wintitle,
- widthplot = self.WIDTH + self.WIDTHPROF,
- heightplot = self.HEIGHT + self.HEIGHTPROF,
+ self.createFigure(id=id,
+ wintitle=wintitle,
+ widthplot=self.WIDTH + self.WIDTHPROF,
+ heightplot=self.HEIGHT + self.HEIGHTPROF,
show=show)
nrow, ncol = self.getSubplots()
@@ -65,10 +65,10 @@ class CorrelationPlot(Plot):
if counter >= self.nplots:
break
- self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
+ self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan, colspan, 1)
if showprofile:
- self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
+ self.addAxes(nrow, ncol * ncolspan, y, x * ncolspan + colspan, 1, 1)
counter += 1
@@ -98,7 +98,7 @@ class CorrelationPlot(Plot):
return None
if realtime:
- if not(isRealtime(utcdatatime = dataOut.utctime)):
+ if not(isRealtime(utcdatatime=dataOut.utctime)):
print('Skipping this plot function')
return
@@ -116,16 +116,16 @@ class CorrelationPlot(Plot):
x = dataOut.getLagTRange(1)
y = dataOut.heightList
- z = copy.copy(dataOut.data_corr[:,:,0,:])
+ z = copy.copy(dataOut.data_corr[:, :, 0, :])
for i in range(dataOut.data_corr.shape[0]):
- z[i,:,:] = z[i,:,:]/factor[i,:]
+ z[i, :, :] = z[i, :, :] / factor[i, :]
zdB = numpy.abs(z)
avg = numpy.average(z, axis=1)
# avg = numpy.nanmean(z, axis=1)
# noise = dataOut.noise/factor
- #thisDatetime = dataOut.datatime
+ # thisDatetime = dataOut.datatime
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
title = wintitle + " Correlation"
xlabel = "Lag T (s)"
@@ -158,10 +158,10 @@ class CorrelationPlot(Plot):
self.setWinTitle(title)
for i in range(self.nplots):
- str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S"))
- title = "Channel %d and %d: : %s" %(dataOut.pairsList[i][0],dataOut.pairsList[i][1] , str_datetime)
- axes = self.axesList[i*self.__nsubplots]
- axes.pcolor(x, y, zdB[i,:,:],
+ str_datetime = '%s %s' % (thisDatetime.strftime("%Y/%m/%d"), thisDatetime.strftime("%H:%M:%S"))
+ title = "Channel %d and %d: : %s" % (dataOut.pairsList[i][0], dataOut.pairsList[i][1], str_datetime)
+ axes = self.axesList[i * self.__nsubplots]
+ axes.pcolor(x, y, zdB[i, :, :],
xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
xlabel=xlabel, ylabel=ylabel, title=title,
ticksize=9, cblabel='')
@@ -184,4 +184,4 @@ class CorrelationPlot(Plot):
save=save,
ftp=ftp,
wr_period=wr_period,
- thisDatetime=thisDatetime)
\ No newline at end of file
+ thisDatetime=thisDatetime)
diff --git a/schainpy/model/graphics/jroplot_heispectra.py b/schainpy/model/graphics/jroplot_heispectra.py
index a98a5cd..44eaf9c 100644
--- a/schainpy/model/graphics/jroplot_heispectra.py
+++ b/schainpy/model/graphics/jroplot_heispectra.py
@@ -31,7 +31,7 @@ class SpectraHeisPlot(Plot):
data = {}
meta = {}
- spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
+ spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
data['spc_heis'] = spc
return data, meta
@@ -40,12 +40,12 @@ class SpectraHeisPlot(Plot):
c = 3E8
deltaHeight = self.data.yrange[1] - self.data.yrange[0]
- x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
+ x = numpy.arange(-1 * len(self.data.yrange) / 2., len(self.data.yrange) / 2.) * (c / (2 * deltaHeight * len(self.data.yrange) * 1000))
self.y = self.data[-1]['spc_heis']
self.titles = []
for n, ax in enumerate(self.axes):
- ychannel = self.y[n,:]
+ ychannel = self.y[n, :]
if ax.firsttime:
self.xmin = min(x) if self.xmin is None else self.xmin
self.xmax = max(x) if self.xmax is None else self.xmax
@@ -78,7 +78,7 @@ class RTIHeisPlot(Plot):
data = {}
meta = {}
spc = dataOut.data_spc / dataOut.normFactor
- spc = 10*numpy.log10(numpy.average(spc, axis=1))
+ spc = 10 * numpy.log10(numpy.average(spc, axis=1))
data['rti_heis'] = spc
return data, meta
diff --git a/schainpy/model/graphics/jroplot_parameters.py b/schainpy/model/graphics/jroplot_parameters.py
index 06032b4..dec2de4 100644
--- a/schainpy/model/graphics/jroplot_parameters.py
+++ b/schainpy/model/graphics/jroplot_parameters.py
@@ -12,13 +12,13 @@ EARTH_RADIUS = 6.3710e3
def ll2xy(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
- a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
+ a = 0.5 - numpy.cos((lat2 - lat1) * p) / 2 + numpy.cos(lat1 * p) * \
numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
r = 12742 * numpy.arcsin(numpy.sqrt(a))
- theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
- * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
- theta = -theta + numpy.pi/2
- return r*numpy.cos(theta), r*numpy.sin(theta)
+ theta = numpy.arctan2(numpy.sin((lon2 - lon1) * p) * numpy.cos(lat2 * p), numpy.cos(lat1 * p)
+ * numpy.sin(lat2 * p) - numpy.sin(lat1 * p) * numpy.cos(lat2 * p) * numpy.cos((lon2 - lon1) * p))
+ theta = -theta + numpy.pi / 2
+ return r * numpy.cos(theta), r * numpy.sin(theta)
def km2deg(km):
@@ -26,7 +26,7 @@ def km2deg(km):
Convert distance in km to degrees
'''
- return numpy.rad2deg(km/EARTH_RADIUS)
+ return numpy.rad2deg(km / EARTH_RADIUS)
@@ -50,7 +50,7 @@ class SnrPlot(RTIPlot):
def update(self, dataOut):
data = {
- 'snr': 10*numpy.log10(dataOut.data_snr)
+ 'snr': 10 * numpy.log10(dataOut.data_snr)
}
return data, {}
@@ -66,7 +66,7 @@ class DopplerPlot(RTIPlot):
def update(self, dataOut):
data = {
- 'dop': 10*numpy.log10(dataOut.data_dop)
+ 'dop': 10 * numpy.log10(dataOut.data_dop)
}
return data, {}
@@ -82,7 +82,7 @@ class PowerPlot(RTIPlot):
def update(self, dataOut):
data = {
- 'pow': 10*numpy.log10(dataOut.data_pow)
+ 'pow': 10 * numpy.log10(dataOut.data_pow)
}
return data, {}
@@ -269,22 +269,22 @@ class PolarMapPlot(Plot):
zeniths = numpy.linspace(
0, self.data.meta['max_range'], data.shape[1])
if self.mode == 'E':
- azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
+ azimuths = -numpy.radians(self.data.yrange) + numpy.pi / 2
r, theta = numpy.meshgrid(zeniths, azimuths)
- x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
- theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
+ x, y = r * numpy.cos(theta) * numpy.cos(numpy.radians(self.data.meta['elevation'])), r * numpy.sin(
+ theta) * numpy.cos(numpy.radians(self.data.meta['elevation']))
x = km2deg(x) + self.lon
y = km2deg(y) + self.lat
else:
azimuths = numpy.radians(self.data.yrange)
r, theta = numpy.meshgrid(zeniths, azimuths)
- x, y = r*numpy.cos(theta), r*numpy.sin(theta)
+ x, y = r * numpy.cos(theta), r * numpy.sin(theta)
self.y = zeniths
if ax.firsttime:
if self.zlimits is not None:
self.zmin, self.zmax = self.zlimits[n]
ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
vmin=self.zmin,
vmax=self.zmax,
@@ -293,7 +293,7 @@ class PolarMapPlot(Plot):
if self.zlimits is not None:
self.zmin, self.zmax = self.zlimits[n]
ax.collections.remove(ax.collections[0])
ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
vmin=self.zmin,
vmax=self.zmax,
@@ -339,8 +339,8 @@ class PolarMapPlot(Plot):
ax.add_artist(plt.Circle((self.lon, self.lat),
km2deg(r), color='0.6', fill=False, lw=0.2))
ax.text(
- self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
- self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
+ self.lon + (km2deg(r)) * numpy.cos(60 * numpy.pi / 180),
+ self.lat + (km2deg(r)) * numpy.sin(60 * numpy.pi / 180),
'{}km'.format(r),
ha='center', va='bottom', size='8', color='0.6', weight='heavy')
diff --git a/schainpy/model/graphics/jroplot_spectra.py b/schainpy/model/graphics/jroplot_spectra.py
index 8a5d813..9c7c8b2 100644
--- a/schainpy/model/graphics/jroplot_spectra.py
+++ b/schainpy/model/graphics/jroplot_spectra.py
@@ -17,7 +17,7 @@ class SpectraPlot(Plot):
Plot for Spectra data
'''
CODE = 'spc'
colormap = 'jet'
plot_type = 'pcolor'
buffering = False
@@ -39,11 +39,11 @@ class SpectraPlot(Plot):
data = {}
meta = {}
- spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
+ spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
data['spc'] = spc
data['rti'] = dataOut.getPower()
- data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
- meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
+ data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
+ meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
if self.CODE == 'spc_moments':
data['moments'] = dataOut.moments
@@ -132,7 +133,7 @@ class CrossSpectraPlot(Plot):
spc = dataOut.data_spc
cspc = dataOut.data_cspc
- meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
+ meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
meta['pairs'] = dataOut.pairsList
tmp = []
@@ -170,8 +171,8 @@ class CrossSpectraPlot(Plot):
for n in range(len(self.data.pairs)):
pair = self.data.pairs[n]
- coh = cspc[n*2]
- phase = cspc[n*2+1]
+ coh = cspc[n * 2]
+ phase = cspc[n * 2 + 1]
ax = self.axes[2 * n]
if ax.firsttime:
ax.plt = ax.pcolormesh(x, y, coh.T,
@@ -222,7 +223,7 @@ class RTIPlot(Plot):
data = {}
meta = {}
data['rti'] = dataOut.getPower()
- data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
+ data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor)
return data, meta
@@ -279,7 +280,7 @@ class CoherencePlot(RTIPlot):
self.nplots = len(self.data.pairs)
self.ylabel = 'Range [km]'
self.xlabel = 'Time'
- self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
+ self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1, 'right':0.95})
if self.CODE == 'coh':
self.cb_label = ''
self.titles = [
@@ -338,7 +339,7 @@ class NoisePlot(Plot):
data = {}
meta = {}
- data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
+ data['noise'] = 10 * numpy.log10(dataOut.getNoise() / dataOut.normFactor).reshape(dataOut.nChannels, 1)
meta['yrange'] = numpy.array([])
return data, meta
@@ -395,8 +396,8 @@ class PowerProfilePlot(Plot):
x = self.data[-1][self.CODE]
- if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
- if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
+ if self.xmin is None: self.xmin = numpy.nanmin(x) * 0.9
+ if self.xmax is None: self.xmax = numpy.nanmax(x) * 1.1
if self.axes[0].firsttime:
for ch in self.data.channels:
@@ -428,9 +429,9 @@ class SpectraCutPlot(Plot):
data = {}
meta = {}
- spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
+ spc = 10 * numpy.log10(dataOut.data_spc / dataOut.normFactor)
data['spc'] = spc
- meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
+ meta['xrange'] = (dataOut.getFreqRange(1) / 1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
return data, meta
@@ -453,7 +454,7 @@ class SpectraCutPlot(Plot):
if self.height_index:
index = numpy.array(self.height_index)
else:
- index = numpy.arange(0, len(y), int((len(y))/9))
+ index = numpy.arange(0, len(y), int((len(y)) / 9))
for n, ax in enumerate(self.axes):
if ax.firsttime:
@@ -479,7 +480,7 @@ class BeaconPhase(Plot):
def __init__(self):
Plot.__init__(self)
- self.timerange = 24*60*60
+ self.timerange = 24 * 60 * 60
self.isConfig = False
self.__nsubplots = 1
self.counter_imagwr = 0
@@ -520,25 +521,25 @@ class BeaconPhase(Plot):
colspan = 6
self.__nsubplots = 2
- self.createFigure(id = id,
- wintitle = wintitle,
- widthplot = self.WIDTH+self.WIDTHPROF,
- heightplot = self.HEIGHT+self.HEIGHTPROF,
+ self.createFigure(id=id,
+ wintitle=wintitle,
+ widthplot=self.WIDTH + self.WIDTHPROF,
+ heightplot=self.HEIGHT + self.HEIGHTPROF,
show=show)
nrow, ncol = self.getSubplots()
- self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
+ self.addAxes(nrow, ncol * ncolspan, 0, 0, colspan, 1)
def save_phase(self, filename_phase):
- f = open(filename_phase,'w+')
+ f = open(filename_phase, 'w+')
f.write('\n\n')
f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
- f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
+ f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n')
f.close()
def save_data(self, filename_phase, data, data_datetime):
- f=open(filename_phase,'a')
+ f = open(filename_phase, 'a')
timetuple_data = data_datetime.timetuple()
day = str(timetuple_data.tm_mday)
month = str(timetuple_data.tm_mon)
@@ -546,7 +547,7 @@ class BeaconPhase(Plot):
hour = str(timetuple_data.tm_hour)
minute = str(timetuple_data.tm_min)
second = str(timetuple_data.tm_sec)
- f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
+ f.write(day + ' ' + month + ' ' + year + ' ' + hour + ' ' + minute + ' ' + second + ' ' + str(data[0]) + ' ' + str(data[1]) + ' ' + str(data[2]) + ' ' + str(data[3]) + '\n')
f.close()
def plot(self):
@@ -571,7 +572,7 @@ class BeaconPhase(Plot):
pairsIndexList = []
for pair in pairsList:
if pair not in dataOut.pairsList:
- raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
+ raise ValueError("Pair %s is not in dataOut.pairsList" % (pair))
pairsIndexList.append(dataOut.pairsList.index(pair))
if pairsIndexList == []:
@@ -592,28 +593,28 @@ class BeaconPhase(Plot):
hmin_index = hmin_list[0]
if hmax_list.any():
- hmax_index = hmax_list[-1]+1
+ hmax_index = hmax_list[-1] + 1
x = dataOut.getTimeRange()
thisDatetime = dataOut.datatime
- title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
+ title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
xlabel = "Local Time"
ylabel = "Phase (degrees)"
update_figfile = False
nplots = len(pairsIndexList)
- #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
+ # phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
phase_beacon = numpy.zeros(len(pairsIndexList))
for i in range(nplots):
pair = dataOut.pairsList[pairsIndexList[i]]
ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
- avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
- phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
+ avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
+ phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real) * 180 / numpy.pi
if dataOut.beacon_heiIndexList:
phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
@@ -651,31 +652,31 @@ class BeaconPhase(Plot):
update_figfile = True
- #open file beacon phase
- path = '%s%03d' %(self.PREFIX, self.id)
- beacon_file = os.path.join(path,'%s.txt'%self.name)
- self.filename_phase = os.path.join(figpath,beacon_file)
- #self.save_phase(self.filename_phase)
+ # open file beacon phase
+ path = '%s%03d' % (self.PREFIX, self.id)
+ beacon_file = os.path.join(path, '%s.txt' % self.name)
+ self.filename_phase = os.path.join(figpath, beacon_file)
+ # self.save_phase(self.filename_phase)
- #store data beacon phase
- #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
+ # store data beacon phase
+ # self.save_data(self.filename_phase, phase_beacon, thisDatetime)
self.setWinTitle(title)
- title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
+ title = "Phase Plot %s" % (thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
- legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
+ legendlabels = ["Pair (%d,%d)" % (pair[0], pair[1]) for pair in dataOut.pairsList]
axes = self.axesList[0]
self.xdata = numpy.hstack((self.xdata, x[0:1]))
- if len(self.ydata)==0:
- self.ydata = phase_beacon.reshape(-1,1)
+ if len(self.ydata) == 0:
+ self.ydata = phase_beacon.reshape(-1, 1)
else:
- self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
+ self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1, 1)))
axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
@@ -699,4 +700,4 @@ class BeaconPhase(Plot):
thisDatetime=thisDatetime,
update_figfile=update_figfile)
- return dataOut
\ No newline at end of file
+ return dataOut
diff --git a/schainpy/model/graphics/jroplot_voltage.py b/schainpy/model/graphics/jroplot_voltage.py
index 6faf42a..e2663ac 100644
--- a/schainpy/model/graphics/jroplot_voltage.py
+++ b/schainpy/model/graphics/jroplot_voltage.py
@@ -54,9 +54,9 @@ class ScopePlot(Plot):
def plot_iq(self, x, y, channelIndexList, thisDatetime, wintitle):
- yreal = y[channelIndexList,:].real
- yimag = y[channelIndexList,:].imag
- title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y"))
+ yreal = y[channelIndexList, :].real
+ yimag = y[channelIndexList, :].imag
+ title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y"))
self.xlabel = "Range (Km)"
self.ylabel = "Intensity - IQ"
@@ -65,32 +65,32 @@ class ScopePlot(Plot):
self.titles[0] = title
- for i,ax in enumerate(self.axes):
- title = "Channel %d" %(i)
+ for i, ax in enumerate(self.axes):
+ title = "Channel %d" % (i)
if ax.firsttime:
self.xmin = min(x)
self.xmax = max(x)
- ax.plt_r = ax.plot(x, yreal[i,:], color='b')[0]
- ax.plt_i = ax.plot(x, yimag[i,:], color='r')[0]
+ ax.plt_r = ax.plot(x, yreal[i, :], color='b')[0]
+ ax.plt_i = ax.plot(x, yimag[i, :], color='r')[0]
else:
- ax.plt_r.set_data(x, yreal[i,:])
- ax.plt_i.set_data(x, yimag[i,:])
+ ax.plt_r.set_data(x, yreal[i, :])
+ ax.plt_i.set_data(x, yimag[i, :])
def plot_power(self, x, y, channelIndexList, thisDatetime, wintitle):
- y = y[channelIndexList,:] * numpy.conjugate(y[channelIndexList,:])
+ y = y[channelIndexList, :] * numpy.conjugate(y[channelIndexList, :])
yreal = y.real
- yreal = 10*numpy.log10(yreal)
+ yreal = 10 * numpy.log10(yreal)
self.y = yreal
- title = wintitle + " Power: %s" %(thisDatetime.strftime("%d-%b-%Y"))
+ title = wintitle + " Power: %s" % (thisDatetime.strftime("%d-%b-%Y"))
self.xlabel = "Range (Km)"
self.ylabel = "Intensity [dB]"
self.titles[0] = title
- for i,ax in enumerate(self.axes):
- title = "Channel %d" %(i)
- ychannel = yreal[i,:]
+ for i, ax in enumerate(self.axes):
+ title = "Channel %d" % (i)
+ ychannel = yreal[i, :]
if ax.firsttime:
self.xmin = min(x)
@@ -102,66 +102,66 @@ class ScopePlot(Plot):
def plot_weatherpower(self, x, y, channelIndexList, thisDatetime, wintitle):
- y = y[channelIndexList,:]
- yreal = y.real
- yreal = 10*numpy.log10(yreal)
+ y = y[channelIndexList, :]
+ yreal = y.real
+ yreal = 10 * numpy.log10(yreal)
self.y = yreal
- title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
+ title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
self.xlabel = "Range (Km)"
self.ylabel = "Intensity"
- self.xmin = min(x)
- self.xmax = max(x)
+ self.xmin = min(x)
+ self.xmax = max(x)
- self.titles[0] =title
- for i,ax in enumerate(self.axes):
- title = "Channel %d" %(i)
+ self.titles[0] = title
+ for i, ax in enumerate(self.axes):
+ title = "Channel %d" % (i)
- ychannel = yreal[i,:]
+ ychannel = yreal[i, :]
if ax.firsttime:
ax.plt_r = ax.plot(x, ychannel)[0]
else:
- #pass
+ # pass
ax.plt_r.set_data(x, ychannel)
def plot_weathervelocity(self, x, y, channelIndexList, thisDatetime, wintitle):
- x = x[channelIndexList,:]
- yreal = y
+ x = x[channelIndexList, :]
+ yreal = y
self.y = yreal
- title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
+ title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
self.xlabel = "Velocity (m/s)"
self.ylabel = "Range (Km)"
- self.xmin = numpy.min(x)
- self.xmax = numpy.max(x)
- self.titles[0] =title
- for i,ax in enumerate(self.axes):
- title = "Channel %d" %(i)
- xchannel = x[i,:]
+ self.xmin = numpy.min(x)
+ self.xmax = numpy.max(x)
+ self.titles[0] = title
+ for i, ax in enumerate(self.axes):
+ title = "Channel %d" % (i)
+ xchannel = x[i, :]
if ax.firsttime:
ax.plt_r = ax.plot(xchannel, yreal)[0]
else:
- #pass
+ # pass
ax.plt_r.set_data(xchannel, yreal)
def plot_weatherspecwidth(self, x, y, channelIndexList, thisDatetime, wintitle):
- x = x[channelIndexList,:]
- yreal = y
+ x = x[channelIndexList, :]
+ yreal = y
self.y = yreal
- title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
+ title = wintitle + " Scope: %s" % (thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
self.xlabel = "width "
self.ylabel = "Range (Km)"
- self.xmin = numpy.min(x)
- self.xmax = numpy.max(x)
- self.titles[0] =title
- for i,ax in enumerate(self.axes):
- title = "Channel %d" %(i)
- xchannel = x[i,:]
+ self.xmin = numpy.min(x)
+ self.xmax = numpy.max(x)
+ self.titles[0] = title
+ for i, ax in enumerate(self.axes):
+ title = "Channel %d" % (i)
+ xchannel = x[i, :]
if ax.firsttime:
ax.plt_r = ax.plot(xchannel, yreal)[0]
else:
- #pass
+ # pass
ax.plt_r.set_data(xchannel, yreal)
def plot(self):
@@ -178,11 +178,11 @@ class ScopePlot(Plot):
for i in range(self.data.nProfiles):
- wintitle1 = " [Profile = %d] " %i
- if self.CODE =="scope":
+ wintitle1 = " [Profile = %d] " % i
+ if self.CODE == "scope":
if self.type == "power":
self.plot_power(self.data.yrange,
- scope[:,i,:],
+ scope[:, i, :],
channels,
thisDatetime,
wintitle1
@@ -190,42 +190,42 @@ class ScopePlot(Plot):
if self.type == "iq":
self.plot_iq(self.data.yrange,
- scope[:,i,:],
+ scope[:, i, :],
channels,
thisDatetime,
wintitle1
)
- if self.CODE=="pp_power":
+ if self.CODE == "pp_power":
self.plot_weatherpower(self.data.yrange,
- scope[:,i,:],
+ scope[:, i, :],
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_signal":
+ if self.CODE == "pp_signal":
self.plot_weatherpower(self.data.yrange,
- scope[:,i,:],
+ scope[:, i, :],
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_velocity":
- self.plot_weathervelocity(scope[:,i,:],
+ if self.CODE == "pp_velocity":
+ self.plot_weathervelocity(scope[:, i, :],
self.data.yrange,
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_spcwidth":
- self.plot_weatherspecwidth(scope[:,i,:],
+ if self.CODE == "pp_spcwidth":
+ self.plot_weatherspecwidth(scope[:, i, :],
self.data.yrange,
channels,
thisDatetime,
wintitle
)
else:
- wintitle = " [Profile = %d] " %self.data.profileIndex
- if self.CODE== "scope":
+ wintitle = " [Profile = %d] " % self.data.profileIndex
+ if self.CODE == "scope":
if self.type == "power":
self.plot_power(self.data.yrange,
scope,
@@ -241,28 +241,28 @@ class ScopePlot(Plot):
thisDatetime,
wintitle
)
- if self.CODE=="pp_power":
+ if self.CODE == "pp_power":
self.plot_weatherpower(self.data.yrange,
scope,
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_signal":
+ if self.CODE == "pp_signal":
self.plot_weatherpower(self.data.yrange,
scope,
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_velocity":
+ if self.CODE == "pp_velocity":
self.plot_weathervelocity(scope,
self.data.yrange,
channels,
thisDatetime,
wintitle
)
- if self.CODE=="pp_specwidth":
+ if self.CODE == "pp_specwidth":
self.plot_weatherspecwidth(scope,
self.data.yrange,
channels,
diff --git a/schainpy/model/graphics/plotting_codes.py b/schainpy/model/graphics/plotting_codes.py
index 3432f0c..59a815c 100644
--- a/schainpy/model/graphics/plotting_codes.py
+++ b/schainpy/model/graphics/plotting_codes.py
@@ -1,23 +1,23 @@
'''
@author: roj-idl71
'''
-#USED IN jroplot_spectra.py
-RTI_CODE = 0 #Range time intensity (RTI).
-SPEC_CODE = 1 #Spectra (and Cross-spectra) information.
-CROSS_CODE = 2 #Cross-Correlation information.
-COH_CODE = 3 #Coherence map.
-BASE_CODE = 4 #Base lines graphic.
-ROW_CODE = 5 #Row Spectra.
-TOTAL_CODE = 6 #Total Power.
-DRIFT_CODE = 7 #Drifts graphics.
-HEIGHT_CODE = 8 #Height profile.
-PHASE_CODE = 9 #Signal Phase.
+# USED IN jroplot_spectra.py
+RTI_CODE = 0 # Range time intensity (RTI).
+SPEC_CODE = 1 # Spectra (and Cross-spectra) information.
+CROSS_CODE = 2 # Cross-Correlation information.
+COH_CODE = 3 # Coherence map.
+BASE_CODE = 4 # Base lines graphic.
+ROW_CODE = 5 # Row Spectra.
+TOTAL_CODE = 6 # Total Power.
+DRIFT_CODE = 7 # Drifts graphics.
+HEIGHT_CODE = 8 # Height profile.
+PHASE_CODE = 9 # Signal Phase.
POWER_CODE = 16
NOISE_CODE = 17
BEACON_CODE = 18
-#USED IN jroplot_parameters.py
+# USED IN jroplot_parameters.py
WIND_CODE = 22
MSKYMAP_CODE = 23
MPHASE_CODE = 24
diff --git a/schainpy/model/io/MIRAtest.py b/schainpy/model/io/MIRAtest.py
index a4e8e20..942a358 100644
--- a/schainpy/model/io/MIRAtest.py
+++ b/schainpy/model/io/MIRAtest.py
@@ -24,9 +24,9 @@ except:
from time import sleep
from schainpy.model.data.jrodata import Spectra
-#from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
+# from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
-#from schainpy.model.io.jroIO_bltr import BLTRReader
+# from schainpy.model.io.jroIO_bltr import BLTRReader
from numpy import imag, shape, NaN
@@ -225,26 +225,26 @@ SPARprc = header['SPARprc'][0]
SRVI_STRUCTURE = numpy.dtype([
('frame_cnt', 'self.datatime.date():
+ if dt.date() > self.datatime.date():
self.flagDiscontinuousBlock = 1
self.datatime = dt
@@ -352,4 +352,4 @@ class BLTRParamReader(Reader, ProcessingUnit):
self.getData()
- return
\ No newline at end of file
+ return
diff --git a/schainpy/model/io/bltrIO_spectra.py b/schainpy/model/io/bltrIO_spectra.py
index f98874d..778b84c 100644
--- a/schainpy/model/io/bltrIO_spectra.py
+++ b/schainpy/model/io/bltrIO_spectra.py
@@ -181,8 +181,8 @@ class RecordHeaderBLTR():
self.Off2StartData = int(header['Off2StartData'][0])
self.nUtime = header['nUtime'][0]
self.nMilisec = header['nMilisec'][0]
- self.ExpTagName = '' # str(header['ExpTagName'][0])
- self.ExpComment = '' # str(header['ExpComment'][0])
+ self.ExpTagName = '' # str(header['ExpTagName'][0])
+ self.ExpComment = '' # str(header['ExpComment'][0])
self.SiteLatDegrees = header['SiteLatDegrees'][0]
self.SiteLongDegrees = header['SiteLongDegrees'][0]
self.RTCgpsStatus = header['RTCgpsStatus'][0]
@@ -293,7 +293,7 @@ class BLTRSpectraReader (ProcessingUnit):
self.getData()
- def setup(self,
+ def setup(self,
path=None,
startDate=None,
endDate=None,
@@ -374,7 +374,7 @@ class BLTRSpectraReader (ProcessingUnit):
return
if self.mode == 1:
- self.rheader.read(self.BlockCounter+1)
+ self.rheader.read(self.BlockCounter + 1)
elif self.mode == 0:
self.rheader.read(self.BlockCounter)
@@ -393,13 +393,13 @@ class BLTRSpectraReader (ProcessingUnit):
self.dataOut.nRdPairs = self.nRdPairs
self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
self.dataOut.channelList = range(self.nChannels)
- self.dataOut.nProfiles=self.rheader.nProfiles
- self.dataOut.nIncohInt=self.rheader.nIncohInt
- self.dataOut.nCohInt=self.rheader.nCohInt
- self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
- self.dataOut.PRF=self.rheader.PRFhz
- self.dataOut.nFFTPoints=self.rheader.nProfiles
- self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
+ self.dataOut.nProfiles = self.rheader.nProfiles
+ self.dataOut.nIncohInt = self.rheader.nIncohInt
+ self.dataOut.nCohInt = self.rheader.nCohInt
+ self.dataOut.ippSeconds = 1 / float(self.rheader.PRFhz)
+ self.dataOut.PRF = self.rheader.PRFhz
+ self.dataOut.nFFTPoints = self.rheader.nProfiles
+ self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec / 1000.
self.dataOut.timeZone = 0
self.dataOut.useLocalTime = False
self.dataOut.nmodes = 2
@@ -408,10 +408,10 @@ class BLTRSpectraReader (ProcessingUnit):
self.Off2StartNxtRec + self.Off2StartData
self.fp.seek(OffDATA, os.SEEK_SET)
- self.data_fft = numpy.fromfile(self.fp, [('complex','=self.startDate and thisDate <= self.endDate):
+ if (thisDate >= self.startDate and thisDate <= self.endDate):
return amisr_dirname_format
except:
return None
- def __findDataForDates(self,online=False):
+ def __findDataForDates(self, online=False):
@@ -168,13 +168,13 @@ class AMISRReader(ProcessingUnit):
return None
pat = '\d+.\d+'
- dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
- dirnameList = [x for x in dirnameList if x!=None]
+ dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
+ dirnameList = [x for x in dirnameList if x != None]
dirnameList = [x.string for x in dirnameList]
if not(online):
dirnameList = [self.__selDates(x) for x in dirnameList]
- dirnameList = [x for x in dirnameList if x!=None]
- if len(dirnameList)>0:
+ dirnameList = [x for x in dirnameList if x != None]
+ if len(dirnameList) > 0:
self.status = 1
self.dirnameList = dirnameList
self.dirnameList.sort()
@@ -183,34 +183,34 @@ class AMISRReader(ProcessingUnit):
return None
def __getTimeFromData(self):
- startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
- endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
+ startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
+ endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
- print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
+ print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
print('........................................')
filter_filenameList = []
self.filenameList.sort()
- for i in range(len(self.filenameList)-1):
+ for i in range(len(self.filenameList) - 1):
filename = self.filenameList[i]
- fp = h5py.File(filename,'r')
+ fp = h5py.File(filename, 'r')
time_str = fp.get('Time/RadacTimeString')
startDateTimeStr_File = time_str[0][0].split('.')[0]
junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
- startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
+ startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
endDateTimeStr_File = time_str[-1][-1].split('.')[0]
junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
- endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
+ endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
fp.close()
if self.timezone == 'lt':
- startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
- endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
+ startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
+ endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
- if (endDateTime_File>=startDateTime_Reader and endDateTime_File= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
+ # self.filenameList.remove(filename)
filter_filenameList.append(filename)
filter_filenameList.sort()
@@ -218,7 +218,7 @@ class AMISRReader(ProcessingUnit):
return 1
def __filterByGlob1(self, dirName):
- filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
+ filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
filterDict = {}
filterDict.setdefault(dirName)
filterDict[dirName] = filter_files
@@ -233,21 +233,21 @@ class AMISRReader(ProcessingUnit):
def __selectDataForTimes(self, online=False):
- #aun no esta implementado el filtro for tiempo
+ # aun no esta implementado el filtro for tiempo
if not(self.status):
return None
- dirList = [os.path.join(self.path,x) for x in self.dirnameList]
+ dirList = [os.path.join(self.path, x) for x in self.dirnameList]
fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
self.__getFilenameList(fileListInKeys, dirList)
if not(online):
- #filtro por tiempo
+ # filtro por tiempo
if not(self.all):
self.__getTimeFromData()
- if len(self.filenameList)>0:
+ if len(self.filenameList) > 0:
self.status = 1
self.filenameList.sort()
else:
@@ -255,7 +255,7 @@ class AMISRReader(ProcessingUnit):
return None
else:
- #get the last file - 1
+ # get the last file - 1
self.filenameList = [self.filenameList[-2]]
new_dirnameList = []
@@ -291,8 +291,8 @@ class AMISRReader(ProcessingUnit):
path,
startDate,
endDate,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
walk=True):
self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -304,7 +304,7 @@ class AMISRReader(ProcessingUnit):
self.__selectDataForTimes()
for i in range(len(self.filenameList)):
- print("%s" %(self.filenameList[i]))
+ print("%s" % (self.filenameList[i]))
return
@@ -320,7 +320,7 @@ class AMISRReader(ProcessingUnit):
filename = self.filenameList[idFile]
- amisrFilePointer = h5py.File(filename,'r')
+ amisrFilePointer = h5py.File(filename, 'r')
break
@@ -330,7 +330,7 @@ class AMISRReader(ProcessingUnit):
self.amisrFilePointer = amisrFilePointer
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
@@ -341,47 +341,47 @@ class AMISRReader(ProcessingUnit):
self.__selectDataForTimes(online=True)
filename = self.filenameList[0]
while self.__filename_online == filename:
- print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
+ print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
sleep(self.__waitForNewFile)
self.__selectDataForTimes(online=True)
filename = self.filenameList[0]
self.__filename_online = filename
- self.amisrFilePointer = h5py.File(filename,'r')
+ self.amisrFilePointer = h5py.File(filename, 'r')
self.flagIsNewFile = 1
self.filename = filename
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
def __readHeader(self):
self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
- #update values from experiment cfg file
+ # update values from experiment cfg file
if self.radacHeaderObj.nrecords == self.recordsperfile_fromfile:
self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
self.radacHeaderObj.npulses = self.npulsesint_fromfile
self.radacHeaderObj.nsamples = self.ngates_fromfile
- #looking index list for data
- start_index = self.radacHeaderObj.pulseCount[0,:][0]
+ # looking index list for data
+ start_index = self.radacHeaderObj.pulseCount[0, :][0]
end_index = self.radacHeaderObj.npulses
range4data = list(range(start_index, end_index))
self.index4_schain_datablock = numpy.array(range4data)
buffer_start_index = 0
- buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
+ buffer_end_index = self.radacHeaderObj.pulseCount[0, :][0]
range4buffer = list(range(buffer_start_index, buffer_end_index))
self.index4_buffer = numpy.array(range4buffer)
self.linear_pulseCount = numpy.array(range4data + range4buffer)
- self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0,:]+1)
+ self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0, :] + 1)
- #get tuning frequency
- frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
- self.frequency_h5file = frequency_h5file_dataset[0,0]
+ # get tuning frequency
+ frequency_h5file_dataset = self.amisrFilePointer.get('Rx' + '/TuningFrequency')
+ self.frequency_h5file = frequency_h5file_dataset[0, 0]
self.flagIsNewFile = 1
@@ -391,20 +391,20 @@ class AMISRReader(ProcessingUnit):
beamCodeMap = self.amisrFilePointer.get('Setup/BeamcodeMap')
- for i in range(len(self.radacHeaderObj.beamCode[0,:])):
+ for i in range(len(self.radacHeaderObj.beamCode[0, :])):
self.beamCodeDict.setdefault(i)
self.beamRangeDict.setdefault(i)
- beamcodeValue = self.radacHeaderObj.beamCode[0,i]
- beamcodeIndex = numpy.where(beamCodeMap[:,0] == beamcodeValue)[0][0]
+ beamcodeValue = self.radacHeaderObj.beamCode[0, i]
+ beamcodeIndex = numpy.where(beamCodeMap[:, 0] == beamcodeValue)[0][0]
x = beamCodeMap[beamcodeIndex][1]
y = beamCodeMap[beamcodeIndex][2]
z = beamCodeMap[beamcodeIndex][3]
self.beamCodeDict[i] = [beamcodeValue, x, y, z]
- just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
+ just4record0 = self.radacHeaderObj.beamCodeByPulse[0, :]
for i in range(len(list(self.beamCodeDict.values()))):
- xx = numpy.where(just4record0==list(self.beamCodeDict.values())[i][0])
+ xx = numpy.where(just4record0 == list(self.beamCodeDict.values())[i][0])
indexPulseByBeam = self.linear_pulseCount[xx[0]]
self.beamRangeDict[i] = indexPulseByBeam
@@ -414,22 +414,22 @@ class AMISRReader(ProcessingUnit):
experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')
- expFinder = glob.glob1(experimentCfgPath,'*.exp')
- if len(expFinder)== 0:
+ expFinder = glob.glob1(experimentCfgPath, '*.exp')
+ if len(expFinder) == 0:
self.status = 0
return None
- experimentFilename = os.path.join(experimentCfgPath,expFinder[0])
+ experimentFilename = os.path.join(experimentCfgPath, expFinder[0])
f = open(experimentFilename)
lines = f.readlines()
f.close()
- parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
+ parmsList = ['npulsesint*', 'recordsperfile*', 'nbeamcodes*', 'ngates*']
filterList = [fnmatch.filter(lines, x) for x in parmsList]
- values = [re.sub(r'\D',"",x[0]) for x in filterList]
+ values = [re.sub(r'\D', "", x[0]) for x in filterList]
self.npulsesint_fromfile = int(values[0])
self.recordsperfile_fromfile = int(values[1])
@@ -439,12 +439,12 @@ class AMISRReader(ProcessingUnit):
tufileFinder = fnmatch.filter(lines, 'tufile=*')
tufile = tufileFinder[0].split('=')[1].split('\n')[0]
tufile = tufile.split('\r')[0]
- tufilename = os.path.join(experimentCfgPath,tufile)
+ tufilename = os.path.join(experimentCfgPath, tufile)
f = open(tufilename)
lines = f.readlines()
f.close()
- self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6
+ self.ippSeconds_fromfile = float(lines[1].split()[2]) / 1E6
self.status = 1
@@ -459,7 +459,7 @@ class AMISRReader(ProcessingUnit):
self.buffer_radactime = numpy.zeros_like(self.radacTimeByFrame)
- def __setNextFile(self,online=False):
+ def __setNextFile(self, online=False):
if not(online):
newFile = self.__setNextFileOffline()
@@ -479,11 +479,11 @@ class AMISRReader(ProcessingUnit):
self.readDataBlock()
- def setup(self,path=None,
- startDate=None,
- endDate=None,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ def setup(self, path=None,
+ startDate=None,
+ endDate=None,
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
walk=True,
timezone='ut',
all=0,
@@ -493,13 +493,13 @@ class AMISRReader(ProcessingUnit):
self.all = all
self.online = online
if not(online):
- #Busqueda de archivos offline
+ # Busqueda de archivos offline
self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
else:
self.searchFilesOnLine(path, walk)
if not(self.filenameList):
- print("There is no files into the folder: %s"%(path))
+ print("There is no files into the folder: %s" % (path))
sys.exit(-1)
@@ -511,22 +511,22 @@ class AMISRReader(ProcessingUnit):
# first_beamcode = self.radacHeaderObj.beamCodeByPulse[0,0]
# index = numpy.where(self.radacHeaderObj.beamCodeByPulse[0,:]!=first_beamcode)[0][0]
- self.profileIndex_offset = self.radacHeaderObj.pulseCount[0,:][0]
+ self.profileIndex_offset = self.radacHeaderObj.pulseCount[0, :][0]
self.profileIndex = self.profileIndex_offset
def readRanges(self):
dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
- self.rangeFromFile = numpy.reshape(dataset.value,(-1))
+ self.rangeFromFile = numpy.reshape(dataset.value, (-1))
return self.rangeFromFile
- def readRadacTime(self,idrecord, range1, range2):
+ def readRadacTime(self, idrecord, range1, range2):
self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
- #radacTimeByFrame = dataset[idrecord - 1,range1]
- #radacTimeByFrame = dataset[idrecord,range2]
+ # radacTimeByFrame = dataset[idrecord - 1,range1]
+ # radacTimeByFrame = dataset[idrecord,range2]
return radacTimeByFrame
@@ -535,8 +535,8 @@ class AMISRReader(ProcessingUnit):
beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
self.beamCodesFromFile = dataset.value
- #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
- #beamcodeByFrame[range2] = dataset[idrecord, range2]
+ # beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
+ # beamcodeByFrame[range2] = dataset[idrecord, range2]
beamcodeByFrame[range1] = dataset[idrecord, range1]
beamcodeByFrame[range2] = dataset[idrecord, range2]
@@ -544,7 +544,7 @@ class AMISRReader(ProcessingUnit):
def __setDataByFrame(self):
- ndata = 2 # porque es complejo
+ ndata = 2 # porque es complejo
dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
return dataByFrame
@@ -553,31 +553,31 @@ class AMISRReader(ProcessingUnit):
return dataset
def __setDataBlock(self,):
- real = self.dataByFrame[:,:,0] #asumo que 0 es real
- imag = self.dataByFrame[:,:,1] #asumo que 1 es imaginario
- datablock = real + imag*1j #armo el complejo
+ real = self.dataByFrame[:, :, 0] # asumo que 0 es real
+ imag = self.dataByFrame[:, :, 1] # asumo que 1 es imaginario
+ datablock = real + imag * 1j # armo el complejo
return datablock
- def readSamples_version1(self,idrecord):
- #estas tres primeras lineas solo se deben ejecutar una vez
+ def readSamples_version1(self, idrecord):
+ # estas tres primeras lineas solo se deben ejecutar una vez
if self.flagIsNewFile:
- #reading dataset
+ # reading dataset
self.dataset = self.__readDataSet()
self.flagIsNewFile = 0
if idrecord == 0:
- self.dataByFrame[self.index4_schain_datablock, : ,:] = self.dataset[0, self.index_amisr_sample,:,:]
+ self.dataByFrame[self.index4_schain_datablock, : , :] = self.dataset[0, self.index_amisr_sample, :, :]
self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[0, self.index_amisr_sample]
datablock = self.__setDataBlock()
if len(self.index_amisr_buffer) > 0:
- self.buffer = self.dataset[0, self.index_amisr_buffer,:,:]
+ self.buffer = self.dataset[0, self.index_amisr_buffer, :, :]
self.buffer_radactime = self.radacHeaderObj.radacTime[0, self.index_amisr_buffer]
return datablock
if len(self.index_amisr_buffer) > 0:
- self.dataByFrame[self.index4_buffer,:,:] = self.buffer.copy()
+ self.dataByFrame[self.index4_buffer, :, :] = self.buffer.copy()
self.radacTimeByFrame[self.index4_buffer] = self.buffer_radactime.copy()
- self.dataByFrame[self.index4_schain_datablock,:,:] = self.dataset[idrecord, self.index_amisr_sample,:,:]
+ self.dataByFrame[self.index4_schain_datablock, :, :] = self.dataset[idrecord, self.index_amisr_sample, :, :]
self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_sample]
datablock = self.__setDataBlock()
if len(self.index_amisr_buffer) > 0:
@@ -587,14 +587,14 @@ class AMISRReader(ProcessingUnit):
return datablock
- def readSamples(self,idrecord):
+ def readSamples(self, idrecord):
if self.flagIsNewFile:
self.dataByFrame = self.__setDataByFrame()
self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
- #reading ranges
+ # reading ranges
self.readRanges()
- #reading dataset
+ # reading dataset
self.dataset = self.__readDataSet()
self.flagIsNewFile = 0
@@ -607,8 +607,8 @@ class AMISRReader(ProcessingUnit):
def readDataBlock(self):
self.datablock = self.readSamples_version1(self.idrecord_count)
- #self.datablock = self.readSamples(self.idrecord_count)
- #print 'record:', self.idrecord_count
+ # self.datablock = self.readSamples(self.idrecord_count)
+ # print 'record:', self.idrecord_count
self.idrecord_count += 1
self.profileIndex = 0
@@ -626,7 +626,7 @@ class AMISRReader(ProcessingUnit):
pass
def __hasNotDataInBuffer(self):
- #self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
+ # self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
if self.profileIndex >= self.radacHeaderObj.npulses:
return 1
return 0
@@ -637,7 +637,7 @@ class AMISRReader(ProcessingUnit):
def setObjProperties(self):
- self.dataOut.heightList = self.rangeFromFile/1000.0 #km
+ self.dataOut.heightList = self.rangeFromFile / 1000.0 # km
self.dataOut.nProfiles = self.radacHeaderObj.npulses
self.dataOut.nRecords = self.radacHeaderObj.nrecords
self.dataOut.nBeams = self.radacHeaderObj.nbeams
@@ -653,9 +653,9 @@ class AMISRReader(ProcessingUnit):
self.dataOut.beamRangeDict = self.beamRangeDict
if self.timezone == 'lt':
- self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
+ self.dataOut.timeZone = time.timezone / 60. # get the timezone in minutes
else:
- self.dataOut.timeZone = 0 #by default time is UTC
+ self.dataOut.timeZone = 0 # by default time is UTC
def getData(self):
@@ -667,11 +667,11 @@ class AMISRReader(ProcessingUnit):
self.readNextBlock()
- if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
+ if self.datablock is None: # setear esta condicion cuando no hayan datos por leers
self.dataOut.flagNoData = True
return 0
- self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
+ self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex, :], (1, -1))
self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
self.dataOut.profileIndex = self.profileIndex
@@ -688,4 +688,4 @@ class AMISRReader(ProcessingUnit):
self.setObjProperties()
self.isConfig = True
- self.getData()
\ No newline at end of file
+ self.getData()
diff --git a/schainpy/model/io/jroIO_base.py b/schainpy/model/io/jroIO_base.py
index 7d17366..7aadbf5 100644
--- a/schainpy/model/io/jroIO_base.py
+++ b/schainpy/model/io/jroIO_base.py
@@ -194,7 +194,7 @@ def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
# If endTime < startTime then endTime belongs to the next day
- #<<<<<<<<<<>>>>>>>>>>
+ # <<<<<<<<<<>>>>>>>>>>
#-----------o----------------------------o-----------
# endTime startTime
@@ -420,8 +420,8 @@ def parse_format(s, fmt):
for i in range(fmt.count('%')):
x = fmt.index('%')
- d = DT_DIRECTIVES[fmt[x:x+2]]
- fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
+ d = DT_DIRECTIVES[fmt[x:x + 2]]
+ fmt = fmt.replace(fmt[x:x + 2], s[x:x + d])
return fmt
class Reader(object):
@@ -518,7 +518,7 @@ class Reader(object):
continue
return
- def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
+ def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
expLabel='', last=False):
for path in folders:
@@ -548,7 +548,7 @@ class Reader(object):
continue
def searchFilesOffLine(self, path, startDate, endDate,
- expLabel, ext, walk,
+ expLabel, ext, walk,
filefmt, folderfmt):
"""Search files in offline mode for the given arguments
@@ -566,7 +566,7 @@ class Reader(object):
folders, ext, filefmt, startDate, endDate, expLabel)
def searchFilesOnLine(self, path, startDate, endDate,
- expLabel, ext, walk,
+ expLabel, ext, walk,
filefmt, folderfmt):
"""Search for the last file of the last folder
@@ -633,7 +633,7 @@ class Reader(object):
nextFile = True
nextDay = False
- for nFiles in range(self.nFiles+1):
+ for nFiles in range(self.nFiles + 1):
for nTries in range(self.nTries):
fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
if fullfilename is not None:
@@ -674,7 +674,7 @@ class Reader(object):
try:
filename = next(self.filenameList)
- self.fileIndex +=1
+ self.fileIndex += 1
except StopIteration:
self.flagNoMoreFiles = 1
return 0
@@ -803,7 +803,7 @@ class JRODataReader(Reader):
if prefixDir != None:
# formo el nombre del directorio xYYYYDDD (x=d o x=D)
if foldercounter == 0:
- thispath = os.path.join(self.path, "%s%04d%03d" %
+ thispath = os.path.join(self.path, "%s%04d%03d" %
(prefixDir, self.year, self.doy))
else:
thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
@@ -931,7 +931,7 @@ class JRODataReader(Reader):
self.processingHeaderObj.read(self.fp)
self.firstHeaderSize = self.basicHeaderObj.size
- datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
+ datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
if datatype == 0:
datatype_str = numpy.dtype([('real', ' endDate:
+ if thisDate > endDate:
break
dateList.append(thisDate)
@@ -250,10 +250,10 @@ class DigitalRFReader(ProcessingUnit):
online
delay
'''
- self.path = path
- self.nCohInt = nCohInt
+ self.path = path
+ self.nCohInt = nCohInt
self.flagDecodeData = flagDecodeData
- self.i = 0
+ self.i = 0
if not os.path.isdir(path):
raise ValueError("[Reading] Directory %s does not exist" % path)
@@ -263,7 +263,7 @@ class DigitalRFReader(ProcessingUnit):
except:
self.digitalReadObj = digital_rf.DigitalRFReader(path)
- channelNameList = self.digitalReadObj.get_channels()
+ channelNameList = self.digitalReadObj.get_channels()
if not channelNameList:
raise ValueError("[Reading] Directory %s does not have any files" % path)
@@ -273,27 +273,27 @@ class DigitalRFReader(ProcessingUnit):
########## Reading metadata ######################
- top_properties = self.digitalReadObj.get_properties(
+ top_properties = self.digitalReadObj.get_properties(
channelNameList[channelList[0]])
- self.__num_subchannels = top_properties['num_subchannels']
- self.__sample_rate = 1.0 * \
+ self.__num_subchannels = top_properties['num_subchannels']
+ self.__sample_rate = 1.0 * \
top_properties['sample_rate_numerator'] / \
top_properties['sample_rate_denominator']
# self.__samples_per_file = top_properties['samples_per_file'][0]
- self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
+ self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
- this_metadata_file = self.digitalReadObj.get_digital_metadata(
+ this_metadata_file = self.digitalReadObj.get_digital_metadata(
channelNameList[channelList[0]])
- metadata_bounds = this_metadata_file.get_bounds()
+ metadata_bounds = this_metadata_file.get_bounds()
self.fixed_metadata_dict = this_metadata_file.read(
metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
try:
- self.__processingHeader = self.fixed_metadata_dict['processingHeader']
+ self.__processingHeader = self.fixed_metadata_dict['processingHeader']
self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
- self.__systemHeader = self.fixed_metadata_dict['systemHeader']
- self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
+ self.__systemHeader = self.fixed_metadata_dict['systemHeader']
+ self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
except:
pass
@@ -311,15 +311,15 @@ class DigitalRFReader(ProcessingUnit):
self.__firstHeigth = 0
try:
- codeType = self.__radarControllerHeader['codeType']
+ codeType = self.__radarControllerHeader['codeType']
except:
- codeType = 0
+ codeType = 0
try:
if codeType:
nCode = self.__radarControllerHeader['nCode']
nBaud = self.__radarControllerHeader['nBaud']
- code = self.__radarControllerHeader['code']
+ code = self.__radarControllerHeader['code']
except:
pass
@@ -330,18 +330,18 @@ class DigitalRFReader(ProcessingUnit):
except:
ippKm = None
####################################################
- self.__ippKm = ippKm
+ self.__ippKm = ippKm
startUTCSecond = None
- endUTCSecond = None
+ endUTCSecond = None
if startDate:
- startDatetime = datetime.datetime.combine(startDate, startTime)
+ startDatetime = datetime.datetime.combine(startDate, startTime)
startUTCSecond = (
startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
if endDate:
- endDatetime = datetime.datetime.combine(endDate, endTime)
- endUTCSecond = (endDatetime - datetime.datetime(1970,
+ endDatetime = datetime.datetime.combine(endDate, endTime)
+ endUTCSecond = (endDatetime - datetime.datetime(1970,
1, 1)).total_seconds() + self.__timezone
start_index, end_index = self.digitalReadObj.get_bounds(
@@ -350,50 +350,50 @@ class DigitalRFReader(ProcessingUnit):
if not startUTCSecond:
startUTCSecond = start_index / self.__sample_rate
- if start_index > startUTCSecond * self.__sample_rate:
+ if start_index > startUTCSecond * self.__sample_rate:
startUTCSecond = start_index / self.__sample_rate
if not endUTCSecond:
- endUTCSecond = end_index / self.__sample_rate
+ endUTCSecond = end_index / self.__sample_rate
- if end_index < endUTCSecond * self.__sample_rate:
- endUTCSecond = end_index / self.__sample_rate
+ if end_index < endUTCSecond * self.__sample_rate:
+ endUTCSecond = end_index / self.__sample_rate
if not nSamples:
if not ippKm:
raise ValueError("[Reading] nSamples or ippKm should be defined")
- nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
- channelBoundList = []
+ nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
+ channelBoundList = []
channelNameListFiltered = []
for thisIndexChannel in channelList:
- thisChannelName = channelNameList[thisIndexChannel]
+ thisChannelName = channelNameList[thisIndexChannel]
start_index, end_index = self.digitalReadObj.get_bounds(
thisChannelName)
channelBoundList.append((start_index, end_index))
channelNameListFiltered.append(thisChannelName)
self.profileIndex = 0
- self.i = 0
- self.__delay = delay
-
- self.__codeType = codeType
- self.__nCode = nCode
- self.__nBaud = nBaud
- self.__code = code
-
- self.__datapath = path
- self.__online = online
- self.__channelList = channelList
- self.__channelNameList = channelNameListFiltered
+ self.i = 0
+ self.__delay = delay
+
+ self.__codeType = codeType
+ self.__nCode = nCode
+ self.__nBaud = nBaud
+ self.__code = code
+
+ self.__datapath = path
+ self.__online = online
+ self.__channelList = channelList
+ self.__channelNameList = channelNameListFiltered
self.__channelBoundList = channelBoundList
- self.__nSamples = nSamples
- self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
- self.__nChannels = len(self.__channelList)
+ self.__nSamples = nSamples
+ self.__samples_to_read = int(nSamples) # FIJO: AHORA 40
+ self.__nChannels = len(self.__channelList)
- self.__startUTCSecond = startUTCSecond
- self.__endUTCSecond = endUTCSecond
+ self.__startUTCSecond = startUTCSecond
+ self.__endUTCSecond = endUTCSecond
- self.__timeInterval = 1.0 * self.__samples_to_read / \
+ self.__timeInterval = 1.0 * self.__samples_to_read / \
self.__sample_rate # Time interval
if online:
@@ -403,7 +403,7 @@ class DigitalRFReader(ProcessingUnit):
# por que en el otro metodo lo primero q se hace es sumar samplestoread
self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
- self.__data_buffer = numpy.zeros(
+ self.__data_buffer = numpy.zeros(
(self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
self.__setFileHeader()
@@ -420,8 +420,8 @@ class DigitalRFReader(ProcessingUnit):
datetime.datetime.utcfromtimestamp(
endUTCSecond - self.__timezone)
))
- self.oldAverage = None
- self.count = 0
+ self.oldAverage = None
+ self.count = 0
self.executionTime = 0
def __reload(self):
@@ -438,13 +438,13 @@ class DigitalRFReader(ProcessingUnit):
except:
self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
- start_index, end_index = self.digitalReadObj.get_bounds(
+ start_index, end_index = self.digitalReadObj.get_bounds(
self.__channelNameList[self.__channelList[0]])
- if start_index > self.__startUTCSecond * self.__sample_rate:
+ if start_index > self.__startUTCSecond * self.__sample_rate:
self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
- if end_index > self.__endUTCSecond * self.__sample_rate:
+ if end_index > self.__endUTCSecond * self.__sample_rate:
self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
print()
print("[Reading] New timerange found [%s, %s] " % (
@@ -459,14 +459,14 @@ class DigitalRFReader(ProcessingUnit):
return False
def timeit(self, toExecute):
- t0 = time.time()
+ t0 = time.time()
toExecute()
- self.executionTime = time.time() - t0
+ self.executionTime = time.time() - t0
if self.oldAverage is None:
self.oldAverage = self.executionTime
- self.oldAverage = (self.executionTime + self.count *
+ self.oldAverage = (self.executionTime + self.count *
self.oldAverage) / (self.count + 1.0)
- self.count = self.count + 1.0
+ self.count = self.count + 1.0
return
def __readNextBlock(self, seconds=30, volt_scale=1):
@@ -475,7 +475,7 @@ class DigitalRFReader(ProcessingUnit):
# Set the next data
self.__flagDiscontinuousBlock = False
- self.__thisUnixSample += self.__samples_to_read
+ self.__thisUnixSample += self.__samples_to_read
if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
print ("[Reading] There are no more data into selected time-range")
@@ -496,14 +496,14 @@ class DigitalRFReader(ProcessingUnit):
for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
for indexSubchannel in range(self.__num_subchannels):
try:
- t0 = time()
+ t0 = time()
result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
self.__samples_to_read,
thisChannelName, sub_channel=indexSubchannel)
- self.executionTime = time() - t0
+ self.executionTime = time() - t0
if self.oldAverage is None:
self.oldAverage = self.executionTime
- self.oldAverage = (
+ self.oldAverage = (
self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
self.count = self.count + 1.0
@@ -521,11 +521,11 @@ class DigitalRFReader(ProcessingUnit):
break
self.__data_buffer[indexSubchannel, :] = result * volt_scale
- indexChannel+=1
+ indexChannel += 1
- dataOk = True
+ dataOk = True
- self.__utctime = self.__thisUnixSample / self.__sample_rate
+ self.__utctime = self.__thisUnixSample / self.__sample_rate
if not dataOk:
return False
@@ -534,7 +534,7 @@ class DigitalRFReader(ProcessingUnit):
self.__samples_to_read,
self.__timeInterval))
- self.__bufferIndex = 0
+ self.__bufferIndex = 0
return True
@@ -557,16 +557,16 @@ class DigitalRFReader(ProcessingUnit):
self.flagDiscontinuousBlock
self.flagIsNewBlock
'''
- #print("getdata")
+ # print("getdata")
err_counter = 0
self.dataOut.flagNoData = True
if self.__isBufferEmpty():
- #print("hi")
+ # print("hi")
self.__flagDiscontinuousBlock = False
while True:
- #print ("q ha pasado")
+ # print ("q ha pasado")
if self.__readNextBlock():
break
if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
@@ -589,14 +589,14 @@ class DigitalRFReader(ProcessingUnit):
print('[Reading] waiting %d seconds to read a new block' % seconds)
time.sleep(seconds)
- self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
- self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
- self.dataOut.flagNoData = False
+ self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
+ self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
+ self.dataOut.flagNoData = False
self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
- self.dataOut.profileIndex = self.profileIndex
+ self.dataOut.profileIndex = self.profileIndex
self.__bufferIndex += self.__nSamples
- self.profileIndex += 1
+ self.profileIndex += 1
if self.profileIndex == self.dataOut.nProfiles:
self.profileIndex = 0
@@ -627,7 +627,7 @@ class DigitalRFReader(ProcessingUnit):
if not self.isConfig:
self.setup(**kwargs)
- #self.i = self.i+1
+ # self.i = self.i+1
self.getData(seconds=self.__delay)
return
@@ -644,25 +644,25 @@ class DigitalRFWriter(Operation):
'''
Operation.__init__(self, **kwargs)
self.metadata_dict = {}
- self.dataOut = None
- self.dtype = None
- self.oldAverage = 0
+ self.dataOut = None
+ self.dtype = None
+ self.oldAverage = 0
def setHeader(self):
- self.metadata_dict['frequency'] = self.dataOut.frequency
- self.metadata_dict['timezone'] = self.dataOut.timeZone
- self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
- self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
- self.metadata_dict['heightList'] = self.dataOut.heightList
- self.metadata_dict['channelList'] = self.dataOut.channelList
+ self.metadata_dict['frequency'] = self.dataOut.frequency
+ self.metadata_dict['timezone'] = self.dataOut.timeZone
+ self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
+ self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
+ self.metadata_dict['heightList'] = self.dataOut.heightList
+ self.metadata_dict['channelList'] = self.dataOut.channelList
self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
- self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
- self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
- self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
- self.metadata_dict['type'] = self.dataOut.type
- self.metadata_dict['flagDataAsBlock']= getattr(
+ self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
+ self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
+ self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
+ self.metadata_dict['type'] = self.dataOut.type
+ self.metadata_dict['flagDataAsBlock'] = getattr(
self.dataOut, 'flagDataAsBlock', None) # chequear
def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
@@ -672,13 +672,13 @@ class DigitalRFWriter(Operation):
dataOut: Input data will also be outputa data
'''
self.setHeader()
- self.__ippSeconds = dataOut.ippSeconds
- self.__deltaH = dataOut.getDeltaH()
+ self.__ippSeconds = dataOut.ippSeconds
+ self.__deltaH = dataOut.getDeltaH()
self.__sample_rate = 1e6 * 0.15 / self.__deltaH
- self.__dtype = dataOut.dtype
+ self.__dtype = dataOut.dtype
if len(dataOut.dtype) == 2:
self.__dtype = dataOut.dtype[0]
- self.__nSamples = dataOut.systemHeaderObj.nSamples
+ self.__nSamples = dataOut.systemHeaderObj.nSamples
self.__nProfiles = dataOut.nProfiles
if self.dataOut.type != 'Voltage':
@@ -689,44 +689,44 @@ class DigitalRFWriter(Operation):
self.arr_data = numpy.ones((self.__nSamples, len(
self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
- file_cadence_millisecs = 1000
+ file_cadence_millisecs = 1000
- sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
- sample_rate_numerator = int(sample_rate_fraction.numerator)
+ sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
+ sample_rate_numerator = int(sample_rate_fraction.numerator)
sample_rate_denominator = int(sample_rate_fraction.denominator)
- start_global_index = dataOut.utctime * self.__sample_rate
+ start_global_index = dataOut.utctime * self.__sample_rate
- uuid = 'prueba'
+ uuid = 'prueba'
compression_level = 0
- checksum = False
- is_complex = True
- num_subchannels = len(dataOut.channelList)
- is_continuous = True
- marching_periods = False
+ checksum = False
+ is_complex = True
+ num_subchannels = len(dataOut.channelList)
+ is_continuous = True
+ marching_periods = False
self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
fileCadence, start_global_index,
sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
is_complex, num_subchannels, is_continuous, marching_periods)
- metadata_dir = os.path.join(path, 'metadata')
+ metadata_dir = os.path.join(path, 'metadata')
os.system('mkdir %s' % (metadata_dir))
self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
sample_rate_numerator, sample_rate_denominator,
metadataFile)
- self.isConfig = True
+ self.isConfig = True
self.currentSample = 0
- self.oldAverage = 0
- self.count = 0
+ self.oldAverage = 0
+ self.count = 0
return
def writeMetadata(self):
- start_idx = self.__sample_rate * self.dataOut.utctime
+ start_idx = self.__sample_rate * self.dataOut.utctime
- self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
+ self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
)
self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
)
- self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
+ self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
)
self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
return
@@ -734,12 +734,12 @@ class DigitalRFWriter(Operation):
def timeit(self, toExecute):
t0 = time()
toExecute()
- self.executionTime = time() - t0
+ self.executionTime = time() - t0
if self.oldAverage is None:
self.oldAverage = self.executionTime
- self.oldAverage = (self.executionTime + self.count *
+ self.oldAverage = (self.executionTime + self.count *
self.oldAverage) / (self.count + 1.0)
- self.count = self.count + 1.0
+ self.count = self.count + 1.0
return
def writeData(self):
@@ -747,9 +747,9 @@ class DigitalRFWriter(Operation):
raise 'Digital RF cannot be used with this data type'
for channel in self.dataOut.channelList:
for i in range(self.dataOut.nFFTPoints):
- self.arr_data[1][channel * self.dataOut.nFFTPoints +
+ self.arr_data[1][channel * self.dataOut.nFFTPoints +
i]['r'] = self.dataOut.data[channel][i].real
- self.arr_data[1][channel * self.dataOut.nFFTPoints +
+ self.arr_data[1][channel * self.dataOut.nFFTPoints +
i]['i'] = self.dataOut.data[channel][i].imag
else:
for i in range(self.dataOut.systemHeaderObj.nSamples):
@@ -777,12 +777,12 @@ class DigitalRFWriter(Operation):
self.writeData()
- ## self.currentSample += 1
+ # # self.currentSample += 1
# if self.dataOut.flagDataAsBlock or self.currentSample == 1:
# self.writeMetadata()
- ## if self.currentSample == self.__nProfiles: self.currentSample = 0
+ # # if self.currentSample == self.__nProfiles: self.currentSample = 0
- return dataOut# en la version 2.7 no aparece este return
+ return dataOut # en la version 2.7 no aparece este return
def close(self):
print('[Writing] - Closing files ')
diff --git a/schainpy/model/io/jroIO_heispectra.py b/schainpy/model/io/jroIO_heispectra.py
index 3832760..65d9872 100644
--- a/schainpy/model/io/jroIO_heispectra.py
+++ b/schainpy/model/io/jroIO_heispectra.py
@@ -25,23 +25,23 @@ from schainpy.utils import log
class PyFits(object):
- name=None
- format=None
- array =None
- data =None
- thdulist=None
- prihdr=None
- hdu=None
+ name = None
+ format = None
+ array = None
+ data = None
+ thdulist = None
+ prihdr = None
+ hdu = None
def __init__(self):
pass
- def setColF(self,name,format,array):
- self.name=name
- self.format=format
- self.array=array
- a1=numpy.array([self.array],dtype=numpy.float32)
+ def setColF(self, name, format, array):
+ self.name = name
+ self.format = format
+ self.array = array
+ a1 = numpy.array([self.array], dtype=numpy.float32)
self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
return self.col1
@@ -54,35 +54,35 @@ class PyFits(object):
# return self.col2
- def writeData(self,name,format,data):
- self.name=name
- self.format=format
- self.data=data
- a2=numpy.array([self.data],dtype=numpy.float32)
+ def writeData(self, name, format, data):
+ self.name = name
+ self.format = format
+ self.data = data
+ a2 = numpy.array([self.data], dtype=numpy.float32)
self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
return self.col2
- def cFImage(self,idblock,year,month,day,hour,minute,second):
- self.hdu= pyfits.PrimaryHDU(idblock)
- self.hdu.header.set("Year",year)
- self.hdu.header.set("Month",month)
- self.hdu.header.set("Day",day)
- self.hdu.header.set("Hour",hour)
- self.hdu.header.set("Minute",minute)
- self.hdu.header.set("Second",second)
+ def cFImage(self, idblock, year, month, day, hour, minute, second):
+ self.hdu = pyfits.PrimaryHDU(idblock)
+ self.hdu.header.set("Year", year)
+ self.hdu.header.set("Month", month)
+ self.hdu.header.set("Day", day)
+ self.hdu.header.set("Hour", hour)
+ self.hdu.header.set("Minute", minute)
+ self.hdu.header.set("Second", second)
return self.hdu
- def Ctable(self,colList):
- self.cols=pyfits.ColDefs(colList)
+ def Ctable(self, colList):
+ self.cols = pyfits.ColDefs(colList)
self.tbhdu = pyfits.new_table(self.cols)
return self.tbhdu
- def CFile(self,hdu,tbhdu):
- self.thdulist=pyfits.HDUList([hdu,tbhdu])
+ def CFile(self, hdu, tbhdu):
+ self.thdulist = pyfits.HDUList([hdu, tbhdu])
- def wFile(self,filename):
+ def wFile(self, filename):
if os.path.isfile(filename):
os.remove(filename)
self.thdulist.writeto(filename)
@@ -154,7 +154,7 @@ class FitsWriter(Operation):
header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
header_data.header['CHANNELLIST'] = str(dataOut.channelList)
header_data.header['NCHANNELS'] = dataOut.nChannels
- #header_data.header['HEIGHTS'] = dataOut.heightList
+ # header_data.header['HEIGHTS'] = dataOut.heightList
header_data.header['NHEIGHTS'] = dataOut.nHeights
header_data.header['IPPSECONDS'] = dataOut.ippSeconds
@@ -165,7 +165,7 @@ class FitsWriter(Operation):
header_data.writeto(self.filename)
- self.addExtension(dataOut.heightList,'HEIGHTLIST')
+ self.addExtension(dataOut.heightList, 'HEIGHTLIST')
def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
@@ -182,7 +182,7 @@ class FitsWriter(Operation):
def addExtension(self, data, tagname):
self.open()
extension = pyfits.ImageHDU(data=data, name=tagname)
- #extension.header['TAG'] = tagname
+ # extension.header['TAG'] = tagname
self.fitsObj.append(extension)
self.write()
@@ -207,25 +207,25 @@ class FitsWriter(Operation):
ext = self.ext
path = self.path
- timeTuple = time.localtime( self.dataOut.utctime)
- subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
+ timeTuple = time.localtime(self.dataOut.utctime)
+ subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
- fullpath = os.path.join( path, subfolder )
- if not( os.path.exists(fullpath) ):
+ fullpath = os.path.join(path, subfolder)
+ if not(os.path.exists(fullpath)):
os.mkdir(fullpath)
- self.setFile = -1 #inicializo mi contador de seteo
+ self.setFile = -1 # inicializo mi contador de seteo
else:
- filesList = os.listdir( fullpath )
- if len( filesList ) > 0:
- filesList = sorted( filesList, key=str.lower )
+ filesList = os.listdir(fullpath)
+ if len(filesList) > 0:
+ filesList = sorted(filesList, key=str.lower)
filen = filesList[-1]
- if isNumber( filen[8:11] ):
- self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
+ if isNumber(filen[8:11]):
+ self.setFile = int(filen[8:11]) # inicializo mi contador de seteo al seteo del ultimo file
else:
self.setFile = -1
else:
- self.setFile = -1 #inicializo mi contador de seteo
+ self.setFile = -1 # inicializo mi contador de seteo
setFile = self.setFile
setFile += 1
@@ -234,16 +234,16 @@ class FitsWriter(Operation):
timeTuple.tm_year,
timeTuple.tm_yday,
setFile,
- ext )
+ ext)
- filename = os.path.join( path, subfolder, thisFile )
+ filename = os.path.join(path, subfolder, thisFile)
self.blockIndex = 0
self.filename = filename
self.setFile = setFile
self.flagIsNewFile = 1
- print('Writing the file: %s'%self.filename)
+ print('Writing the file: %s' % self.filename)
self.setFitsHeader(self.dataOut, self.metadatafile)
@@ -262,13 +262,13 @@ class FitsWriter(Operation):
if self.blockIndex < self.dataBlocksPerFile:
return 1
- if not( self.setNextFile() ):
+ if not(self.setNextFile()):
return 0
return 1
def writeNextBlock(self):
- if not( self.__setNewBlock() ):
+ if not(self.__setNewBlock()):
return 0
self.writeBlock()
return 1
@@ -301,8 +301,8 @@ class FitsReader(ProcessingUnit):
data = None
data_header_dict = None
- def __init__(self):#, **kwargs):
- ProcessingUnit.__init__(self)#, **kwargs)
+ def __init__(self): # , **kwargs):
+ ProcessingUnit.__init__(self) # , **kwargs)
self.isConfig = False
self.ext = '.fits'
self.setFile = 0
@@ -317,7 +317,7 @@ class FitsReader(ProcessingUnit):
self.nReadBlocks = 0
self.nTotalBlocks = 0
self.dataOut = self.createObjByDefault()
- self.maxTimeStep = 10# deberia ser definido por el usuario usando el metodo setup()
+ self.maxTimeStep = 10 # deberia ser definido por el usuario usando el metodo setup()
self.blockIndex = 1
def createObjByDefault(self):
@@ -328,14 +328,14 @@ class FitsReader(ProcessingUnit):
def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
try:
- fitsObj = pyfits.open(filename,'readonly')
+ fitsObj = pyfits.open(filename, 'readonly')
except:
- print("File %s can't be opened" %(filename))
+ print("File %s can't be opened" % (filename))
return None
header = fitsObj[0].header
struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
- utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
+ utc = time.mktime(struct_time) - time.timezone # TIMEZONE debe ser un parametro del header FITS
ltc = utc
if useLocalTime:
@@ -367,7 +367,7 @@ class FitsReader(ProcessingUnit):
# continue
fileSize = os.path.getsize(filename)
- fitsObj = pyfits.open(filename,'readonly')
+ fitsObj = pyfits.open(filename, 'readonly')
break
self.flagIsNewFile = 1
@@ -376,7 +376,7 @@ class FitsReader(ProcessingUnit):
self.fileSize = fileSize
self.fitsObj = fitsObj
self.blockIndex = 0
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
@@ -459,8 +459,8 @@ class FitsReader(ProcessingUnit):
path,
startDate,
endDate,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
set=None,
expLabel='',
ext='.fits',
@@ -474,7 +474,7 @@ class FitsReader(ProcessingUnit):
else:
dirList = []
for thisPath in os.listdir(path):
- if not os.path.isdir(os.path.join(path,thisPath)):
+ if not os.path.isdir(os.path.join(path, thisPath)):
continue
if not isRadarFolder(thisPath):
continue
@@ -490,20 +490,20 @@ class FitsReader(ProcessingUnit):
year = thisDate.timetuple().tm_year
doy = thisDate.timetuple().tm_yday
- matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
+ matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')
if len(matchlist) == 0:
thisDate += datetime.timedelta(1)
continue
for match in matchlist:
- pathList.append(os.path.join(path,match,expLabel))
+ pathList.append(os.path.join(path, match, expLabel))
thisDate += datetime.timedelta(1)
if pathList == []:
- print("Any folder was found for the date range: %s-%s" %(startDate, endDate))
+ print("Any folder was found for the date range: %s-%s" % (startDate, endDate))
return None, None
- print("%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate))
+ print("%d folder(s) was(were) found for the date range: %s - %s" % (len(pathList), startDate, endDate))
filenameList = []
datetimeList = []
@@ -512,12 +512,12 @@ class FitsReader(ProcessingUnit):
thisPath = pathList[i]
- fileList = glob.glob1(thisPath, "*%s" %ext)
+ fileList = glob.glob1(thisPath, "*%s" % ext)
fileList.sort()
for thisFile in fileList:
- filename = os.path.join(thisPath,thisFile)
+ filename = os.path.join(thisPath, thisFile)
thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
if not(thisDatetime):
@@ -527,14 +527,14 @@ class FitsReader(ProcessingUnit):
datetimeList.append(thisDatetime)
if not(filenameList):
- print("Any file was found for the time range %s - %s" %(startTime, endTime))
+ print("Any file was found for the time range %s - %s" % (startTime, endTime))
return None, None
- print("%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime))
+ print("%d file(s) was(were) found for the time range: %s - %s" % (len(filenameList), startTime, endTime))
print()
for i in range(len(filenameList)):
- print("%s -> [%s]" %(filenameList[i], datetimeList[i].ctime()))
+ print("%s -> [%s]" % (filenameList[i], datetimeList[i].ctime()))
self.filenameList = filenameList
self.datetimeList = datetimeList
@@ -544,14 +544,14 @@ class FitsReader(ProcessingUnit):
def setup(self, path=None,
startDate=None,
endDate=None,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
set=0,
- expLabel = "",
- ext = None,
- online = False,
- delay = 60,
- walk = True):
+ expLabel="",
+ ext=None,
+ online=False,
+ delay=60,
+ walk=True):
if path == None:
raise ValueError("The path is not valid")
@@ -567,9 +567,9 @@ class FitsReader(ProcessingUnit):
walk=walk)
if not(pathList):
- print("No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
- datetime.datetime.combine(startDate,startTime).ctime(),
- datetime.datetime.combine(endDate,endTime).ctime()))
+ print("No *%s files into the folder %s \nfor the range: %s - %s" % (ext, path,
+ datetime.datetime.combine(startDate, startTime).ctime(),
+ datetime.datetime.combine(endDate, endTime).ctime()))
sys.exit(-1)
@@ -583,10 +583,10 @@ class FitsReader(ProcessingUnit):
self.ext = ext
if not(self.setNextFile()):
- if (startDate!=None) and (endDate!=None):
- print("No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()))
+ if (startDate != None) and (endDate != None):
+ print("No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
elif startDate != None:
- print("No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()))
+ print("No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
else:
print("No files")
@@ -627,21 +627,21 @@ class FitsReader(ProcessingUnit):
neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
- for nTries in range( self.nTries ):
+ for nTries in range(self.nTries):
self.fp.close()
- self.fp = open( self.filename, 'rb' )
- self.fp.seek( currentPointer )
+ self.fp = open(self.filename, 'rb')
+ self.fp.seek(currentPointer)
- self.fileSize = os.path.getsize( self.filename )
+ self.fileSize = os.path.getsize(self.filename)
currentSize = self.fileSize - currentPointer
- if ( currentSize >= neededSize ):
+ if (currentSize >= neededSize):
self.__rdBasicHeader()
return 1
- print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1))
- sleep( self.delay )
+ print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
+ sleep(self.delay)
return 0
@@ -737,11 +737,11 @@ class SpectraHeisWriter(Operation):
doypath = None
subfolder = None
- def __init__(self):#, **kwargs):
- Operation.__init__(self)#, **kwargs)
+ def __init__(self): # , **kwargs):
+ Operation.__init__(self) # , **kwargs)
self.wrObj = PyFits()
# self.dataOut = dataOut
- self.nTotalBlocks=0
+ self.nTotalBlocks = 0
# self.set = None
self.setFile = None
self.idblock = 0
@@ -764,7 +764,7 @@ class SpectraHeisWriter(Operation):
False : no es un string numerico
"""
try:
- float( str )
+ float(str)
return True
except:
return False
@@ -779,28 +779,28 @@ class SpectraHeisWriter(Operation):
self.dataOut = dataOut
def putData(self):
- name= time.localtime( self.dataOut.utctime)
- ext=".fits"
+ name = time.localtime(self.dataOut.utctime)
+ ext = ".fits"
if self.doypath == None:
- self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
- self.doypath = os.path.join( self.wrpath, self.subfolder )
+ self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year, name.tm_yday, time.mktime(datetime.datetime.now().timetuple()))
+ self.doypath = os.path.join(self.wrpath, self.subfolder)
os.mkdir(self.doypath)
if self.setFile == None:
# self.set = self.dataOut.set
self.setFile = 0
# if self.set != self.dataOut.set:
-## self.set = self.dataOut.set
+# # self.set = self.dataOut.set
# self.setFile = 0
- #make the filename
- thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
+ # make the filename
+ thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)
- filename = os.path.join(self.wrpath,self.subfolder, thisFile)
+ filename = os.path.join(self.wrpath, self.subfolder, thisFile)
- idblock = numpy.array([self.idblock],dtype="int64")
- header=self.wrObj.cFImage(idblock=idblock,
+ idblock = numpy.array([self.idblock], dtype="int64")
+ header = self.wrObj.cFImage(idblock=idblock,
year=time.gmtime(self.dataOut.utctime).tm_year,
month=time.gmtime(self.dataOut.utctime).tm_mon,
day=time.gmtime(self.dataOut.utctime).tm_mday,
@@ -808,32 +808,32 @@ class SpectraHeisWriter(Operation):
minute=time.gmtime(self.dataOut.utctime).tm_min,
second=time.gmtime(self.dataOut.utctime).tm_sec)
- c=3E8
+ c = 3E8
deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
- freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
+ freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) * (c / (2 * deltaHeight * 1000))
colList = []
- colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
+ colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints) + 'E', array=freq)
colList.append(colFreq)
- nchannel=self.dataOut.nChannels
+ nchannel = self.dataOut.nChannels
for i in range(nchannel):
- col = self.wrObj.writeData(name="PCh"+str(i+1),
- format=str(self.dataOut.nFFTPoints)+'E',
- data=10*numpy.log10(self.dataOut.data_spc[i,:]))
+ col = self.wrObj.writeData(name="PCh" + str(i + 1),
+ format=str(self.dataOut.nFFTPoints) + 'E',
+ data=10 * numpy.log10(self.dataOut.data_spc[i, :]))
colList.append(col)
- data=self.wrObj.Ctable(colList=colList)
+ data = self.wrObj.Ctable(colList=colList)
- self.wrObj.CFile(header,data)
+ self.wrObj.CFile(header, data)
self.wrObj.wFile(filename)
- #update the setFile
+ # update the setFile
self.setFile += 1
self.idblock += 1
@@ -847,4 +847,4 @@ class SpectraHeisWriter(Operation):
self.isConfig = True
self.putData()
- return dataOut
\ No newline at end of file
+ return dataOut
diff --git a/schainpy/model/io/jroIO_hf.py b/schainpy/model/io/jroIO_hf.py
index 03d2596..9738168 100644
--- a/schainpy/model/io/jroIO_hf.py
+++ b/schainpy/model/io/jroIO_hf.py
@@ -4,8 +4,8 @@ Created on Jul 3, 2014
@author: roj-com0419
'''
-import os,sys
-import time,datetime
+import os, sys
+import time, datetime
import h5py
import numpy
import fnmatch
@@ -30,7 +30,7 @@ def isNumber(str):
False : no es un string numerico
"""
try:
- float( str )
+ float(str)
return True
except:
return False
@@ -48,7 +48,7 @@ def getFileFromSet(path, ext, set=None):
for thisFile in fileList:
try:
- number= int(thisFile[6:16])
+ number = int(thisFile[6:16])
# year = int(thisFile[1:5])
# doy = int(thisFile[5:8])
@@ -63,31 +63,31 @@ def getFileFromSet(path, ext, set=None):
if len(validFilelist) < 1:
return None
- validFilelist = sorted( validFilelist, key=str.lower )
+ validFilelist = sorted(validFilelist, key=str.lower)
if set == None:
return validFilelist[-1]
- print("set =" ,set)
+ print("set =" , set)
for thisFile in validFilelist:
if set <= int(thisFile[6:16]):
- print(thisFile,int(thisFile[6:16]))
+ print(thisFile, int(thisFile[6:16]))
return thisFile
return validFilelist[-1]
- myfile = fnmatch.filter(validFilelist,'*%10d*'%(set))
- #myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
+ myfile = fnmatch.filter(validFilelist, '*%10d*' % (set))
+ # myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
- if len(myfile)!= 0:
+ if len(myfile) != 0:
return myfile[0]
else:
- filename = '*%10.10d%s'%(set,ext.lower())
- print('the filename %s does not exist'%filename)
+ filename = '*%10.10d%s' % (set, ext.lower())
+ print('the filename %s does not exist' % filename)
print('...going to the last file: ')
if validFilelist:
- validFilelist = sorted( validFilelist, key=str.lower )
+ validFilelist = sorted(validFilelist, key=str.lower)
return validFilelist[-1]
return None
@@ -113,7 +113,7 @@ Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
for thisFile in fileList:
try:
- number= int(thisFile[6:16])
+ number = int(thisFile[6:16])
except:
print("There is a file or folder with different format")
if not isNumber(number):
@@ -127,7 +127,7 @@ Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
# if not isNumber(doy):
# continue
- number= int(number)
+ number = int(number)
# year = int(year)
# doy = int(doy)
@@ -139,7 +139,7 @@ Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
if validFilelist:
- validFilelist = sorted( validFilelist, key=str.lower )
+ validFilelist = sorted(validFilelist, key=str.lower)
return validFilelist[-1]
return None
@@ -150,16 +150,16 @@ class HFReader(ProcessingUnit):
'''
classdocs
'''
- path = None
- startDate= None
- endDate = None
- startTime= None
- endTime = None
- walk = None
+ path = None
+ startDate = None
+ endDate = None
+ startTime = None
+ endTime = None
+ walk = None
isConfig = False
- dataOut=None
+ dataOut = None
nTries = 3
- ext = ".hdf5"
+ ext = ".hdf5"
def __init__(self, **kwargs):
'''
@@ -167,32 +167,32 @@ class HFReader(ProcessingUnit):
'''
ProcessingUnit.__init__(self, **kwargs)
- self.isConfig =False
+ self.isConfig = False
self.datablock = None
- self.filename_current=None
+ self.filename_current = None
self.utc = 0
- self.ext='.hdf5'
+ self.ext = '.hdf5'
self.flagIsNewFile = 1
#-------------------------------------------------
- self.fileIndex=None
+ self.fileIndex = None
- self.profileIndex_offset=None
+ self.profileIndex_offset = None
- self.filenameList=[]
+ self.filenameList = []
- self.hfFilePointer= None
+ self.hfFilePointer = None
self.filename_online = None
- self.status=True
+ self.status = True
- self.flagNoMoreFiles= False
+ self.flagNoMoreFiles = False
self.__waitForNewFile = 20
@@ -222,7 +222,7 @@ class HFReader(ProcessingUnit):
Return:
None
"""
- pts2read =self.nChannels*self.nHeights*self.nProfiles
+ pts2read = self.nChannels * self.nHeights * self.nProfiles
self.blocksize = pts2read
def __readHeader(self):
@@ -230,20 +230,20 @@ class HFReader(ProcessingUnit):
self.nProfiles = 100
self.nHeights = 1000
self.nChannels = 2
- self.__firstHeigth=0
- self.__nSamples=1000
- self.__deltaHeigth=1.5
- self.__sample_rate=1e5
- #self.__frequency=2.72e6
- #self.__frequency=3.64e6
- self.__frequency=None
+ self.__firstHeigth = 0
+ self.__nSamples = 1000
+ self.__deltaHeigth = 1.5
+ self.__sample_rate = 1e5
+ # self.__frequency=2.72e6
+ # self.__frequency=3.64e6
+ self.__frequency = None
self.__online = False
- self.filename_next_set=None
+ self.filename_next_set = None
- #print "Frequency of Operation:", self.__frequency
+ # print "Frequency of Operation:", self.__frequency
- def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
+ def __setParameters(self, path='', startDate='', endDate='', startTime='', endTime='', walk=''):
self.path = path
self.startDate = startDate
self.endDate = endDate
@@ -253,43 +253,43 @@ class HFReader(ProcessingUnit):
def __checkPath(self):
if os.path.exists(self.path):
- self.status=1
+ self.status = 1
else:
- self.status=0
- print('Path %s does not exits'%self.path)
+ self.status = 0
+ print('Path %s does not exist' % self.path)
return
return
def __selDates(self, hf_dirname_format):
try:
- dir_hf_filename= self.path+"/"+hf_dirname_format
- fp= h5py.File(dir_hf_filename,'r')
- hipoc=fp['t'].value
+ dir_hf_filename = self.path + "/" + hf_dirname_format
+ fp = h5py.File(dir_hf_filename, 'r')
+ hipoc = fp['t'].value
fp.close()
- date_time=datetime.datetime.utcfromtimestamp(hipoc)
- year =int(date_time[0:4])
- month=int(date_time[5:7])
- dom =int(date_time[8:10])
- thisDate= datetime.date(year,month,dom)
- if (thisDate>=self.startDate and thisDate <= self.endDate):
+ date_time = datetime.datetime.utcfromtimestamp(hipoc)
+ year = int(date_time[0:4])
+ month = int(date_time[5:7])
+ dom = int(date_time[8:10])
+ thisDate = datetime.date(year, month, dom)
+ if (thisDate >= self.startDate and thisDate <= self.endDate):
return hf_dirname_format
except:
return None
- def __findDataForDates(self,online=False):
+ def __findDataForDates(self, online=False):
if not(self.status):
return None
pat = '\d+.\d+'
- dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
- dirnameList = [x for x in dirnameList if x!=None]
+ dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
+ dirnameList = [x for x in dirnameList if x != None]
dirnameList = [x.string for x in dirnameList]
if not(online):
dirnameList = [self.__selDates(x) for x in dirnameList]
- dirnameList = [x for x in dirnameList if x!=None]
+ dirnameList = [x for x in dirnameList if x != None]
- if len(dirnameList)>0:
+ if len(dirnameList) > 0:
self.status = 1
self.dirnameList = dirnameList
self.dirnameList.sort()
@@ -299,40 +299,40 @@ class HFReader(ProcessingUnit):
return None
def __getTimeFromData(self):
- startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
- endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
- print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
+ startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
+ endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
+ print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
print('........................................')
- filter_filenameList=[]
+ filter_filenameList = []
self.filenameList.sort()
- for i in range(len(self.filenameList)-1):
- filename=self.filenameList[i]
- dir_hf_filename= filename
- fp= h5py.File(dir_hf_filename,'r')
- hipoc=fp['t'].value
- hipoc=hipoc+self.timezone
- date_time=datetime.datetime.utcfromtimestamp(hipoc)
+ for i in range(len(self.filenameList) - 1):
+ filename = self.filenameList[i]
+ dir_hf_filename = filename
+ fp = h5py.File(dir_hf_filename, 'r')
+ hipoc = fp['t'].value
+ hipoc = hipoc + self.timezone
+ date_time = datetime.datetime.utcfromtimestamp(hipoc)
fp.close()
- year =int(date_time[0:4])
- month=int(date_time[5:7])
- dom =int(date_time[8:10])
- hour =int(date_time[11:13])
- min =int(date_time[14:16])
- sec =int(date_time[17:19])
- this_time=datetime.datetime(year,month,dom,hour,min,sec)
- if (this_time>=startDateTime_Reader and this_time <= endDateTime_Reader):
+ year = int(date_time[0:4])
+ month = int(date_time[5:7])
+ dom = int(date_time[8:10])
+ hour = int(date_time[11:13])
+ min = int(date_time[14:16])
+ sec = int(date_time[17:19])
+ this_time = datetime.datetime(year, month, dom, hour, min, sec)
+ if (this_time >= startDateTime_Reader and this_time <= endDateTime_Reader):
filter_filenameList.append(filename)
filter_filenameList.sort()
self.filenameList = filter_filenameList
return 1
def __getFilenameList(self):
- #print "hola"
- #print self.dirnameList
- dirList = [os.path.join(self.path,x) for x in self.dirnameList]
- self.filenameList= dirList
- #print self.filenameList
- #print "pase",len(self.filenameList)
+ # print "hola"
+ # print self.dirnameList
+ dirList = [os.path.join(self.path, x) for x in self.dirnameList]
+ self.filenameList = dirList
+ # print self.filenameList
+ # print "pase",len(self.filenameList)
def __selectDataForTimes(self, online=False):
@@ -344,70 +344,70 @@ class HFReader(ProcessingUnit):
if not(online):
if not(self.all):
self.__getTimeFromData()
- if len(self.filenameList)>0:
- self.status=1
+ if len(self.filenameList) > 0:
+ self.status = 1
self.filenameList.sort()
else:
- self.status=0
+ self.status = 0
return None
else:
if self.set != None:
- filename=getFileFromSet(self.path,self.ext,self.set)
+ filename = getFileFromSet(self.path, self.ext, self.set)
- if self.flag_nextfile==True:
- self.dirnameList=[filename]
- fullfilename=self.path+"/"+filename
- self.filenameList=[fullfilename]
- self.filename_next_set=int(filename[6:16])+10
+ if self.flag_nextfile == True:
+ self.dirnameList = [filename]
+ fullfilename = self.path + "/" + filename
+ self.filenameList = [fullfilename]
+ self.filename_next_set = int(filename[6:16]) + 10
- self.flag_nextfile=False
+ self.flag_nextfile = False
else:
print(filename)
print("PRIMERA CONDICION")
- #if self.filename_next_set== int(filename[6:16]):
+ # if self.filename_next_set== int(filename[6:16]):
print("TODO BIEN")
if filename == None:
raise ValueError("corregir")
- self.dirnameList=[filename]
- fullfilename=self.path+"/"+filename
- self.filenameList=[fullfilename]
- self.filename_next_set=int(filename[6:16])+10
- print("Setting next file",self.filename_next_set)
- self.set=int(filename[6:16])
+ self.dirnameList = [filename]
+ fullfilename = self.path + "/" + filename
+ self.filenameList = [fullfilename]
+ self.filename_next_set = int(filename[6:16]) + 10
+ print("Setting next file", self.filename_next_set)
+ self.set = int(filename[6:16])
if True:
pass
else:
print("ESTOY AQUI PORQUE NO EXISTE EL SIGUIENTE ARCHIVO")
else:
- filename =getlastFileFromPath(self.path,self.ext)
+ filename = getlastFileFromPath(self.path, self.ext)
- if self.flag_nextfile==True:
- self.dirnameList=[filename]
- fullfilename=self.path+"/"+filename
- self.filenameList=[self.filenameList[-1]]
- self.filename_next_set=int(filename[6:16])+10
+ if self.flag_nextfile == True:
+ self.dirnameList = [filename]
+ fullfilename = self.path + "/" + filename
+ self.filenameList = [self.filenameList[-1]]
+ self.filename_next_set = int(filename[6:16]) + 10
- self.flag_nextfile=False
+ self.flag_nextfile = False
else:
- filename=getFileFromSet(self.path,self.ext,self.set)
+ filename = getFileFromSet(self.path, self.ext, self.set)
print(filename)
print("PRIMERA CONDICION")
- #if self.filename_next_set== int(filename[6:16]):
+ # if self.filename_next_set== int(filename[6:16]):
print("TODO BIEN")
if filename == None:
raise ValueError("corregir")
- self.dirnameList=[filename]
- fullfilename=self.path+"/"+filename
- self.filenameList=[fullfilename]
- self.filename_next_set=int(filename[6:16])+10
- print("Setting next file",self.filename_next_set)
- self.set=int(filename[6:16])
+ self.dirnameList = [filename]
+ fullfilename = self.path + "/" + filename
+ self.filenameList = [fullfilename]
+ self.filename_next_set = int(filename[6:16]) + 10
+ print("Setting next file", self.filename_next_set)
+ self.set = int(filename[6:16])
if True:
pass
else:
@@ -420,8 +420,8 @@ class HFReader(ProcessingUnit):
startDate,
endDate,
ext,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
walk=True):
self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -429,18 +429,18 @@ class HFReader(ProcessingUnit):
self.__checkPath()
self.__findDataForDates()
- #print self.dirnameList
+ # print self.dirnameList
self.__selectDataForTimes()
for i in range(len(self.filenameList)):
- print("%s"% (self.filenameList[i]))
+ print("%s" % (self.filenameList[i]))
return
def searchFilesOnLine(self,
path,
- expLabel= "",
+ expLabel="",
ext=None,
startDate=None,
endDate=None,
@@ -451,27 +451,27 @@ class HFReader(ProcessingUnit):
startDate = datetime.datetime.utcnow().date()
endDate = datetime.datetime.utcnow().date()
- self.__setParameters(path=path,startDate=startDate,endDate=endDate,walk=walk)
+ self.__setParameters(path=path, startDate=startDate, endDate=endDate, walk=walk)
self.__checkPath()
- fullpath=path
- print("%s folder was found: " %(fullpath ))
+ fullpath = path
+ print("%s folder was found: " % (fullpath))
if set == None:
- self.set=None
- filename =getlastFileFromPath(fullpath,ext)
- startDate= datetime.datetime.utcnow().date
- endDate= datetime.datetime.utcnow().date()
+ self.set = None
+ filename = getlastFileFromPath(fullpath, ext)
+ startDate = datetime.datetime.utcnow().date
+ endDate = datetime.datetime.utcnow().date()
#
else:
- filename= getFileFromSet(fullpath,ext,set)
- startDate=None
- endDate=None
+ filename = getFileFromSet(fullpath, ext, set)
+ startDate = None
+ endDate = None
#
if not (filename):
- return None,None,None,None,None
- #print "%s file was found" %(filename)
+ return None, None, None, None, None
+ # print "%s file was found" %(filename)
#
# dir_hf_filename= self.path+"/"+filename
@@ -485,20 +485,20 @@ class HFReader(ProcessingUnit):
# dom =int(date_time[8:10])
# set= int(filename[4:10])
# self.set=set-1
- #self.dirnameList=[filename]
- filenameList= fullpath+"/"+filename
- self.dirnameList=[filename]
- self.filenameList=[filenameList]
- self.flag_nextfile=True
-
- #self.__findDataForDates(online=True)
- #self.dirnameList=[self.dirnameList[-1]]
- #print self.dirnameList
- #self.__selectDataForTimes(online=True)
- #return fullpath,filename,year,month,dom,set
+ # self.dirnameList=[filename]
+ filenameList = fullpath + "/" + filename
+ self.dirnameList = [filename]
+ self.filenameList = [filenameList]
+ self.flag_nextfile = True
+
+ # self.__findDataForDates(online=True)
+ # self.dirnameList=[self.dirnameList[-1]]
+ # print self.dirnameList
+ # self.__selectDataForTimes(online=True)
+ # return fullpath,filename,year,month,dom,set
return
- def __setNextFile(self,online=False):
+ def __setNextFile(self, online=False):
"""
"""
if not(online):
@@ -513,7 +513,7 @@ class HFReader(ProcessingUnit):
def __setNextFileOffline(self):
"""
"""
- idFile= self.fileIndex
+ idFile = self.fileIndex
while(True):
idFile += 1
if not (idFile < len(self.filenameList)):
@@ -521,10 +521,10 @@ class HFReader(ProcessingUnit):
print("No more Files")
return 0
filename = self.filenameList[idFile]
- hfFilePointer =h5py.File(filename,'r')
+ hfFilePointer = h5py.File(filename, 'r')
- epoc=hfFilePointer['t'].value
- #this_time=datetime.datetime(year,month,dom,hour,min,sec)
+ epoc = hfFilePointer['t'].value
+ # this_time=datetime.datetime(year,month,dom,hour,min,sec)
break
self.flagIsNewFile = 1
@@ -533,70 +533,70 @@ class HFReader(ProcessingUnit):
self.hfFilePointer = hfFilePointer
hfFilePointer.close()
- self.__t0=epoc
- print("Setting the file: %s"%self.filename)
+ self.__t0 = epoc
+ print("Setting the file: %s" % self.filename)
return 1
def __setNextFileOnline(self):
"""
"""
- print("SOY NONE",self.set)
- if self.set==None:
+ print("SOY NONE", self.set)
+ if self.set == None:
pass
else:
- self.set +=10
+ self.set += 10
- filename = self.filenameList[0]#fullfilename
+ filename = self.filenameList[0] # fullfilename
if self.filename_online != None:
self.__selectDataForTimes(online=True)
filename = self.filenameList[0]
while self.filename_online == filename:
- print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
+ print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
time.sleep(self.__waitForNewFile)
- #self.__findDataForDates(online=True)
- self.set=self.filename_next_set
+ # self.__findDataForDates(online=True)
+ self.set = self.filename_next_set
self.__selectDataForTimes(online=True)
filename = self.filenameList[0]
- sizeoffile=os.path.getsize(filename)
-
- #print filename
- sizeoffile=os.path.getsize(filename)
- if sizeoffile<1670240:
- print("%s is not the rigth size"%filename)
- delay=50
- print('waiting %d seconds for delay...'%(delay))
+ sizeoffile = os.path.getsize(filename)
+
+ # print filename
+ sizeoffile = os.path.getsize(filename)
+ if sizeoffile < 1670240:
+ print("%s is not the right size" % filename)
+ delay = 50
+ print('waiting %d seconds for delay...' % (delay))
time.sleep(delay)
- sizeoffile=os.path.getsize(filename)
- if sizeoffile<1670240:
- delay=50
- print('waiting %d more seconds for delay...'%(delay))
+ sizeoffile = os.path.getsize(filename)
+ if sizeoffile < 1670240:
+ delay = 50
+ print('waiting %d more seconds for delay...' % (delay))
time.sleep(delay)
- sizeoffile=os.path.getsize(filename)
- if sizeoffile<1670240:
- delay=50
- print('waiting %d more seconds for delay...'%(delay))
+ sizeoffile = os.path.getsize(filename)
+ if sizeoffile < 1670240:
+ delay = 50
+ print('waiting %d more seconds for delay...' % (delay))
time.sleep(delay)
try:
- hfFilePointer=h5py.File(filename,'r')
+ hfFilePointer = h5py.File(filename, 'r')
except:
- print("Error reading file %s"%filename)
+ print("Error reading file %s" % filename)
- self.filename_online=filename
- epoc=hfFilePointer['t'].value
+ self.filename_online = filename
+ epoc = hfFilePointer['t'].value
- self.hfFilePointer=hfFilePointer
+ self.hfFilePointer = hfFilePointer
hfFilePointer.close()
- self.__t0=epoc
+ self.__t0 = epoc
self.flagIsNewFile = 1
self.filename = filename
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
def __getExpParameters(self):
@@ -604,46 +604,46 @@ class HFReader(ProcessingUnit):
return None
def setup(self,
- path = None,
- startDate = None,
- endDate = None,
- startTime = datetime.time(0,0,0),
- endTime = datetime.time(23,59,59),
- set = None,
- expLabel = "",
- ext = None,
+ path=None,
+ startDate=None,
+ endDate=None,
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
+ set=None,
+ expLabel="",
+ ext=None,
all=0,
timezone=0,
- online = False,
- delay = 60,
- walk = True):
+ online=False,
+ delay=60,
+ walk=True):
'''
In this method we should set all initial parameters.
'''
- if path==None:
+ if path == None:
raise ValueError("The path is not valid")
- if ext==None:
+ if ext == None:
ext = self.ext
- self.timezone= timezone
- self.online= online
- self.all=all
- #if set==None:
+ self.timezone = timezone
+ self.online = online
+ self.all = all
+ # if set==None:
- #print set
+ # print set
if not(online):
print("Searching files in offline mode...")
self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
else:
print("Searching files in online mode...")
- self.searchFilesOnLine(path, walk,ext,set=set)
- if set==None:
+ self.searchFilesOnLine(path, walk, ext, set=set)
+ if set == None:
pass
else:
- self.set=set-10
+ self.set = set - 10
# for nTries in range(self.nTries):
#
@@ -659,7 +659,7 @@ class HFReader(ProcessingUnit):
if not(self.filenameList):
- print("There is no files into the folder: %s"%(path))
+ print("There are no files in the folder: %s" % (path))
sys.exit(-1)
self.__getExpParameters()
@@ -674,9 +674,9 @@ class HFReader(ProcessingUnit):
self.__setLocalVariables()
self.__setHeaderDO()
- #self.profileIndex_offset= 0
+ # self.profileIndex_offset= 0
- #self.profileIndex = self.profileIndex_offset
+ # self.profileIndex = self.profileIndex_offset
self.isConfig = True
@@ -686,7 +686,7 @@ class HFReader(ProcessingUnit):
def __setLocalVariables(self):
- self.datablock = numpy.zeros((self.nChannels, self.nHeights,self.nProfiles), dtype = numpy.complex)
+ self.datablock = numpy.zeros((self.nChannels, self.nHeights, self.nProfiles), dtype=numpy.complex)
#
@@ -703,34 +703,34 @@ class HFReader(ProcessingUnit):
#---------------------------------------------------------
- self.dataOut.systemHeaderObj.nProfiles=100
- self.dataOut.systemHeaderObj.nSamples=1000
+ self.dataOut.systemHeaderObj.nProfiles = 100
+ self.dataOut.systemHeaderObj.nSamples = 1000
- SAMPLING_STRUCTURE=[('h0', '=self.startDate and thisDate <= self.endDate):
+ if (thisDate >= self.startDate and thisDate <= self.endDate):
return amisr_dirname_format
except:
return None
- def __findDataForDates(self,online=False):
+ def __findDataForDates(self, online=False):
if not(self.status):
return None
pat = '\d+.\d+'
- dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
- dirnameList = [x for x in dirnameList if x!=None]
+ dirnameList = [re.search(pat, x) for x in os.listdir(self.path)]
+ dirnameList = [x for x in dirnameList if x != None]
dirnameList = [x.string for x in dirnameList]
if not(online):
dirnameList = [self.__selDates(x) for x in dirnameList]
- dirnameList = [x for x in dirnameList if x!=None]
- if len(dirnameList)>0:
+ dirnameList = [x for x in dirnameList if x != None]
+ if len(dirnameList) > 0:
self.status = 1
self.dirnameList = dirnameList
self.dirnameList.sort()
@@ -239,38 +239,38 @@ class AMISRReader(ProcessingUnit):
return None
def __getTimeFromData(self):
- startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
- endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
+ startDateTime_Reader = datetime.datetime.combine(self.startDate, self.startTime)
+ endDateTime_Reader = datetime.datetime.combine(self.endDate, self.endTime)
- print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
+ print('Filtering Files from %s to %s' % (startDateTime_Reader, endDateTime_Reader))
print('........................................')
filter_filenameList = []
self.filenameList.sort()
- #for i in range(len(self.filenameList)-1):
+ # for i in range(len(self.filenameList)-1):
for i in range(len(self.filenameList)):
filename = self.filenameList[i]
- fp = h5py.File(filename,'r')
+ fp = h5py.File(filename, 'r')
time_str = fp.get('Time/RadacTimeString')
startDateTimeStr_File = time_str[0][0].split('.')[0]
junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
- startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
+ startDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
endDateTimeStr_File = time_str[-1][-1].split('.')[0]
junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
- endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
+ endDateTime_File = datetime.datetime(junk.tm_year, junk.tm_mon, junk.tm_mday, junk.tm_hour, junk.tm_min, junk.tm_sec)
fp.close()
if self.timezone == 'lt':
- startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
- endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
+ startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
+ endDateTime_File = endDateTime_File - datetime.timedelta(minutes=300)
- if (endDateTime_File>=startDateTime_Reader and endDateTime_File= startDateTime_Reader and endDateTime_File < endDateTime_Reader):
+ # self.filenameList.remove(filename)
filter_filenameList.append(filename)
- if (endDateTime_File>=endDateTime_Reader):
+ if (endDateTime_File >= endDateTime_Reader):
break
@@ -279,7 +279,7 @@ class AMISRReader(ProcessingUnit):
return 1
def __filterByGlob1(self, dirName):
- filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
+ filter_files = glob.glob1(dirName, '*.*%s' % self.extension_file)
filter_files.sort()
filterDict = {}
filterDict.setdefault(dirName)
@@ -295,21 +295,21 @@ class AMISRReader(ProcessingUnit):
def __selectDataForTimes(self, online=False):
- #aun no esta implementado el filtro for tiempo
+ # aun no esta implementado el filtro for tiempo
if not(self.status):
return None
- dirList = [os.path.join(self.path,x) for x in self.dirnameList]
+ dirList = [os.path.join(self.path, x) for x in self.dirnameList]
fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
self.__getFilenameList(fileListInKeys, dirList)
if not(online):
- #filtro por tiempo
+ # filtro por tiempo
if not(self.all):
self.__getTimeFromData()
- if len(self.filenameList)>0:
+ if len(self.filenameList) > 0:
self.status = 1
self.filenameList.sort()
else:
@@ -317,7 +317,7 @@ class AMISRReader(ProcessingUnit):
return None
else:
- #get the last file - 1
+ # get the last file - 1
self.filenameList = [self.filenameList[-2]]
new_dirnameList = []
@@ -329,14 +329,14 @@ class AMISRReader(ProcessingUnit):
self.dirnameList = new_dirnameList
return 1
- def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),walk=True):
+ def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59), walk=True):
- if endDate ==None:
+ if endDate == None:
startDate = datetime.datetime.utcnow().date()
endDate = datetime.datetime.utcnow().date()
- self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
+ self.__setParameters(path=path, startDate=startDate, endDate=endDate, startTime=startTime, endTime=endTime, walk=walk)
self.__checkPath()
@@ -353,8 +353,8 @@ class AMISRReader(ProcessingUnit):
path,
startDate,
endDate,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
walk=True):
self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
@@ -366,7 +366,7 @@ class AMISRReader(ProcessingUnit):
self.__selectDataForTimes()
for i in range(len(self.filenameList)):
- print("%s" %(self.filenameList[i]))
+ print("%s" % (self.filenameList[i]))
return
@@ -382,7 +382,7 @@ class AMISRReader(ProcessingUnit):
filename = self.filenameList[idFile]
- amisrFilePointer = h5py.File(filename,'r')
+ amisrFilePointer = h5py.File(filename, 'r')
break
@@ -392,7 +392,7 @@ class AMISRReader(ProcessingUnit):
self.amisrFilePointer = amisrFilePointer
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
@@ -404,7 +404,7 @@ class AMISRReader(ProcessingUnit):
filename = self.filenameList[0]
wait = 0
while self.__filename_online == filename:
- print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
+ print('waiting %d seconds to get a new file...' % (self.__waitForNewFile))
if wait == 5:
return 0
sleep(self.__waitForNewFile)
@@ -414,40 +414,40 @@ class AMISRReader(ProcessingUnit):
self.__filename_online = filename
- self.amisrFilePointer = h5py.File(filename,'r')
+ self.amisrFilePointer = h5py.File(filename, 'r')
self.flagIsNewFile = 1
self.filename = filename
- print("Setting the file: %s"%self.filename)
+ print("Setting the file: %s" % self.filename)
return 1
def readData(self):
buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
- re = buffer[:,:,:,0]
- im = buffer[:,:,:,1]
- dataset = re + im*1j
+ re = buffer[:, :, :, 0]
+ im = buffer[:, :, :, 1]
+ dataset = re + im * 1j
self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
- timeset = self.radacTime[:,0]
- return dataset,timeset
+ timeset = self.radacTime[:, 0]
+ return dataset, timeset
def reshapeData(self):
- #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
- channels = self.beamCodeByPulse[0,:]
+ # self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
+ channels = self.beamCodeByPulse[0, :]
nchan = self.nchannels
- #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
+ # self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
nblocks = self.nblocks
nsamples = self.nsa
- #Dimensions : nChannels, nProfiles, nSamples
+ # Dimensions : nChannels, nProfiles, nSamples
new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
############################################
for thisChannel in range(nchan):
- new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
+ new_block[:, thisChannel, :, :] = self.dataset[:, numpy.where(channels == self.beamCode[0][thisChannel])[0], :]
- new_block = numpy.transpose(new_block, (1,0,2,3))
- new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
+ new_block = numpy.transpose(new_block, (1, 0, 2, 3))
+ new_block = numpy.reshape(new_block, (nchan, -1, nsamples))
return new_block
@@ -457,7 +457,7 @@ class AMISRReader(ProcessingUnit):
def fillJROHeader(self):
- #fill radar controller header
+ # fill radar controller header
self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
txA=self.__txA,
txB=0,
@@ -467,12 +467,12 @@ class AMISRReader(ProcessingUnit):
deltaHeight=self.__deltaHeight,
codeType=self.__codeType,
nCode=self.__nCode, nBaud=self.__nBaud,
- code = self.__code,
+ code=self.__code,
fClock=1)
- #fill system header
+ # fill system header
self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
nProfiles=self.newProfiles,
nChannels=len(self.__channelList),
@@ -483,17 +483,17 @@ class AMISRReader(ProcessingUnit):
self.dataOut.data = None
- self.dataOut.dtype = numpy.dtype([('real','endDateTime_Reader):
+ startDateTime_File = startDateTime_File - datetime.timedelta(minutes=300)
+ if (startDateTime_File > endDateTime_Reader):
return 0
self.jrodataset = self.reshapeData()
@@ -576,7 +576,7 @@ class AMISRReader(ProcessingUnit):
def __hasNotDataInBuffer(self):
- if self.profileIndex >= (self.newProfiles*self.nblocks):
+ if self.profileIndex >= (self.newProfiles * self.nblocks):
return 1
return 0
@@ -592,20 +592,20 @@ class AMISRReader(ProcessingUnit):
return 0
- if self.dataset is None: # setear esta condicion cuando no hayan datos por leers
+ if self.dataset is None: # setear esta condicion cuando no hayan datos por leers
self.dataOut.flagNoData = True
return 0
- #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
+ # self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
- self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
+ self.dataOut.data = self.jrodataset[:, self.profileIndex, :]
- #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
- #verificar basic header de jro data y ver si es compatible con este valor
- #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
+ # self.dataOut.utctime = self.jrotimeset[self.profileIndex]
+ # verificar basic header de jro data y ver si es compatible con este valor
+ # self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
indexprof = numpy.mod(self.profileIndex, self.newProfiles)
- indexblock = self.profileIndex/self.newProfiles
- #print indexblock, indexprof
+ indexblock = self.profileIndex / self.newProfiles
+ # print indexblock, indexprof
self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
self.dataOut.profileIndex = self.profileIndex
self.dataOut.flagNoData = False
diff --git a/schainpy/model/io/jroIO_madrigal.py b/schainpy/model/io/jroIO_madrigal.py
index 485428c..c8b8720 100644
--- a/schainpy/model/io/jroIO_madrigal.py
+++ b/schainpy/model/io/jroIO_madrigal.py
@@ -91,7 +91,7 @@ class MADReader(Reader, ProcessingUnit):
self.flagNoMoreFiles = 0
self.filename = None
self.intervals = set()
- self.datatime = datetime.datetime(1900,1,1)
+ self.datatime = datetime.datetime(1900, 1, 1)
self.format = None
self.filefmt = "***%Y%m%d*******"
@@ -125,7 +125,7 @@ class MADReader(Reader, ProcessingUnit):
for nTries in range(self.nTries):
fullpath = self.searchFilesOnLine(self.path, self.startDate,
- self.endDate, self.expLabel, self.ext, self.walk,
+ self.endDate, self.expLabel, self.ext, self.walk,
self.filefmt, self.folderfmt)
try:
@@ -138,7 +138,7 @@ class MADReader(Reader, ProcessingUnit):
log.warning(
'Waiting {} sec for a valid file in {}: try {} ...'.format(
- self.delay, self.path, nTries + 1),
+ self.delay, self.path, nTries + 1),
self.name)
time.sleep(self.delay)
@@ -148,7 +148,7 @@ class MADReader(Reader, ProcessingUnit):
else:
log.log("Searching files in {}".format(self.path), self.name)
- self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
+ self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
self.setNextFile()
@@ -212,7 +212,7 @@ class MADReader(Reader, ProcessingUnit):
if self.ext == '.txt':
self.data = numpy.genfromtxt(self.fp, missing_values=('missing'))
self.nrecords = self.data.shape[0]
- self.ranges = numpy.unique(self.data[:,self.parameters.index(self.independentParam.lower())])
+ self.ranges = numpy.unique(self.data[:, self.parameters.index(self.independentParam.lower())])
self.counter_records = 0
elif self.ext == '.hdf5':
self.data = self.fp['Data']
@@ -268,14 +268,14 @@ class MADReader(Reader, ProcessingUnit):
if self.counter_records == self.nrecords:
break
continue
- self.intervals.add((datatime-self.datatime).seconds)
+ self.intervals.add((datatime - self.datatime).seconds)
break
elif self.ext == '.hdf5':
datatime = datetime.datetime.utcfromtimestamp(
self.times[self.counter_records])
- dum = self.data['Table Layout'][self.data['Table Layout']['recno']==self.counter_records]
- self.intervals.add((datatime-self.datatime).seconds)
- if datatime.date()>self.datatime.date():
+ dum = self.data['Table Layout'][self.data['Table Layout']['recno'] == self.counter_records]
+ self.intervals.add((datatime - self.datatime).seconds)
+ if datatime.date() > self.datatime.date():
self.flagDiscontinuousBlock = 1
self.datatime = datatime
self.counter_records += 1
@@ -299,11 +299,11 @@ class MADReader(Reader, ProcessingUnit):
if self.ext == '.txt':
x = self.parameters.index(param.lower())
y = self.parameters.index(self.independentParam.lower())
- ranges = self.buffer[:,y]
- #if self.ranges.size == ranges.size:
+ ranges = self.buffer[:, y]
+ # if self.ranges.size == ranges.size:
# continue
index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
- dummy[index] = self.buffer[:,x]
+ dummy[index] = self.buffer[:, x]
else:
ranges = self.buffer[self.independentParam.lower()]
index = numpy.where(numpy.in1d(self.ranges, ranges))[0]
@@ -311,7 +311,7 @@ class MADReader(Reader, ProcessingUnit):
if isinstance(value, str):
if value not in self.independentParam:
- setattr(self.dataOut, value, dummy.reshape(1,-1))
+ setattr(self.dataOut, value, dummy.reshape(1, -1))
elif isinstance(value, list):
self.output[value[0]][value[1]] = dummy
parameters[value[1]] = param
@@ -382,7 +382,7 @@ Inputs:
format hdf5, cedar
blocks number of blocks per file'''
- __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict','metadata', 'format', 'blocks']
+ __attrs__ = ['path', 'oneDDict', 'ind2DList', 'twoDDict', 'metadata', 'format', 'blocks']
missing = -32767
def __init__(self):
@@ -438,7 +438,7 @@ Inputs:
Create new cedar file object
'''
- self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
+ self.mnemonic = MNEMONICS[self.kinst] # TODO get mnemonic from madrigal
date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
filename = '{}{}{}'.format(self.mnemonic,
@@ -499,7 +499,7 @@ Inputs:
if 'db' in value.lower():
tmp = getattr(self.dataOut, value.replace('_db', ''))
SNRavg = numpy.average(tmp, axis=0)
- tmp = 10*numpy.log10(SNRavg)
+ tmp = 10 * numpy.log10(SNRavg)
else:
tmp = getattr(self.dataOut, value)
out[key] = tmp.flatten()[:len(heights)]
@@ -521,14 +521,14 @@ Inputs:
startTime.hour,
startTime.minute,
startTime.second,
- startTime.microsecond/10000,
+ startTime.microsecond / 10000,
endTime.year,
endTime.month,
endTime.day,
endTime.hour,
endTime.minute,
endTime.second,
- endTime.microsecond/10000,
+ endTime.microsecond / 10000,
list(self.oneDDict.keys()),
list(self.twoDDict.keys()),
len(index),
@@ -592,4 +592,4 @@ Inputs:
def close(self):
if self.counter > 0:
- self.setHeader()
\ No newline at end of file
+ self.setHeader()
diff --git a/schainpy/model/io/jroIO_matlab.py b/schainpy/model/io/jroIO_matlab.py
index 175ba9a..aaa7be0 100644
--- a/schainpy/model/io/jroIO_matlab.py
+++ b/schainpy/model/io/jroIO_matlab.py
@@ -12,14 +12,14 @@ import cmath
class matoffReader(ProcessingUnit):
- index=None
- list=None
- firsttime=True
- utccounter=None
- utcfiletime=None
- utcmatcounter=0
- utcfirst=None
- utclist=None
+ index = None
+ list = None
+ firsttime = True
+ utccounter = None
+ utcfiletime = None
+ utcmatcounter = 0
+ utcfirst = None
+ utclist = None
def __init__(self):
self.dataOut = Spectra()
@@ -28,12 +28,12 @@ class matoffReader(ProcessingUnit):
def __setHeader(self, datastuff):
- self.dataOut.pairsList=[(0,1)]
- self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
- self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) #this!
+ self.dataOut.pairsList = [(0, 1)]
+ self.dataOut.channelList = list(range(np.array(datastuff.get('power')).shape[1]))
+ self.dataOut.nProfiles = len(np.array(datastuff.get('vel')).flatten()) # this!
self.dataOut.nIncohInt = 20
- self.dataOut.nCohInt = 1 #this!
- self.dataOut.ippSeconds = 0.004 #this!
+ self.dataOut.nCohInt = 1 # this!
+ self.dataOut.ippSeconds = 0.004 # this!
self.dataOut.nFFTPoints = len(np.array(datastuff.get('vel')).flatten())
self.dataOut.timeZone = 0
self.dataOut.heightList = np.array(datastuff.get('hts')).flatten()
@@ -41,21 +41,21 @@ class matoffReader(ProcessingUnit):
def __readFile(self, currentfile):
print("Reading from this file:" + currentfile)
- #filesplit=currentfile.split("\\")
- filesplit=currentfile.split("/")
- newsplit=filesplit[-2]
- newnewsplit=newsplit.split(".")
- newnewsplit=[int(i) for i in newnewsplit]
- gooblist=datetime.datetime(newnewsplit[0],newnewsplit[1],newnewsplit[2],newnewsplit[3],newnewsplit[4],newnewsplit[5])
- self.utcfirst=(gooblist-datetime.datetime(1970,1,1)).total_seconds()
+ # filesplit=currentfile.split("\\")
+ filesplit = currentfile.split("/")
+ newsplit = filesplit[-2]
+ newnewsplit = newsplit.split(".")
+ newnewsplit = [int(i) for i in newnewsplit]
+ gooblist = datetime.datetime(newnewsplit[0], newnewsplit[1], newnewsplit[2], newnewsplit[3], newnewsplit[4], newnewsplit[5])
+ self.utcfirst = (gooblist - datetime.datetime(1970, 1, 1)).total_seconds()
- newsplit=filesplit[-1]
- newnewsplit=newsplit.split(".")
- goobnum=newnewsplit[0]
- goobnum=int(goobnum)
+ newsplit = filesplit[-1]
+ newnewsplit = newsplit.split(".")
+ goobnum = newnewsplit[0]
+ goobnum = int(goobnum)
- self.utcfirst=self.utcfirst+goobnum*2
+ self.utcfirst = self.utcfirst + goobnum * 2
# if (currentfile[43:]=='0.mat'):
# self.utcmatcounter=0
# self.utcfirst=self.utclist[self.index]
@@ -66,26 +66,26 @@ class matoffReader(ProcessingUnit):
# print self.utcmatcounter
print(self.utcfirst)
try:
- datastuff=sio.loadmat(currentfile)
+ datastuff = sio.loadmat(currentfile)
except:
return None, None
- dataphase=datastuff.get('phase')
- data3=datastuff.get('doppler0')
- data4=datastuff.get('doppler1')
- data3= np.array(data3)
+ dataphase = datastuff.get('phase')
+ data3 = datastuff.get('doppler0')
+ data4 = datastuff.get('doppler1')
+ data3 = np.array(data3)
data4 = np.array(data4)
- datacoh=datastuff.get('coherence2')
+ datacoh = datastuff.get('coherence2')
- datacohphase=datacoh*np.exp(-dataphase*1j)
+ datacohphase = datacoh * np.exp(-dataphase * 1j)
# data31 = np.fliplr(data3)
# data41 = np.fliplr(data4)
- data31 = data3.reshape((1,data3.shape[0],data3.shape[1]))
- data41 = data4.reshape((1,data4.shape[0],data4.shape[1]))
- datacohphase1 = datacohphase.reshape((1,datacoh.shape[0],datacoh.shape[1]))
+ data31 = data3.reshape((1, data3.shape[0], data3.shape[1]))
+ data41 = data4.reshape((1, data4.shape[0], data4.shape[1]))
+ datacohphase1 = datacohphase.reshape((1, datacoh.shape[0], datacoh.shape[1]))
- datastack = np.vstack((data31,data41))
+ datastack = np.vstack((data31, data41))
self.__setHeader(datastuff)
@@ -94,46 +94,46 @@ class matoffReader(ProcessingUnit):
return spc, cspc
- def __findFiles(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
+ def __findFiles(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
if startDate == None:
- startDate = datetime.date(1970,1,1)
+ startDate = datetime.date(1970, 1, 1)
if endDate == None:
- endDate = datetime.date(2050,1,1)
+ endDate = datetime.date(2050, 1, 1)
- startsearch1=datetime.datetime.combine(startDate,startTime)
- startsearch2=(startsearch1-datetime.datetime(1970,1,1)).total_seconds()
- endsearch1=datetime.datetime.combine(endDate,endTime)
- endsearch2=(endsearch1-datetime.datetime(1970,1,1)).total_seconds()
+ startsearch1 = datetime.datetime.combine(startDate, startTime)
+ startsearch2 = (startsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
+ endsearch1 = datetime.datetime.combine(endDate, endTime)
+ endsearch2 = (endsearch1 - datetime.datetime(1970, 1, 1)).total_seconds()
dirList = listdir(path)
dirList = sorted(dirList)
- dirListFiltered=[]
- fileListFiltered=[]
- utclist=[]
+ dirListFiltered = []
+ fileListFiltered = []
+ utclist = []
if not dirList:
print("No directories found")
return []
- #if self.online:
+ # if self.online:
# dirList= [dirList[-1]]
if self.online:
currentdate = datetime.datetime.now()
- strsplit1=currentdate.strftime('%Y.%m.%d')
- dirList = fnmatch.filter(dirList,strsplit1+'*')
+ strsplit1 = currentdate.strftime('%Y.%m.%d')
+ dirList = fnmatch.filter(dirList, strsplit1 + '*')
for thisDir in dirList:
if not os.path.isdir(os.path.join(path, thisDir)):
continue
- strsplit=thisDir.split('.')
- timeints=[int(i) for i in strsplit]
- timelist=datetime.datetime(timeints[0],timeints[1],timeints[2],timeints[3],timeints[4],timeints[5])
- utctime=(timelist-datetime.datetime(1970,1,1)).total_seconds()
+ strsplit = thisDir.split('.')
+ timeints = [int(i) for i in strsplit]
+ timelist = datetime.datetime(timeints[0], timeints[1], timeints[2], timeints[3], timeints[4], timeints[5])
+ utctime = (timelist - datetime.datetime(1970, 1, 1)).total_seconds()
if not self.online:
if (utctime > endsearch2):
@@ -159,7 +159,7 @@ class matoffReader(ProcessingUnit):
continue
for k in range(len(fileList)):
- thisFile = str(k)+'.mat'
+ thisFile = str(k) + '.mat'
if not os.path.isfile(os.path.join(pathFile, thisFile)):
continue
@@ -168,7 +168,7 @@ class matoffReader(ProcessingUnit):
return fileListFiltered
- def __getNextOnlineFile(self, seconds = 40):
+ def __getNextOnlineFile(self, seconds=40):
filename = self.__getNextOfflineFile()
@@ -188,7 +188,7 @@ class matoffReader(ProcessingUnit):
if nTries > 3:
break
- print("Waiting %d seconds ..." %seconds)
+ print("Waiting %d seconds ..." % seconds)
time.sleep(40)
if not (len(filelist) > ncurrentfiles):
@@ -204,7 +204,7 @@ class matoffReader(ProcessingUnit):
if self.index >= len(self.fileList):
return None
- filename=self.fileList[self.index]
+ filename = self.fileList[self.index]
self.index += 1
return filename
@@ -216,12 +216,12 @@ class matoffReader(ProcessingUnit):
filename = self.__getNextOfflineFile()
return filename
- def setup(self, path, startDate=None, endDate=None,startTime=datetime.time(0,0,0), endTime=datetime.time(23,59,59)):
+ def setup(self, path, startDate=None, endDate=None, startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59)):
fileList = self.__findFiles(path, startDate, endDate, startTime, endTime)
if self.online:
- self.index = len(fileList) -1
+ self.index = len(fileList) - 1
else:
self.index = 0
@@ -229,24 +229,24 @@ class matoffReader(ProcessingUnit):
print("fin setup")
- def run(self,path=None,startDate=None, endDate=None,
- startTime=datetime.time(0,0,0),
- endTime=datetime.time(23,59,59),
- walk=True,timezone='ut',
- all=0,online=False,ext=None,**kwargs):
-
- self.path=path
- self.ext=ext
- self.startDate=startDate
- self.endDate=endDate
- self.startTime=startTime
- self.endTime=endTime
+ def run(self, path=None, startDate=None, endDate=None,
+ startTime=datetime.time(0, 0, 0),
+ endTime=datetime.time(23, 59, 59),
+ walk=True, timezone='ut',
+ all=0, online=False, ext=None, **kwargs):
+
+ self.path = path
+ self.ext = ext
+ self.startDate = startDate
+ self.endDate = endDate
+ self.startTime = startTime
+ self.endTime = endTime
self.online = online
self.dataOut.flagNoData = True
- if (self.firsttime==True):
+ if (self.firsttime == True):
self.setup(path, startDate, endDate, startTime, endTime)
- self.firsttime=False
+ self.firsttime = False
if not self.fileList:
@@ -262,7 +262,7 @@ class matoffReader(ProcessingUnit):
spc, cspc = self.__readFile(currentfile)
- if spc!=None:
+ if spc != None:
self.dataOut.data_spc = spc
self.dataOut.data_cspc = cspc
@@ -270,4 +270,4 @@ class matoffReader(ProcessingUnit):
self.dataOut.flagNoData = False
return 1
-
\ No newline at end of file
+
diff --git a/schainpy/model/io/jroIO_mira35c.py b/schainpy/model/io/jroIO_mira35c.py
index ab348a6..0e71fdb 100644
--- a/schainpy/model/io/jroIO_mira35c.py
+++ b/schainpy/model/io/jroIO_mira35c.py
@@ -23,9 +23,9 @@ except:
from time import sleep
from schainpy.model.data.jrodata import Spectra
-#from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
+# from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
-#from schainpy.model.io.jroIO_bltr import BLTRReader
+# from schainpy.model.io.jroIO_bltr import BLTRReader
from numpy import imag, shape, NaN, empty
@@ -315,7 +315,7 @@ SRVI_HEADER = numpy.dtype([
class SRVIHeader(Header):
- def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
+ def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
self.SignatureSRVI1 = SignatureSRVI1
self.SizeOfDataBlock1 = SizeOfDataBlock1
@@ -338,34 +338,34 @@ class SRVIHeader(Header):
SRVI_STRUCTURE = numpy.dtype([
('frame_cnt', ' 3*60*60:
+ elif timeDiff > 3 * 60 * 60:
self.lastTime = currentTime
return True
else:
@@ -427,7 +427,7 @@ class HDFWriter(Operation):
self.dataOut = dataOut
if not(self.isConfig):
- self.setup(path=path, blocksPerFile=blocksPerFile,
+ self.setup(path=path, blocksPerFile=blocksPerFile,
metadataList=metadataList, dataList=dataList,
setType=setType, description=description)
@@ -444,27 +444,27 @@ class HDFWriter(Operation):
setFile = self.setFile
timeTuple = time.localtime(self.dataOut.utctime)
- subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
+ subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
fullpath = os.path.join(path, subfolder)
if os.path.exists(fullpath):
filesList = os.listdir(fullpath)
filesList = [k for k in filesList if k.startswith(self.optchar)]
- if len( filesList ) > 0:
+ if len(filesList) > 0:
filesList = sorted(filesList, key=str.lower)
filen = filesList[-1]
# el filename debera tener el siguiente formato
# 0 1234 567 89A BCDE (hex)
# x YYYY DDD SSS .ext
if isNumber(filen[8:11]):
- setFile = int(filen[8:11]) #inicializo mi contador de seteo al seteo del ultimo file
+ setFile = int(filen[8:11]) # inicializo mi contador de seteo al seteo del ultimo file
else:
setFile = -1
else:
- setFile = -1 #inicializo mi contador de seteo
+ setFile = -1 # inicializo mi contador de seteo
else:
os.makedirs(fullpath)
- setFile = -1 #inicializo mi contador de seteo
+ setFile = -1 # inicializo mi contador de seteo
if self.setType is None:
setFile += 1
@@ -472,22 +472,22 @@ class HDFWriter(Operation):
timeTuple.tm_year,
timeTuple.tm_yday,
setFile,
- ext )
+ ext)
else:
- setFile = timeTuple.tm_hour*60+timeTuple.tm_min
+ setFile = timeTuple.tm_hour * 60 + timeTuple.tm_min
file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
timeTuple.tm_year,
timeTuple.tm_yday,
setFile,
- ext )
+ ext)
- self.filename = os.path.join( path, subfolder, file )
+ self.filename = os.path.join(path, subfolder, file)
- #Setting HDF5 File
+ # Setting HDF5 File
self.fp = h5py.File(self.filename, 'w')
- #write metadata
+ # write metadata
self.writeMetadata(self.fp)
- #Write data
+ # Write data
self.writeData(self.fp)
def getLabel(self, name, x=None):
@@ -563,9 +563,9 @@ class HDFWriter(Operation):
for dsInfo in self.dsList:
if dsInfo['nDim'] == 0:
ds = grp.create_dataset(
- self.getLabel(dsInfo['variable']),
- (self.blocksPerFile, ),
- chunks=True,
+ self.getLabel(dsInfo['variable']),
+ (self.blocksPerFile,),
+ chunks=True,
dtype=numpy.float64)
dtsets.append(ds)
data.append((dsInfo['variable'], -1))
@@ -577,8 +577,8 @@ class HDFWriter(Operation):
sgrp = grp
for i in range(dsInfo['dsNumber']):
ds = sgrp.create_dataset(
- self.getLabel(dsInfo['variable'], i),
- (self.blocksPerFile, ) + dsInfo['shape'][1:],
+ self.getLabel(dsInfo['variable'], i),
+ (self.blocksPerFile,) + dsInfo['shape'][1:],
chunks=True,
dtype=dsInfo['dtype'])
dtsets.append(ds)
diff --git a/schainpy/model/io/jroIO_simulator.py b/schainpy/model/io/jroIO_simulator.py
index b84df46..7f359e9 100644
--- a/schainpy/model/io/jroIO_simulator.py
+++ b/schainpy/model/io/jroIO_simulator.py
@@ -1,47 +1,47 @@
-import numpy,math,random,time
+import numpy, math, random, time
#---------------1 Heredamos JRODatareader
from schainpy.model.io.jroIO_base import *
#---------------2 Heredamos las propiedades de ProcessingUnit
-from schainpy.model.proc.jroproc_base import ProcessingUnit,Operation,MPDecorator
+from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
#---------------3 Importaremos las clases BascicHeader, SystemHeader, RadarControlHeader, ProcessingHeader
-from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader,SystemHeader,RadarControllerHeader, ProcessingHeader
+from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
#---------------4 Importaremos el objeto Voltge
from schainpy.model.data.jrodata import Voltage
class SimulatorReader(JRODataReader, ProcessingUnit):
- incIntFactor = 1
- nFFTPoints = 0
- FixPP_IncInt = 1
- FixRCP_IPP = 1000
- FixPP_CohInt = 1
- Tau_0 = 250
- AcqH0_0 = 70
- H0 = AcqH0_0
- AcqDH_0 = 1.25
- DH0 = AcqDH_0
- Bauds = 32
- BaudWidth = None
- FixRCP_TXA = 40
- FixRCP_TXB = 70
- fAngle = 2.0*math.pi*(1/16)
- DC_level = 500
- stdev = 8
- Num_Codes = 2
- #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
- #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
- #Dyn_snCode = numpy.array([Num_Codes,Bauds])
- Dyn_snCode = None
- Samples = 200
- channels = 2
- pulses = None
- Reference = None
- pulse_size = None
- prof_gen = None
- Fdoppler = 100
- Hdoppler = 36
- Adoppler = 300
- frequency = 9345
- nTotalReadFiles = 1000
+ incIntFactor = 1
+ nFFTPoints = 0
+ FixPP_IncInt = 1
+ FixRCP_IPP = 1000
+ FixPP_CohInt = 1
+ Tau_0 = 250
+ AcqH0_0 = 70
+ H0 = AcqH0_0
+ AcqDH_0 = 1.25
+ DH0 = AcqDH_0
+ Bauds = 32
+ BaudWidth = None
+ FixRCP_TXA = 40
+ FixRCP_TXB = 70
+ fAngle = 2.0 * math.pi * (1 / 16)
+ DC_level = 500
+ stdev = 8
+ Num_Codes = 2
+ # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
+ # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
+ # Dyn_snCode = numpy.array([Num_Codes,Bauds])
+ Dyn_snCode = None
+ Samples = 200
+ channels = 2
+ pulses = None
+ Reference = None
+ pulse_size = None
+ prof_gen = None
+ Fdoppler = 100
+ Hdoppler = 36
+ Adoppler = 300
+ frequency = 9345
+ nTotalReadFiles = 1000
def __init__(self):
"""
@@ -56,19 +56,19 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
ProcessingUnit.__init__(self)
print(" [ START ] init - Metodo Simulator Reader")
- self.isConfig = False
- self.basicHeaderObj = BasicHeader(LOCALTIME)
- self.systemHeaderObj = SystemHeader()
- self.radarControllerHeaderObj = RadarControllerHeader()
- self.processingHeaderObj = ProcessingHeader()
- self.profileIndex = 2**32-1
- self.dataOut = Voltage()
- #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
- code0 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1])
- #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
- code1 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1])
- #self.Dyn_snCode = numpy.array([code0,code1])
- self.Dyn_snCode = None
+ self.isConfig = False
+ self.basicHeaderObj = BasicHeader(LOCALTIME)
+ self.systemHeaderObj = SystemHeader()
+ self.radarControllerHeaderObj = RadarControllerHeader()
+ self.processingHeaderObj = ProcessingHeader()
+ self.profileIndex = 2 ** 32 - 1
+ self.dataOut = Voltage()
+ # code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
+ code0 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, 1])
+ # code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
+ code1 = numpy.array([1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1])
+ # self.Dyn_snCode = numpy.array([code0,code1])
+ self.Dyn_snCode = None
def set_kwargs(self, **kwargs):
for key, value in kwargs.items():
@@ -76,13 +76,13 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
def __hasNotDataInBuffer(self):
- if self.profileIndex >= self.processingHeaderObj.profilesPerBlock* self.nTxs:
- if self.nReadBlocks>0:
- tmp = self.dataOut.utctime
- tmp_utc = int(self.dataOut.utctime)
- tmp_milisecond = int((tmp-tmp_utc)*1000)
- self.basicHeaderObj.utc = tmp_utc
- self.basicHeaderObj.miliSecond= tmp_milisecond
+ if self.profileIndex >= self.processingHeaderObj.profilesPerBlock * self.nTxs:
+ if self.nReadBlocks > 0:
+ tmp = self.dataOut.utctime
+ tmp_utc = int(self.dataOut.utctime)
+ tmp_milisecond = int((tmp - tmp_utc) * 1000)
+ self.basicHeaderObj.utc = tmp_utc
+ self.basicHeaderObj.miliSecond = tmp_milisecond
return 1
return 0
@@ -90,14 +90,14 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
"""Set the next file to be readed open it and parse de file header"""
if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
- self.nReadFiles=self.nReadFiles+1
+ self.nReadFiles = self.nReadFiles + 1
if self.nReadFiles > self.nTotalReadFiles:
- self.flagNoMoreFiles=1
+ self.flagNoMoreFiles = 1
raise schainpy.admin.SchainWarning('No more files to read')
- print('------------------- [Opening file] ------------------------------',self.nReadFiles)
- self.nReadBlocks = 0
- #if self.nReadBlocks==0:
+ print('------------------- [Opening file] ------------------------------', self.nReadFiles)
+ self.nReadBlocks = 0
+ # if self.nReadBlocks==0:
# self.readFirstHeader()
def __setNewBlock(self):
@@ -113,43 +113,43 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
self.getBasicHeader()
break
if self.verbose:
- print("[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
+ print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
self.processingHeaderObj.dataBlocksPerFile,
- self.dataOut.datatime.ctime()) )
+ self.dataOut.datatime.ctime()))
return 1
def getFirstHeader(self):
self.getBasicHeader()
- self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
- self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
+ self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
+ self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
- self.dataOut.dtype = self.dtype
+ self.dataOut.dtype = self.dtype
- self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
- self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
+ self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
+ self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
- self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
+ self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
# asumo q la data no esta decodificada
- self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
+ self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
# asumo q la data no esta sin flip
- self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
- self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
- self.dataOut.frequency = self.frequency
+ self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
+ self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
+ self.dataOut.frequency = self.frequency
def getBasicHeader(self):
self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
- self.dataOut.timeZone = self.basicHeaderObj.timeZone
- self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
- self.dataOut.errorCount = self.basicHeaderObj.errorCount
- self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
- self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
+ self.dataOut.timeZone = self.basicHeaderObj.timeZone
+ self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
+ self.dataOut.errorCount = self.basicHeaderObj.errorCount
+ self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
+ self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
def readFirstHeader(self):
- datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
+ datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
if datatype == 0:
datatype_str = numpy.dtype([('real', ' self.incIntFactor):
- self.incIntFactor = self.FixPP_IncInt/ self.incIntFactor
- elif(self.FixPP_IncInt< self.incIntFactor):
+ self.incIntFactor = self.FixPP_IncInt / self.incIntFactor
+ elif(self.FixPP_IncInt < self.incIntFactor):
print("False alert...")
- ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
+ ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
- self.timeperblock =int(((self.FixRCP_IPP
- *ProfilesperBlock
- *self.FixPP_CohInt
- *self.incIntFactor)
- /150.0)
- *0.9
- +0.5)
+ self.timeperblock = int(((self.FixRCP_IPP
+ * ProfilesperBlock
+ * self.FixPP_CohInt
+ * self.incIntFactor)
+ / 150.0)
+ * 0.9
+ + 0.5)
# para cada canal
- self.profiles = ProfilesperBlock*self.FixPP_CohInt
- self.profiles = ProfilesperBlock
- self.Reference = int((self.Tau_0-self.AcqH0_0)/(self.AcqDH_0)+0.5)
- self.BaudWidth = int((self.FixRCP_TXA/self.AcqDH_0)/self.Bauds + 0.5 )
+ self.profiles = ProfilesperBlock * self.FixPP_CohInt
+ self.profiles = ProfilesperBlock
+ self.Reference = int((self.Tau_0 - self.AcqH0_0) / (self.AcqDH_0) + 0.5)
+ self.BaudWidth = int((self.FixRCP_TXA / self.AcqDH_0) / self.Bauds + 0.5)
- if (self.BaudWidth==0):
- self.BaudWidth=1
+ if (self.BaudWidth == 0):
+ self.BaudWidth = 1
- def init_pulse(self,Num_Codes=Num_Codes,Bauds=Bauds,BaudWidth=BaudWidth,Dyn_snCode=Dyn_snCode):
+ def init_pulse(self, Num_Codes=Num_Codes, Bauds=Bauds, BaudWidth=BaudWidth, Dyn_snCode=Dyn_snCode):
- Num_Codes = Num_Codes
- Bauds = Bauds
- BaudWidth = BaudWidth
- Dyn_snCode = Dyn_snCode
+ Num_Codes = Num_Codes
+ Bauds = Bauds
+ BaudWidth = BaudWidth
+ Dyn_snCode = Dyn_snCode
if Dyn_snCode:
print("EXISTE")
else:
print("No existe")
- if Dyn_snCode: # if Bauds:
- pulses = list(range(0,Num_Codes))
- num_codes = Num_Codes
+ if Dyn_snCode: # if Bauds:
+ pulses = list(range(0, Num_Codes))
+ num_codes = Num_Codes
for i in range(num_codes):
- pulse_size = Bauds*BaudWidth
- pulses[i] = numpy.zeros(pulse_size)
+ pulse_size = Bauds * BaudWidth
+ pulses[i] = numpy.zeros(pulse_size)
for j in range(Bauds):
for k in range(BaudWidth):
- pulses[i][j*BaudWidth+k] = int(Dyn_snCode[i][j]*600)
+ pulses[i][j * BaudWidth + k] = int(Dyn_snCode[i][j] * 600)
else:
print("sin code")
- pulses = list(range(1))
- if self.AcqDH_0>0.149:
- pulse_size = int(self.FixRCP_TXB/0.15+0.5)
+ pulses = list(range(1))
+ if self.AcqDH_0 > 0.149:
+ pulse_size = int(self.FixRCP_TXB / 0.15 + 0.5)
else:
- pulse_size = int((self.FixRCP_TXB/self.AcqDH_0)+0.5) #0.0375
- pulses[0] = numpy.ones(pulse_size)
- pulses = 600*pulses[0]
-
- return pulses,pulse_size
-
- def jro_GenerateBlockOfData(self,Samples=Samples,DC_level= DC_level,stdev=stdev,
- Reference= Reference,pulses= pulses,
- Num_Codes= Num_Codes,pulse_size=pulse_size,
- prof_gen= prof_gen,H0 = H0,DH0=DH0,
- Adoppler=Adoppler,Fdoppler= Fdoppler,Hdoppler=Hdoppler):
- Samples = Samples
- DC_level = DC_level
- stdev = stdev
- m_nR = Reference
- pulses = pulses
- num_codes = Num_Codes
- ps = pulse_size
- prof_gen = prof_gen
- channels = self.channels
- H0 = H0
- DH0 = DH0
- ippSec = self.radarControllerHeaderObj.ippSeconds
- Fdoppler = self.Fdoppler
- Hdoppler = self.Hdoppler
- Adoppler = self.Adoppler
-
- self.datablock = numpy.zeros([channels,prof_gen,Samples],dtype= numpy.complex64)
+ pulse_size = int((self.FixRCP_TXB / self.AcqDH_0) + 0.5) # 0.0375
+ pulses[0] = numpy.ones(pulse_size)
+ pulses = 600 * pulses[0]
+
+ return pulses, pulse_size
+
+ def jro_GenerateBlockOfData(self, Samples=Samples, DC_level=DC_level, stdev=stdev,
+ Reference=Reference, pulses=pulses,
+ Num_Codes=Num_Codes, pulse_size=pulse_size,
+ prof_gen=prof_gen, H0=H0, DH0=DH0,
+ Adoppler=Adoppler, Fdoppler=Fdoppler, Hdoppler=Hdoppler):
+ Samples = Samples
+ DC_level = DC_level
+ stdev = stdev
+ m_nR = Reference
+ pulses = pulses
+ num_codes = Num_Codes
+ ps = pulse_size
+ prof_gen = prof_gen
+ channels = self.channels
+ H0 = H0
+ DH0 = DH0
+ ippSec = self.radarControllerHeaderObj.ippSeconds
+ Fdoppler = self.Fdoppler
+ Hdoppler = self.Hdoppler
+ Adoppler = self.Adoppler
+
+ self.datablock = numpy.zeros([channels, prof_gen, Samples], dtype=numpy.complex64)
for i in range(channels):
for k in range(prof_gen):
#-----------------------NOISE---------------
- Noise_r = numpy.random.normal(DC_level,stdev,Samples)
- Noise_i = numpy.random.normal(DC_level,stdev,Samples)
- Noise = numpy.zeros(Samples,dtype=complex)
+ Noise_r = numpy.random.normal(DC_level, stdev, Samples)
+ Noise_i = numpy.random.normal(DC_level, stdev, Samples)
+ Noise = numpy.zeros(Samples, dtype=complex)
Noise.real = Noise_r
Noise.imag = Noise_i
#-----------------------PULSOS--------------
- Pulso = numpy.zeros(pulse_size,dtype=complex)
- Pulso.real = pulses[k%num_codes]
- Pulso.imag = pulses[k%num_codes]
+ Pulso = numpy.zeros(pulse_size, dtype=complex)
+ Pulso.real = pulses[k % num_codes]
+ Pulso.imag = pulses[k % num_codes]
#--------------------- PULSES+NOISE----------
- InBuffer = numpy.zeros(Samples,dtype=complex)
- InBuffer[m_nR:m_nR+ps] = Pulso
- InBuffer = InBuffer+Noise
+ InBuffer = numpy.zeros(Samples, dtype=complex)
+ InBuffer[m_nR:m_nR + ps] = Pulso
+ InBuffer = InBuffer + Noise
#--------------------- ANGLE -------------------------------
- InBuffer.real[m_nR:m_nR+ps] = InBuffer.real[m_nR:m_nR+ps]*(math.cos( self.fAngle)*5)
- InBuffer.imag[m_nR:m_nR+ps] = InBuffer.imag[m_nR:m_nR+ps]*(math.sin( self.fAngle)*5)
- InBuffer=InBuffer
- self.datablock[i][k]= InBuffer
+ InBuffer.real[m_nR:m_nR + ps] = InBuffer.real[m_nR:m_nR + ps] * (math.cos(self.fAngle) * 5)
+ InBuffer.imag[m_nR:m_nR + ps] = InBuffer.imag[m_nR:m_nR + ps] * (math.sin(self.fAngle) * 5)
+ InBuffer = InBuffer
+ self.datablock[i][k] = InBuffer
#----------------DOPPLER SIGNAL...............................................
- time_vec = numpy.linspace(0,(prof_gen-1)*ippSec,int(prof_gen))+self.nReadBlocks*ippSec*prof_gen+(self.nReadFiles-1)*ippSec*prof_gen
- fd = Fdoppler #+(600.0/120)*self.nReadBlocks
- d_signal = Adoppler*numpy.array(numpy.exp(1.0j*2.0*math.pi*fd*time_vec),dtype=numpy.complex64)
+ time_vec = numpy.linspace(0, (prof_gen - 1) * ippSec, int(prof_gen)) + self.nReadBlocks * ippSec * prof_gen + (self.nReadFiles - 1) * ippSec * prof_gen
+ fd = Fdoppler # +(600.0/120)*self.nReadBlocks
+ d_signal = Adoppler * numpy.array(numpy.exp(1.0j * 2.0 * math.pi * fd * time_vec), dtype=numpy.complex64)
#-------------Senal con ancho espectral--------------------
- if prof_gen%2==0:
- min = int(prof_gen/2.0-1.0)
- max = int(prof_gen/2.0)
+ if prof_gen % 2 == 0:
+ min = int(prof_gen / 2.0 - 1.0)
+ max = int(prof_gen / 2.0)
else:
- min = int(prof_gen/2.0)
- max = int(prof_gen/2.0)
- specw_sig = numpy.linspace(-min,max,prof_gen)
- w = 4
- A = 20
- specw_sig = specw_sig/w
- specw_sig = numpy.sinc(specw_sig)
- specw_sig = A*numpy.array(specw_sig,dtype=numpy.complex64)
+ min = int(prof_gen / 2.0)
+ max = int(prof_gen / 2.0)
+ specw_sig = numpy.linspace(-min, max, prof_gen)
+ w = 4
+ A = 20
+ specw_sig = specw_sig / w
+ specw_sig = numpy.sinc(specw_sig)
+ specw_sig = A * numpy.array(specw_sig, dtype=numpy.complex64)
#------------------ DATABLOCK + DOPPLER--------------------
- HD=int(Hdoppler/self.AcqDH_0)
+ HD = int(Hdoppler / self.AcqDH_0)
for i in range(12):
- self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ d_signal# RESULT
+ self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + d_signal # RESULT
#------------------ DATABLOCK + DOPPLER*Sinc(x)--------------------
- HD=int(Hdoppler/self.AcqDH_0)
- HD=int(HD/2)
+ HD = int(Hdoppler / self.AcqDH_0)
+ HD = int(HD / 2)
for i in range(12):
- self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ specw_sig*d_signal# RESULT
+ self.datablock[0, :, HD + i] = self.datablock[0, :, HD + i] + specw_sig * d_signal # RESULT
def readBlock(self):
- self.jro_GenerateBlockOfData(Samples= self.samples,DC_level=self.DC_level,
- stdev=self.stdev,Reference= self.Reference,
- pulses = self.pulses,Num_Codes=self.Num_Codes,
- pulse_size=self.pulse_size,prof_gen=self.profiles,
- H0=self.H0,DH0=self.DH0)
+ self.jro_GenerateBlockOfData(Samples=self.samples, DC_level=self.DC_level,
+ stdev=self.stdev, Reference=self.Reference,
+ pulses=self.pulses, Num_Codes=self.Num_Codes,
+ pulse_size=self.pulse_size, prof_gen=self.profiles,
+ H0=self.H0, DH0=self.DH0)
- self.profileIndex = 0
- self.flagIsNewFile = 0
+ self.profileIndex = 0
+ self.flagIsNewFile = 0
self.flagIsNewBlock = 1
- self.nTotalBlocks += 1
- self.nReadBlocks += 1
+ self.nTotalBlocks += 1
+ self.nReadBlocks += 1
return 1
@@ -404,11 +404,11 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
self.dataOut.flagNodata = True
return 0
self.flagDiscontinuousBlock = 0
- self.flagIsNewBlock = 0
- if self.__hasNotDataInBuffer(): # aqui es verdad
- if not(self.readNextBlock()): # return 1 y por eso el if not salta a getBasic Header
+ self.flagIsNewBlock = 0
+ if self.__hasNotDataInBuffer(): # aqui es verdad
+ if not(self.readNextBlock()): # return 1 y por eso el if not salta a getBasic Header
return 0
- self.getFirstHeader() # atributo
+ self.getFirstHeader() # atributo
if not self.getByBlock:
self.dataOut.flagDataAsBlock = False
@@ -423,36 +423,36 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
return self.dataOut.data
- def setup(self,frequency=49.92e6,incIntFactor= 1, nFFTPoints = 0, FixPP_IncInt=1,FixRCP_IPP=1000,
- FixPP_CohInt= 1,Tau_0= 250,AcqH0_0 = 70 ,AcqDH_0=1.25, Bauds= 32,
- FixRCP_TXA = 40, FixRCP_TXB = 50, fAngle = 2.0*math.pi*(1/16),DC_level= 50,
- stdev= 8,Num_Codes = 1 , Dyn_snCode = None, samples=200,
- channels=2,Fdoppler=20,Hdoppler=36,Adoppler=500,
- profilesPerBlock=300,dataBlocksPerFile=120,nTotalReadFiles=10000,
+ def setup(self, frequency=49.92e6, incIntFactor=1, nFFTPoints=0, FixPP_IncInt=1, FixRCP_IPP=1000,
+ FixPP_CohInt=1, Tau_0=250, AcqH0_0=70, AcqDH_0=1.25, Bauds=32,
+ FixRCP_TXA=40, FixRCP_TXB=50, fAngle=2.0 * math.pi * (1 / 16), DC_level=50,
+ stdev=8, Num_Codes=1, Dyn_snCode=None, samples=200,
+ channels=2, Fdoppler=20, Hdoppler=36, Adoppler=500,
+ profilesPerBlock=300, dataBlocksPerFile=120, nTotalReadFiles=10000,
**kwargs):
self.set_kwargs(**kwargs)
self.nReadBlocks = 0
- self.nReadFiles = 1
- print('------------------- [Opening file: ] ------------------------------',self.nReadFiles)
+ self.nReadFiles = 1
+ print('------------------- [Opening file: ] ------------------------------', self.nReadFiles)
- tmp = time.time()
- tmp_utc = int(tmp)
- tmp_milisecond = int((tmp-tmp_utc)*1000)
- print(" SETUP -basicHeaderObj.utc",datetime.datetime.utcfromtimestamp(tmp))
+ tmp = time.time()
+ tmp_utc = int(tmp)
+ tmp_milisecond = int((tmp - tmp_utc) * 1000)
+ print(" SETUP -basicHeaderObj.utc", datetime.datetime.utcfromtimestamp(tmp))
if Dyn_snCode is None:
- Num_Codes=1
- Bauds =1
+ Num_Codes = 1
+ Bauds = 1
- self.set_BH(utc= tmp_utc,miliSecond= tmp_milisecond,timeZone=300 )
- self.set_RCH( expType=0, nTx=150,ipp=FixRCP_IPP, txA=FixRCP_TXA, txB= FixRCP_TXB,
+ self.set_BH(utc=tmp_utc, miliSecond=tmp_milisecond, timeZone=300)
+ self.set_RCH(expType=0, nTx=150, ipp=FixRCP_IPP, txA=FixRCP_TXA, txB=FixRCP_TXB,
nWindows=1 , nHeights=samples, firstHeight=AcqH0_0, deltaHeight=AcqDH_0,
numTaus=1, line6Function=0, line5Function=0, fClock=None,
prePulseBefore=0, prePulseAfter=0,
codeType=0, nCode=Num_Codes, nBaud=32, code=Dyn_snCode,
- flip1=0, flip2=0,Taus=Tau_0)
+ flip1=0, flip2=0, Taus=Tau_0)
self.set_PH(dtype=0, blockSize=0, profilesPerBlock=profilesPerBlock,
dataBlocksPerFile=dataBlocksPerFile, nWindows=1, processFlags=numpy.array([1024]), nCohInt=1,
@@ -465,54 +465,54 @@ class SimulatorReader(JRODataReader, ProcessingUnit):
self.readFirstHeader()
- self.frequency = frequency
- self.incIntFactor = incIntFactor
- self.nFFTPoints = nFFTPoints
- self.FixPP_IncInt = FixPP_IncInt
- self.FixRCP_IPP = FixRCP_IPP
- self.FixPP_CohInt = FixPP_CohInt
- self.Tau_0 = Tau_0
- self.AcqH0_0 = AcqH0_0
- self.H0 = AcqH0_0
- self.AcqDH_0 = AcqDH_0
- self.DH0 = AcqDH_0
- self.Bauds = Bauds
- self.FixRCP_TXA = FixRCP_TXA
- self.FixRCP_TXB = FixRCP_TXB
- self.fAngle = fAngle
- self.DC_level = DC_level
- self.stdev = stdev
- self.Num_Codes = Num_Codes
- self.Dyn_snCode = Dyn_snCode
- self.samples = samples
- self.channels = channels
- self.profiles = None
- self.m_nReference = None
- self.Baudwidth = None
- self.Fdoppler = Fdoppler
- self.Hdoppler = Hdoppler
- self.Adoppler = Adoppler
- self.nTotalReadFiles = int(nTotalReadFiles)
+ self.frequency = frequency
+ self.incIntFactor = incIntFactor
+ self.nFFTPoints = nFFTPoints
+ self.FixPP_IncInt = FixPP_IncInt
+ self.FixRCP_IPP = FixRCP_IPP
+ self.FixPP_CohInt = FixPP_CohInt
+ self.Tau_0 = Tau_0
+ self.AcqH0_0 = AcqH0_0
+ self.H0 = AcqH0_0
+ self.AcqDH_0 = AcqDH_0
+ self.DH0 = AcqDH_0
+ self.Bauds = Bauds
+ self.FixRCP_TXA = FixRCP_TXA
+ self.FixRCP_TXB = FixRCP_TXB
+ self.fAngle = fAngle
+ self.DC_level = DC_level
+ self.stdev = stdev
+ self.Num_Codes = Num_Codes
+ self.Dyn_snCode = Dyn_snCode
+ self.samples = samples
+ self.channels = channels
+ self.profiles = None
+ self.m_nReference = None
+ self.Baudwidth = None
+ self.Fdoppler = Fdoppler
+ self.Hdoppler = Hdoppler
+ self.Adoppler = Adoppler
+ self.nTotalReadFiles = int(nTotalReadFiles)
print("IPP ", self.FixRCP_IPP)
- print("Tau_0 ",self.Tau_0)
- print("AcqH0_0",self.AcqH0_0)
- print("samples,window ",self.samples)
- print("AcqDH_0",AcqDH_0)
- print("FixRCP_TXA",self.FixRCP_TXA)
- print("FixRCP_TXB",self.FixRCP_TXB)
- print("Dyn_snCode",Dyn_snCode)
+ print("Tau_0 ", self.Tau_0)
+ print("AcqH0_0", self.AcqH0_0)
+ print("samples,window ", self.samples)
+ print("AcqDH_0", AcqDH_0)
+ print("FixRCP_TXA", self.FixRCP_TXA)
+ print("FixRCP_TXB", self.FixRCP_TXB)
+ print("Dyn_snCode", Dyn_snCode)
print("Fdoppler", Fdoppler)
- print("Hdoppler",Hdoppler)
- print("Vdopplermax",Fdoppler*(3.0e8/self.frequency)/2.0)
+ print("Hdoppler", Hdoppler)
+ print("Vdopplermax", Fdoppler * (3.0e8 / self.frequency) / 2.0)
print("nTotalReadFiles", nTotalReadFiles)
self.init_acquisition()
- self.pulses,self.pulse_size=self.init_pulse(Num_Codes=self.Num_Codes,Bauds=self.Bauds,BaudWidth=self.BaudWidth,Dyn_snCode=Dyn_snCode)
+ self.pulses, self.pulse_size = self.init_pulse(Num_Codes=self.Num_Codes, Bauds=self.Bauds, BaudWidth=self.BaudWidth, Dyn_snCode=Dyn_snCode)
print(" [ END ] - SETUP metodo")
return
- def run(self,**kwargs): # metodo propio
+ def run(self, **kwargs): # metodo propio
if not(self.isConfig):
self.setup(**kwargs)
self.isConfig = True
diff --git a/schainpy/model/io/jroIO_spectra.py b/schainpy/model/io/jroIO_spectra.py
index 589c9b6..9710330 100644
--- a/schainpy/model/io/jroIO_spectra.py
+++ b/schainpy/model/io/jroIO_spectra.py
@@ -53,7 +53,7 @@ class SpectraReader(JRODataReader, ProcessingUnit):
"""
- def __init__(self):#, **kwargs):
+ def __init__(self): # , **kwargs):
"""
Inicializador de la clase SpectraReader para la lectura de datos de espectros.
@@ -121,12 +121,12 @@ class SpectraReader(JRODataReader, ProcessingUnit):
self.nRdPairs = 0
self.rdPairList = []
- for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
- if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
- self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
+ for i in range(0, self.processingHeaderObj.totalSpectra * 2, 2):
+ if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i + 1]:
+ self.nRdChannels = self.nRdChannels + 1 # par de canales iguales
else:
- self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
- self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
+ self.nRdPairs = self.nRdPairs + 1 # par de canales diferentes
+ self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i + 1]))
pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
@@ -165,38 +165,38 @@ class SpectraReader(JRODataReader, ProcessingUnit):
fpointer = self.fp.tell()
- spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
- spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
+ spc = numpy.fromfile(self.fp, self.dtype[0], self.pts2read_SelfSpectra)
+ spc = spc.reshape((self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # transforma a un arreglo 3D
if self.processingHeaderObj.flag_cspc:
- cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
- cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
+ cspc = numpy.fromfile(self.fp, self.dtype, self.pts2read_CrossSpectra)
+ cspc = cspc.reshape((self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock)) # transforma a un arreglo 3D
if self.processingHeaderObj.flag_dc:
- dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
- dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
+ dc = numpy.fromfile(self.fp, self.dtype, self.pts2read_DCchannels) # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
+ dc = dc.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights)) # transforma a un arreglo 2D
if not self.processingHeaderObj.shif_fft:
- #desplaza a la derecha en el eje 2 determinadas posiciones
- shift = int(self.processingHeaderObj.profilesPerBlock/2)
- spc = numpy.roll( spc, shift , axis=2 )
+ # desplaza a la derecha en el eje 2 determinadas posiciones
+ shift = int(self.processingHeaderObj.profilesPerBlock / 2)
+ spc = numpy.roll(spc, shift, axis=2)
if self.processingHeaderObj.flag_cspc:
- #desplaza a la derecha en el eje 2 determinadas posiciones
- cspc = numpy.roll( cspc, shift, axis=2 )
+ # desplaza a la derecha en el eje 2 determinadas posiciones
+ cspc = numpy.roll(cspc, shift, axis=2)
- #Dimensions : nChannels, nProfiles, nSamples
- spc = numpy.transpose( spc, (0,2,1) )
+ # Dimensions : nChannels, nProfiles, nSamples
+ spc = numpy.transpose(spc, (0, 2, 1))
self.data_spc = spc
if self.processingHeaderObj.flag_cspc:
- cspc = numpy.transpose( cspc, (0,2,1) )
- self.data_cspc = cspc['real'] + cspc['imag']*1j
+ cspc = numpy.transpose(cspc, (0, 2, 1))
+ self.data_cspc = cspc['real'] + cspc['imag'] * 1j
else:
self.data_cspc = None
if self.processingHeaderObj.flag_dc:
- self.data_dc = dc['real'] + dc['imag']*1j
+ self.data_dc = dc['real'] + dc['imag'] * 1j
else:
self.data_dc = None
@@ -219,12 +219,12 @@ class SpectraReader(JRODataReader, ProcessingUnit):
self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
- xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
+ xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight
self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
- self.dataOut.flagShiftFFT = True #Data is always shifted
- self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada
- self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data esta sin flip
+ self.dataOut.flagShiftFFT = True # Data is always shifted
+ self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # asumo q la data no esta decodificada
+ self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # asumo q la data esta sin flip
def getData(self):
"""
@@ -253,11 +253,11 @@ class SpectraReader(JRODataReader, ProcessingUnit):
if self.__hasNotDataInBuffer():
- if not( self.readNextBlock() ):
+ if not(self.readNextBlock()):
self.dataOut.flagNoData = True
return 0
- #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
+ # data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
if self.data_spc is None:
self.dataOut.flagNoData = True
@@ -356,20 +356,20 @@ class SpectraWriter(JRODataWriter, Operation):
Return: None
"""
- spc = numpy.transpose( self.data_spc, (0,2,1) )
+ spc = numpy.transpose(self.data_spc, (0, 2, 1))
if not self.processingHeaderObj.shif_fft:
- spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
+ spc = numpy.roll(spc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # desplaza a la derecha en el eje 2 determinadas posiciones
data = spc.reshape((-1))
data = data.astype(self.dtype[0])
data.tofile(self.fp)
if self.data_cspc is not None:
- cspc = numpy.transpose( self.data_cspc, (0,2,1) )
- data = numpy.zeros( numpy.shape(cspc), self.dtype )
- #print 'data.shape', self.shape_cspc_Buffer
+ cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
+ data = numpy.zeros(numpy.shape(cspc), self.dtype)
+ # print 'data.shape', self.shape_cspc_Buffer
if not self.processingHeaderObj.shif_fft:
- cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
+ cspc = numpy.roll(cspc, int(self.processingHeaderObj.profilesPerBlock / 2), axis=2) # desplaza a la derecha en el eje 2 determinadas posiciones
data['real'] = cspc.real
data['imag'] = cspc.imag
data = data.reshape((-1))
@@ -378,7 +378,7 @@ class SpectraWriter(JRODataWriter, Operation):
if self.data_dc is not None:
dc = self.data_dc
- data = numpy.zeros( numpy.shape(dc), self.dtype )
+ data = numpy.zeros(numpy.shape(dc), self.dtype)
data['real'] = dc.real
data['imag'] = dc.imag
data = data.reshape((-1))
@@ -453,15 +453,15 @@ class SpectraWriter(JRODataWriter, Operation):
pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
- blocksize = (pts2write_SelfSpectra*dtype_width)
+ blocksize = (pts2write_SelfSpectra * dtype_width)
if self.dataOut.data_cspc is not None:
pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
- blocksize += (pts2write_CrossSpectra*dtype_width*2)
+ blocksize += (pts2write_CrossSpectra * dtype_width * 2)
if self.dataOut.data_dc is not None:
pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
- blocksize += (pts2write_DCchannels*dtype_width*2)
+ blocksize += (pts2write_DCchannels * dtype_width * 2)
# blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
@@ -485,12 +485,12 @@ class SpectraWriter(JRODataWriter, Operation):
self.systemHeaderObj.nChannels = self.dataOut.nChannels
self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
- self.processingHeaderObj.dtype = 1 # Spectra
+ self.processingHeaderObj.dtype = 1 # Spectra
self.processingHeaderObj.blockSize = self.__getBlockSize()
self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
- self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
- self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
+ self.processingHeaderObj.nWindows = 1 # podria ser 1 o self.dataOut.processingHeaderObj.nWindows
+ self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # Se requiere para determinar el valor de timeInterval
self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
@@ -524,4 +524,4 @@ class SpectraWriter(JRODataWriter, Operation):
self.processingHeaderObj.processFlags = self.getProcessFlags()
- self.setBasicHeader()
\ No newline at end of file
+ self.setBasicHeader()
diff --git a/schainpy/model/io/jroIO_usrp.py b/schainpy/model/io/jroIO_usrp.py
index 1ac362b..286663d 100644
--- a/schainpy/model/io/jroIO_usrp.py
+++ b/schainpy/model/io/jroIO_usrp.py
@@ -46,7 +46,7 @@ class USRPReader(ProcessingUnit):
def __getCurrentSecond(self):
- return self.__thisUnixSample/self.__sample_rate
+ return self.__thisUnixSample / self.__sample_rate
thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
@@ -54,9 +54,9 @@ class USRPReader(ProcessingUnit):
'''
In this method will be initialized every parameter of dataOut object (header, no data)
'''
- ippSeconds = 1.0*self.__nSamples/self.__sample_rate
+ ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
- nProfiles = 1.0/ippSeconds #Number of profiles in one second
+ nProfiles = 1.0 / ippSeconds # Number of profiles in one second
self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
txA=0,
@@ -67,7 +67,7 @@ class USRPReader(ProcessingUnit):
deltaHeight=self.__deltaHeigth,
codeType=self.__codeType,
nCode=self.__nCode, nBaud=self.__nBaud,
- code = self.__code)
+ code=self.__code)
self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
nProfiles=nProfiles,
@@ -78,7 +78,7 @@ class USRPReader(ProcessingUnit):
self.dataOut.data = None
- self.dataOut.dtype = numpy.dtype([('real',' startUTCSecond*self.__sample_rate:
- startUTCSecond = start_index/self.__sample_rate
+ if start_index > startUTCSecond * self.__sample_rate:
+ startUTCSecond = start_index / self.__sample_rate
if not endUTCSecond:
- endUTCSecond = end_index/self.__sample_rate
+ endUTCSecond = end_index / self.__sample_rate
- if end_index < endUTCSecond*self.__sample_rate:
- endUTCSecond = end_index/self.__sample_rate
+ if end_index < endUTCSecond * self.__sample_rate:
+ endUTCSecond = end_index / self.__sample_rate
if not nSamples:
if not ippKm:
raise ValueError("[Reading] nSamples or ippKm should be defined")
- nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
+ nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
channelBoundList = []
channelNameListFiltered = []
for thisIndexChannel in channelList:
- thisChannelName = channelNameList[thisIndexChannel]
+ thisChannelName = channelNameList[thisIndexChannel]
start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
channelBoundList.append((start_index, end_index))
channelNameListFiltered.append(thisChannelName)
@@ -327,31 +327,31 @@ class USRPReader(ProcessingUnit):
self.__channelNameList = channelNameListFiltered
self.__channelBoundList = channelBoundList
self.__nSamples = nSamples
- self.__samples_to_read = int(buffer_size*nSamples)
+ self.__samples_to_read = int(buffer_size * nSamples)
self.__nChannels = len(self.__channelList)
self.__startUTCSecond = startUTCSecond
self.__endUTCSecond = endUTCSecond
- self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
+ self.__timeInterval = 1.0 * self.__samples_to_read / self.__sample_rate # Time interval
if online:
# self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
startUTCSecond = numpy.floor(endUTCSecond)
- self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
+ self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
- self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
+ self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype=complex)
self.__setFileHeader()
self.isConfig = True
- print("[Reading] USRP Data was found from %s to %s " %(
+ print("[Reading] USRP Data was found from %s to %s " % (
datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
))
- print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
+ print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
))
@@ -375,13 +375,13 @@ class USRPReader(ProcessingUnit):
start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
- if start_index > self.__startUTCSecond*self.__sample_rate:
- self.__startUTCSecond = 1.0*start_index/self.__sample_rate
+ if start_index > self.__startUTCSecond * self.__sample_rate:
+ self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
- if end_index > self.__endUTCSecond*self.__sample_rate:
- self.__endUTCSecond = 1.0*end_index/self.__sample_rate
+ if end_index > self.__endUTCSecond * self.__sample_rate:
+ self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
print()
- print("[Reading] New timerange found [%s, %s] " %(
+ print("[Reading] New timerange found [%s, %s] " % (
datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
))
@@ -390,21 +390,21 @@ class USRPReader(ProcessingUnit):
return False
- def __readNextBlock(self, seconds=30, volt_scale = 218776):
+ def __readNextBlock(self, seconds=30, volt_scale=218776):
'''
'''
- #Set the next data
+ # Set the next data
self.__flagDiscontinuousBlock = False
self.__thisUnixSample += self.__samples_to_read
- if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
+ if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
print("[Reading] There are no more data into selected time-range")
self.__reload()
- if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
- self.__thisUnixSample -= self.__samples_to_read
+ if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
+ self.__thisUnixSample -= self.__samples_to_read
return False
indexChannel = 0
@@ -419,30 +419,30 @@ class USRPReader(ProcessingUnit):
thisChannelName)
except IOError as e:
- #read next profile
+ # read next profile
self.__flagDiscontinuousBlock = True
- print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
+ print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
break
if result.shape[0] != self.__samples_to_read:
self.__flagDiscontinuousBlock = True
- print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
+ print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
result.shape[0],
self.__samples_to_read))
break
- self.__data_buffer[indexChannel,:] = result*volt_scale
+ self.__data_buffer[indexChannel, :] = result * volt_scale
indexChannel += 1
dataOk = True
- self.__utctime = self.__thisUnixSample/self.__sample_rate
+ self.__utctime = self.__thisUnixSample / self.__sample_rate
if not dataOk:
return False
- print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
+ print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
self.__samples_to_read,
self.__timeInterval))
@@ -486,7 +486,7 @@ class USRPReader(ProcessingUnit):
if self.__readNextBlock():
break
- if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
+ if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
return False
if self.__flagDiscontinuousBlock:
@@ -500,11 +500,11 @@ class USRPReader(ProcessingUnit):
if err_counter > nTries:
return False
- print('[Reading] waiting %d seconds to read a new block' %seconds)
+ print('[Reading] waiting %d seconds to read a new block' % seconds)
sleep(seconds)
- self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
- self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
+ self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
+ self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
self.dataOut.flagNoData = False
self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
self.dataOut.profileIndex = self.profileIndex
@@ -599,4 +599,4 @@ if __name__ == '__main__':
while True:
readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
# readObj.printInfo()
- readObj.printNumberOfBlock()
\ No newline at end of file
+ readObj.printNumberOfBlock()
diff --git a/schainpy/model/io/jroIO_usrp_api.py b/schainpy/model/io/jroIO_usrp_api.py
index 1fc35e0..8f92e2f 100644
--- a/schainpy/model/io/jroIO_usrp_api.py
+++ b/schainpy/model/io/jroIO_usrp_api.py
@@ -22,7 +22,7 @@ class USRPReaderAPI(USRPReader, threading.Thread):
# __isBufferEmpty = True
- __DATAKEYLIST = ['data','utctime','flagNoData']
+ __DATAKEYLIST = ['data', 'utctime', 'flagNoData']
def __init__(self, serializer='msgpack'):
@@ -71,7 +71,7 @@ class USRPReaderAPI(USRPReader, threading.Thread):
return None
myMetadataSerial = obj2Serial(self.dataOut,
- serializer = self.__serializer)
+ serializer=self.__serializer)
return myMetadataSerial
@@ -125,8 +125,8 @@ class USRPReaderAPI(USRPReader, threading.Thread):
print(".", end=' ')
self.__mySerial = obj2Serial(self.dataOut,
- keyList = self.__DATAKEYLIST,
- serializer = self.__serializer)
+ keyList=self.__DATAKEYLIST,
+ serializer=self.__serializer)
self.__isBufferEmpty = False
# print self.profileIndex
@@ -136,4 +136,4 @@ class USRPReaderAPI(USRPReader, threading.Thread):
print("Closing thread")
- return
\ No newline at end of file
+ return
diff --git a/schainpy/model/io/jroIO_voltage.py b/schainpy/model/io/jroIO_voltage.py
index cb484db..ce5c1df 100644
--- a/schainpy/model/io/jroIO_voltage.py
+++ b/schainpy/model/io/jroIO_voltage.py
@@ -81,7 +81,7 @@ class VoltageReader(JRODataReader, ProcessingUnit):
self.radarControllerHeaderObj = RadarControllerHeader()
self.processingHeaderObj = ProcessingHeader()
self.lastUTTime = 0
- self.profileIndex = 2**32 - 1
+ self.profileIndex = 2 ** 32 - 1
self.dataOut = Voltage()
self.selBlocksize = None
self.selBlocktime = None
@@ -251,7 +251,7 @@ class VoltageReader(JRODataReader, ProcessingUnit):
self.firstHeaderSize = self.basicHeaderObj.size
- datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
+ datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
if datatype == 0:
datatype_str = numpy.dtype([('real', ' dateFile) or (endDate < dateFile):
continue
@@ -259,7 +259,7 @@ class JULIAParamReader(JRODataReader, ProcessingUnit):
nheights = int(self.header_rec['nheights'])
hours = float(self.header_rec['hours'][0])
heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
- datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
+ datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy - 1, hours=hours)
return heights, datatime
else:
return False
@@ -269,30 +269,30 @@ class JULIAParamReader(JRODataReader, ProcessingUnit):
Parse data
'''
- buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
+ buffer = numpy.fromfile(self.fp, 'f', 8 * N).reshape(N, 8)
pow0 = buffer[:, 0]
pow1 = buffer[:, 1]
- acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
- acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
- dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
+ acf0 = (buffer[:, 2] + buffer[:, 3] * 1j) / pow0
+ acf1 = (buffer[:, 4] + buffer[:, 5] * 1j) / pow1
+ dccf = (buffer[:, 6] + buffer[:, 7] * 1j) / (pow0 * pow1)
- ### SNR
+ # ## SNR
sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
sno10 = numpy.log10(sno)
# dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
- ### Vertical Drift
- sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
+ # ## Vertical Drift
+ sp = numpy.sqrt(numpy.abs(acf0) * numpy.abs(acf1))
sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
- vzo = -numpy.arctan2(acf0.imag + acf1.imag,acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)
- dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
+ vzo = -numpy.arctan2(acf0.imag + acf1.imag, acf0.real + acf1.real) * 1.5E5 * 1.5 / (self.ipp * numpy.pi)
+ dvzo = numpy.sqrt(1.0 - sp * sp) * 0.338 * 1.5E5 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * sp * self.ipp)
err = numpy.where(dvzo <= 0.1)
dvzo[err] = 0.1
- #Zonal Drifts
- dt = self.header_file['nint']*self.ipp / 1.5E5
+ # Zonal Drifts
+ dt = self.header_file['nint'] * self.ipp / 1.5E5
coh = numpy.sqrt(numpy.abs(dccf))
err = numpy.where(coh >= 1.0)
coh[err] = numpy.sqrt(0.99999)
@@ -300,8 +300,8 @@ class JULIAParamReader(JRODataReader, ProcessingUnit):
err = numpy.where(coh <= 0.1)
coh[err] = numpy.sqrt(0.1)
- vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)
- dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
+ vxo = numpy.arctan2(dccf.imag, dccf.real) * self.header_rec['h0'] * 1.0E3 / (self.kd * dt)
+ dvxo = numpy.sqrt(1.0 - coh * coh) * self.header_rec['h0'] * 1.0E3 / (numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * coh * self.kd * dt)
err = numpy.where(dvxo <= 0.1)
dvxo[err] = 0.1
@@ -315,7 +315,7 @@ class JULIAParamReader(JRODataReader, ProcessingUnit):
self.dataOut.data_snr = self.buffer[4].reshape(1, -1)
self.dataOut.heightList = self.heights
- self.dataOut.data_param = self.buffer[0:4,]
+ self.dataOut.data_param = self.buffer[0:4, ]
self.dataOut.utctimeInit = self.time
self.dataOut.utctime = self.time
self.dataOut.useLocalTime = True
@@ -340,4 +340,4 @@ class JULIAParamReader(JRODataReader, ProcessingUnit):
self.set_output()
- return 1
\ No newline at end of file
+ return 1
diff --git a/schainpy/model/io/pxIO_param.py b/schainpy/model/io/pxIO_param.py
index 86d6755..81378e0 100644
--- a/schainpy/model/io/pxIO_param.py
+++ b/schainpy/model/io/pxIO_param.py
@@ -59,7 +59,7 @@ class PXReader(JRODataReader, ProcessingUnit):
self.endDate = endDate
self.startTime = startTime
self.endTime = endTime
- self.datatime = datetime.datetime(1900,1,1)
+ self.datatime = datetime.datetime(1900, 1, 1)
self.walk = walk
self.nTries = kwargs.get('nTries', 10)
self.online = kwargs.get('online', False)
diff --git a/schainpy/model/proc/bltrproc_parameters.py b/schainpy/model/proc/bltrproc_parameters.py
index 925d791..96ca074 100644
--- a/schainpy/model/proc/bltrproc_parameters.py
+++ b/schainpy/model/proc/bltrproc_parameters.py
@@ -65,7 +65,7 @@ class BLTRParametersProc(ProcessingUnit):
self.dataOut.heightList = self.dataOut.height[0]
self.dataOut.data_snr = self.dataOut.data_snr[mode]
SNRavg = numpy.average(self.dataOut.data_snr, axis=0)
- SNRavgdB = 10*numpy.log10(SNRavg)
+ SNRavgdB = 10 * numpy.log10(SNRavg)
self.dataOut.data_snr_avg_db = SNRavgdB.reshape(1, *SNRavgdB.shape)
# Censoring Data
@@ -114,29 +114,29 @@ class OutliersFilter(Operation):
nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
minvalid = npoints
- #only if valid values greater than the minimum required (10%)
+ # only if valid values greater than the minimum required (10%)
if nvalues_valid > minvalid:
if method == 0:
- #SMOOTH
+ # SMOOTH
w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
if method == 1:
- #MEDIAN
- w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
+ # MEDIAN
+ w = value_temp[h] - self.Median(input=value_temp[h], width=npoints)
- dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
+ dw = numpy.std(w[numpy.where(numpy.isfinite(w))], ddof=1)
- threshold = dw*factor
- value_temp[numpy.where(w > threshold),h] = numpy.nan
- value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
+ threshold = dw * factor
+ value_temp[numpy.where(w > threshold), h] = numpy.nan
+ value_temp[numpy.where(w < -1 * threshold), h] = numpy.nan
- #At the end
+ # At the end
if svalue2 == 'inHeight':
value_temp = numpy.transpose(value_temp)
- output_array[:,m] = value_temp
+ output_array[:, m] = value_temp
if svalue == 'zonal':
self.dataOut.data_output[0] = output_array
@@ -150,7 +150,7 @@ class OutliersFilter(Operation):
return self.dataOut.data_output
- def Median(self,input,width):
+ def Median(self, input, width):
'''
Inputs:
input - Velocity array
@@ -158,26 +158,26 @@ class OutliersFilter(Operation):
'''
- if numpy.mod(width,2) == 1:
+ if numpy.mod(width, 2) == 1:
pc = int((width - 1) / 2)
cont = 0
output = []
for i in range(len(input)):
if i >= pc and i < len(input) - pc:
- new2 = input[i-pc:i+pc+1]
+ new2 = input[i - pc:i + pc + 1]
temp = numpy.where(numpy.isfinite(new2))
new = new2[temp]
value = numpy.median(new)
output.append(value)
output = numpy.array(output)
- output = numpy.hstack((input[0:pc],output))
- output = numpy.hstack((output,input[-pc:len(input)]))
+ output = numpy.hstack((input[0:pc], output))
+ output = numpy.hstack((output, input[-pc:len(input)]))
return output
- def Smooth(self,input,width,edge_truncate = None):
+ def Smooth(self, input, width, edge_truncate=None):
'''
Inputs:
input - Velocity array
@@ -186,17 +186,17 @@ class OutliersFilter(Operation):
'''
- if numpy.mod(width,2) == 0:
+ if numpy.mod(width, 2) == 0:
real_width = width + 1
nzeros = width / 2
else:
real_width = width
nzeros = (width - 1) / 2
- half_width = int(real_width)/2
+ half_width = int(real_width) / 2
length = len(input)
- gate = numpy.ones(real_width,dtype='float')
+ gate = numpy.ones(real_width, dtype='float')
norm_of_gate = numpy.sum(gate)
nan_process = 0
@@ -208,21 +208,21 @@ class OutliersFilter(Operation):
input[nan_id] = 0.
if edge_truncate == True:
- output = numpy.convolve(input/norm_of_gate,gate,mode='same')
+ output = numpy.convolve(input / norm_of_gate, gate, mode='same')
elif edge_truncate == False or edge_truncate == None:
- output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
- output = numpy.hstack((input[0:half_width],output))
- output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
+ output = numpy.convolve(input / norm_of_gate, gate, mode='valid')
+ output = numpy.hstack((input[0:half_width], output))
+ output = numpy.hstack((output, input[len(input) - half_width:len(input)]))
if nan_process:
- pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
- pb = numpy.hstack((numpy.zeros(half_width),pb))
- pb = numpy.hstack((pb,numpy.zeros(half_width)))
+ pb = numpy.convolve(pb / norm_of_gate, gate, mode='valid')
+ pb = numpy.hstack((numpy.zeros(half_width), pb))
+ pb = numpy.hstack((pb, numpy.zeros(half_width)))
output[numpy.where(pb > 0.9999)] = numpy.nan
input[nan_id] = numpy.nan
return output
- def Average(self,aver=0,nhaver=1):
+ def Average(self, aver=0, nhaver=1):
'''
Inputs:
aver - Indicates the time period over which is averaged or consensus data
@@ -235,27 +235,27 @@ class OutliersFilter(Operation):
lat_huancayo = -12.04
lat_porcuya = -5.8
- if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
+ if '%2.2f' % self.dataOut.lat == '%2.2f' % lat_piura:
hcm = 3.
if self.dataOut.year == 2003 :
if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
nhpoints = 12
- elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
+ elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_huancayo:
hcm = 3.
if self.dataOut.year == 2003 :
if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
nhpoints = 12
- elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
- hcm = 5.#2
+ elif '%2.2f' % self.dataOut.lat == '%2.2f' % lat_porcuya:
+ hcm = 5. # 2
pdata = 0.2
- taver = [1,2,3,4,6,8,12,24]
+ taver = [1, 2, 3, 4, 6, 8, 12, 24]
t0 = 0
tf = 24
- ntime =(tf-t0)/taver[aver]
+ ntime = (tf - t0) / taver[aver]
ti = numpy.arange(ntime)
tf = numpy.arange(ntime) + taver[aver]
@@ -263,11 +263,11 @@ class OutliersFilter(Operation):
old_height = self.dataOut.heightList
if nhaver > 1:
- num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
- deltha = 0.05*nhaver
- minhvalid = pdata*nhaver
+ num_hei = len(self.dataOut.heightList) / nhaver / self.dataOut.nmodes
+ deltha = 0.05 * nhaver
+ minhvalid = pdata * nhaver
for im in range(self.dataOut.nmodes):
- new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
+ new_height = numpy.arange(num_hei) * deltha + self.dataOut.height[im, 0] + deltha / 2.
data_fHeigths_List = []
@@ -280,8 +280,8 @@ class OutliersFilter(Operation):
for i in range(ntime):
height = old_height
- start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
- stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
+ start = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(ti[i])) - datetime.timedelta(hours=5)
+ stop = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=int(tf[i])) - datetime.timedelta(hours=5)
limit_sec1 = time.mktime(start.timetuple())
@@ -295,17 +295,17 @@ class OutliersFilter(Operation):
time_select.append(val_sec)
- time_select = numpy.array(time_select,dtype = 'int')
- minvalid = numpy.ceil(pdata*nhpoints)
+ time_select = numpy.array(time_select, dtype='int')
+ minvalid = numpy.ceil(pdata * nhpoints)
- zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
- mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
- ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
+ zon_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
+ mer_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
+ ver_aver = numpy.zeros([self.dataOut.nranges, self.dataOut.nmodes], dtype='f4') + numpy.nan
if nhaver > 1:
- new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
- new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
- new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
+ new_zon_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
+ new_mer_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
+ new_ver_aver = numpy.zeros([num_hei, self.dataOut.nmodes], dtype='f4') + numpy.nan
if len(time_select) > minvalid:
time_average = self.f_timesec[time_select]
@@ -313,27 +313,27 @@ class OutliersFilter(Operation):
for im in range(self.dataOut.nmodes):
for ih in range(self.dataOut.nranges):
- if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
- zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
+ if numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im])) >= minvalid:
+ zon_aver[ih, im] = numpy.nansum(self.f_zon[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select, ih, im]))
- if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
- mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
+ if numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im])) >= minvalid:
+ mer_aver[ih, im] = numpy.nansum(self.f_mer[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select, ih, im]))
- if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
- ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
+ if numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im])) >= minvalid:
+ ver_aver[ih, im] = numpy.nansum(self.f_ver[time_select, ih, im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select, ih, im]))
if nhaver > 1:
for ih in range(num_hei):
- hvalid = numpy.arange(nhaver) + nhaver*ih
+ hvalid = numpy.arange(nhaver) + nhaver * ih
- if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
- new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
+ if numpy.sum(numpy.isfinite(zon_aver[hvalid, im])) >= minvalid:
+ new_zon_aver[ih, im] = numpy.nansum(zon_aver[hvalid, im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid, im]))
- if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
- new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
+ if numpy.sum(numpy.isfinite(mer_aver[hvalid, im])) >= minvalid:
+ new_mer_aver[ih, im] = numpy.nansum(mer_aver[hvalid, im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid, im]))
- if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
- new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
+ if numpy.sum(numpy.isfinite(ver_aver[hvalid, im])) >= minvalid:
+ new_ver_aver[ih, im] = numpy.nansum(ver_aver[hvalid, im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid, im]))
if nhaver > 1:
zon_aver = new_zon_aver
mer_aver = new_mer_aver
@@ -352,7 +352,7 @@ class OutliersFilter(Operation):
minute = startTime.tm_min
second = startTime.tm_sec
- startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
+ startDTList.append(datetime.datetime(year, month, day, hour, minute, second))
o_height = numpy.array([])
@@ -363,17 +363,17 @@ class OutliersFilter(Operation):
for im in range(self.dataOut.nmodes):
if im == 0:
- h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
+ h_select = numpy.where(numpy.bitwise_and(height[0, :] >= 0, height[0, :] <= hcm, numpy.isfinite(height[0, :])))
else:
- h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
+ h_select = numpy.where(numpy.bitwise_and(height[1, :] > hcm, height[1, :] < 20, numpy.isfinite(height[1, :])))
ht = h_select[0]
- o_height = numpy.hstack((o_height,height[im,ht]))
- o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
- o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
- o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
+ o_height = numpy.hstack((o_height, height[im, ht]))
+ o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
+ o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
+ o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
data_fHeigths_List.append(o_height)
data_fZonal_List.append(o_zon_aver)
@@ -382,12 +382,12 @@ class OutliersFilter(Operation):
else:
- h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
+ h_select = numpy.where(numpy.bitwise_and(height[0, :] <= hcm, numpy.isfinite(height[0, :])))
ht = h_select[0]
- o_height = numpy.hstack((o_height,height[im,ht]))
- o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
- o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
- o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
+ o_height = numpy.hstack((o_height, height[im, ht]))
+ o_zon_aver = numpy.hstack((o_zon_aver, zon_aver[ht, im]))
+ o_mer_aver = numpy.hstack((o_mer_aver, mer_aver[ht, im]))
+ o_ver_aver = numpy.hstack((o_ver_aver, ver_aver[ht, im]))
data_fHeigths_List.append(o_height)
data_fZonal_List.append(o_zon_aver)
diff --git a/schainpy/model/proc/jroproc_amisr.py b/schainpy/model/proc/jroproc_amisr.py
index 93f3dd8..ac1c0b9 100644
--- a/schainpy/model/proc/jroproc_amisr.py
+++ b/schainpy/model/proc/jroproc_amisr.py
@@ -24,14 +24,14 @@ class PrintInfoAMISR(Operation):
def run(self, dataOut):
if not self.__isPrinted:
- print('Number of Records by File: %d'%dataOut.nRecords)
- print('Number of Pulses: %d'%dataOut.nProfiles)
- print('Number of Pulses by Frame: %d'%dataOut.npulseByFrame)
- print('Number of Samples by Pulse: %d'%len(dataOut.heightList))
- print('Ipp Seconds: %f'%dataOut.ippSeconds)
- print('Number of Beams: %d'%dataOut.nBeams)
+ print('Number of Records by File: %d' % dataOut.nRecords)
+ print('Number of Pulses: %d' % dataOut.nProfiles)
+ print('Number of Pulses by Frame: %d' % dataOut.npulseByFrame)
+ print('Number of Samples by Pulse: %d' % len(dataOut.heightList))
+ print('Ipp Seconds: %f' % dataOut.ippSeconds)
+ print('Number of Beams: %d' % dataOut.nBeams)
print('BeamCodes:')
- beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f'%(k,v[0],v[1],v[2],v[3]) for k,v in list(dataOut.beamCodeDict.items())]
+ beamStrList = ['Beam %d -> Code=%d, azimuth=%2.2f, zenith=%2.2f, gain=%2.2f' % (k, v[0], v[1], v[2], v[3]) for k, v in list(dataOut.beamCodeDict.items())]
for b in beamStrList:
print(b)
self.__isPrinted = True
@@ -119,7 +119,7 @@ class ProfileToChannels(Operation):
if not(self.__isConfig):
nchannels = len(list(dataOut.beamRangeDict.keys()))
nsamples = dataOut.nHeights
- self.buffer = numpy.zeros((nchannels, nsamples), dtype = 'complex128')
+ self.buffer = numpy.zeros((nchannels, nsamples), dtype='complex128')
dataOut.beam.codeList = [dataOut.beamCodeDict[x][0] for x in range(nchannels)]
dataOut.beam.azimuthList = [dataOut.beamCodeDict[x][1] for x in range(nchannels)]
dataOut.beam.zenithList = [dataOut.beamCodeDict[x][2] for x in range(nchannels)]
@@ -127,7 +127,7 @@ class ProfileToChannels(Operation):
for i in range(self.buffer.shape[0]):
if dataOut.profileIndex in dataOut.beamRangeDict[i]:
- self.buffer[i,:] = dataOut.data
+ self.buffer[i, :] = dataOut.data
break
@@ -140,4 +140,4 @@ class ProfileToChannels(Operation):
self.__isConfig = False
dataOut.flagNoData = False
pass
-
\ No newline at end of file
+
diff --git a/schainpy/model/proc/jroproc_base.py b/schainpy/model/proc/jroproc_base.py
index b359d8a..126ee81 100644
--- a/schainpy/model/proc/jroproc_base.py
+++ b/schainpy/model/proc/jroproc_base.py
@@ -198,6 +198,6 @@ def MPDecorator(BaseClass):
def close(self):
BaseClass.close(self)
- log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
+ log.success('Done...(Time:{:4.2f} secs)'.format(time.time() - self.start_time), self.name)
return MPClass
diff --git a/schainpy/model/proc/jroproc_correlation.py b/schainpy/model/proc/jroproc_correlation.py
index 76a92fe..51712e0 100644
--- a/schainpy/model/proc/jroproc_correlation.py
+++ b/schainpy/model/proc/jroproc_correlation.py
@@ -30,7 +30,7 @@ class CorrelationProc(ProcessingUnit):
self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
self.dataOut.channelList = self.dataIn.channelList
self.dataOut.heightList = self.dataIn.heightList
- self.dataOut.dtype = numpy.dtype([('real','= 0:
if mode == 'time':
- ccf0 = data_pre[ch0,:nProfiles-idx,:]*numpy.conj(data_pre[ch1,idx:,:]) #time
+ ccf0 = data_pre[ch0, :nProfiles - idx, :] * numpy.conj(data_pre[ch1, idx:, :]) # time
else:
- ccf0 = data_pre[ch0,:,nHeights-idx]*numpy.conj(data_pre[ch1,:,idx:]) #heights
+ ccf0 = data_pre[ch0, :, nHeights - idx] * numpy.conj(data_pre[ch1, :, idx:]) # heights
else:
if mode == 'time':
- ccf0 = data_pre[ch0,-idx:,:]*numpy.conj(data_pre[ch1,:nProfiles+idx,:]) #time
+ ccf0 = data_pre[ch0, -idx:, :] * numpy.conj(data_pre[ch1, :nProfiles + idx, :]) # time
else:
- ccf0 = data_pre[ch0,:,-idx:]*numpy.conj(data_pre[ch1,:,:nHeights+idx]) #heights
+ ccf0 = data_pre[ch0, :, -idx:] * numpy.conj(data_pre[ch1, :, :nHeights + idx]) # heights
if fullBuffer:
- tmp[l,i,:ccf0.shape[0],:] = ccf0
+ tmp[l, i, :ccf0.shape[0], :] = ccf0
else:
- tmp[l,i,:] = numpy.sum(ccf0, axis=0)
+ tmp[l, i, :] = numpy.sum(ccf0, axis=0)
#-----------------------------------------------------------------
if fullBuffer:
- tmp = numpy.sum(numpy.reshape(tmp,(tmp.shape[0],tmp.shape[1],tmp.shape[2]/nAvg,nAvg,tmp.shape[3])),axis=3)
+ tmp = numpy.sum(numpy.reshape(tmp, (tmp.shape[0], tmp.shape[1], tmp.shape[2] / nAvg, nAvg, tmp.shape[3])), axis=3)
self.dataOut.nAvg = nAvg
self.dataOut.data_cf = tmp
@@ -166,12 +166,12 @@ class CorrelationProc(ProcessingUnit):
self.dataOut.pairsList = pairsList
self.dataOut.nPairs = len(pairsList)
- #Se Calcula los factores de Normalizacion
+ # Se Calcula los factores de Normalizacion
if mode == 'time':
- delta = self.dataIn.ippSeconds*self.dataIn.nCohInt
+ delta = self.dataIn.ippSeconds * self.dataIn.nCohInt
else:
delta = self.dataIn.heightList[1] - self.dataIn.heightList[0]
- self.dataOut.lagRange = numpy.array(lags)*delta
+ self.dataOut.lagRange = numpy.array(lags) * delta
# self.dataOut.nCohInt = self.dataIn.nCohInt*nAvg
self.dataOut.flagNoData = False
# a = self.dataOut.normFactor
diff --git a/schainpy/model/proc/jroproc_heispectra.py b/schainpy/model/proc/jroproc_heispectra.py
index be414c1..bbb7a47 100644
--- a/schainpy/model/proc/jroproc_heispectra.py
+++ b/schainpy/model/proc/jroproc_heispectra.py
@@ -8,9 +8,9 @@ from schainpy.utils import log
class SpectraHeisProc(ProcessingUnit):
- def __init__(self):#, **kwargs):
+ def __init__(self): # , **kwargs):
- ProcessingUnit.__init__(self)#, **kwargs)
+ ProcessingUnit.__init__(self) # , **kwargs)
# self.buffer = None
# self.firstdatatime = None
@@ -24,12 +24,12 @@ class SpectraHeisProc(ProcessingUnit):
self.dataOut.errorCount = self.dataIn.errorCount
self.dataOut.useLocalTime = self.dataIn.useLocalTime
- self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
- self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
+ self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy() #
+ self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy() #
self.dataOut.channelList = self.dataIn.channelList
self.dataOut.heightList = self.dataIn.heightList
# self.dataOut.dtype = self.dataIn.dtype
- self.dataOut.dtype = numpy.dtype([('real',' change this line
+ self.__integrationtime = timeInterval # * 60. #if (type(timeInterval)!=integer) -> change this line
self.n = 9999
self.__byTime = True
@@ -219,25 +219,25 @@ class IncohInt4SpectraHeis(Operation):
self.__profIndex += 1
return
- #Overlapping data
+ # Overlapping data
nChannels, nHeis = data.shape
data = numpy.reshape(data, (1, nChannels, nHeis))
- #If the buffer is empty then it takes the data value
+ # If the buffer is empty then it takes the data value
if self.__buffer is None:
self.__buffer = data
self.__profIndex += 1
return
- #If the buffer length is lower than n then stakcing the data value
+ # If the buffer length is lower than n then stacking the data value
if self.__profIndex < self.n:
self.__buffer = numpy.vstack((self.__buffer, data))
self.__profIndex += 1
return
- #If the buffer length is equal to n then replacing the last buffer value with the data value
+ # If the buffer length is equal to n then replacing the last buffer value with the data value
self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
- self.__buffer[self.n-1] = data
+ self.__buffer[self.n - 1] = data
self.__profIndex = self.n
return
@@ -261,7 +261,7 @@ class IncohInt4SpectraHeis(Operation):
return data, n
- #Integration with Overlapping
+ # Integration with Overlapping
data = numpy.sum(self.__buffer, axis=0)
n = self.__profIndex
@@ -315,7 +315,7 @@ class IncohInt4SpectraHeis(Operation):
avgdatatime = self.__initime
- deltatime = datatime -self.__lastdatatime
+ deltatime = datatime - self.__lastdatatime
if not self.__withOverapping:
self.__initime = datatime
@@ -344,4 +344,4 @@ class IncohInt4SpectraHeis(Operation):
# dataOut.timeInterval = self.__timeInterval*self.n
dataOut.flagNoData = False
- return dataOut
\ No newline at end of file
+ return dataOut
diff --git a/schainpy/model/proc/jroproc_parameters.py b/schainpy/model/proc/jroproc_parameters.py
index 62904ec..2946935 100755
--- a/schainpy/model/proc/jroproc_parameters.py
+++ b/schainpy/model/proc/jroproc_parameters.py
@@ -1,6 +1,7 @@
import numpy
import math
from scipy import optimize, interpolate, signal, stats, ndimage
+from scipy.stats import norm
import scipy
import re
import datetime
@@ -8,25 +9,25 @@ import copy
import sys
import importlib
import itertools
-from multiprocessing import Pool, TimeoutError
+from multiprocessing import Pool, TimeoutError
from multiprocessing.pool import ThreadPool
import time
from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
-from .jroproc_base import ProcessingUnit, Operation, MPDecorator
+from .jroproc_base import ProcessingUnit, Operation #, MPDecorator
from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
from scipy import asarray as ar,exp
from scipy.optimize import curve_fit
-from schainpy.utils import log
+#from schainpy.utils import log
import warnings
from numpy import NaN
from scipy.optimize.optimize import OptimizeWarning
warnings.filterwarnings('ignore')
-import matplotlib.pyplot as plt
SPEED_OF_LIGHT = 299792458
+
'''solving pickling issue'''
def _pickle_method(method):
@@ -45,15 +46,15 @@ def _unpickle_method(func_name, obj, cls):
break
return func.__get__(obj, cls)
-
+#@MPDecorator
class ParametersProc(ProcessingUnit):
-
+
METHODS = {}
nSeconds = None
def __init__(self):
ProcessingUnit.__init__(self)
-
+
# self.objectDict = {}
self.buffer = None
self.firstdatatime = None
@@ -62,73 +63,59 @@ class ParametersProc(ProcessingUnit):
self.setupReq = False #Agregar a todas las unidades de proc
def __updateObjFromInput(self):
-
+
self.dataOut.inputUnit = self.dataIn.type
-
+
self.dataOut.timeZone = self.dataIn.timeZone
self.dataOut.dstFlag = self.dataIn.dstFlag
self.dataOut.errorCount = self.dataIn.errorCount
self.dataOut.useLocalTime = self.dataIn.useLocalTime
-
+
self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
self.dataOut.channelList = self.dataIn.channelList
self.dataOut.heightList = self.dataIn.heightList
self.dataOut.dtype = numpy.dtype([('real','0))
- j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
- if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
- continue
- junk3 = numpy.squeeze(numpy.diff(j1index))
- junk4 = numpy.squeeze(numpy.diff(j2index))
-
- valleyindex = j2index[numpy.where(junk4>1)]
- peakindex = j1index[numpy.where(junk3>1)]
-
- isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
- if numpy.size(isvalid) == 0 :
- continue
- if numpy.size(isvalid) >1 :
- vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
- isvalid = isvalid[vindex]
-
- # clutter peak
- gcpeak = peakindex[isvalid]
- vl = numpy.where(valleyindex < gcpeak)
- if numpy.size(vl) == 0:
- continue
- gcvl = valleyindex[vl[0][-1]]
- vr = numpy.where(valleyindex > gcpeak)
- if numpy.size(vr) == 0:
- continue
- gcvr = valleyindex[vr[0][0]]
-
- # Removing the clutter
- interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
- gcindex = gc_values[gcvl+1:gcvr-1]
- self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
-
- dataOut.data_pre[0] = self.spc_out
- #print ('Leaving RemoveWideGC ... ')
- return dataOut
-
+
class SpectralFilters(Operation):
- ''' This class allows to replace the novalid values with noise for each channel
- This applies to CLAIRE RADAR
-
- PositiveLimit : RightLimit of novalid data
- NegativeLimit : LeftLimit of novalid data
-
+
+ '''This class allows the Rainfall / Wind Selection for CLAIRE RADAR
+
+ LimitR : It is the limit in m/s of Rainfall
+ LimitW : It is the limit in m/s for Winds
+
Input:
-
+
self.dataOut.data_pre : SPC and CSPC
self.dataOut.spc_range : To select wind and rainfall velocities
-
+
Affected:
-
+
self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
-
- Written by D. Scipión 29.01.2021
+ self.dataOut.spcparam_range : Used in SpcParamPlot
+ self.dataOut.SPCparam : Used in PrecipitationProc
+
+
'''
+
def __init__(self):
Operation.__init__(self)
- self.i = 0
-
- def run(self, dataOut, ):
-
+ self.i=0
+
+ def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=2.5):
+
+
+ #Limite de vientos
+ LimitR = PositiveLimit
+ LimitN = NegativeLimit
+
self.spc = dataOut.data_pre[0].copy()
+ self.cspc = dataOut.data_pre[1].copy()
+
+ self.Num_Hei = self.spc.shape[2]
+ self.Num_Bin = self.spc.shape[1]
self.Num_Chn = self.spc.shape[0]
+
VelRange = dataOut.spc_range[2]
-
- # novalid corresponds to data within the Negative and PositiveLimit
+ TimeRange = dataOut.spc_range[1]
+ FrecRange = dataOut.spc_range[0]
+
+ Vmax= 2*numpy.max(dataOut.spc_range[2])
+ Tmax= 2*numpy.max(dataOut.spc_range[1])
+ Fmax= 2*numpy.max(dataOut.spc_range[0])
+ Breaker1R=VelRange[numpy.abs(VelRange-(-LimitN)).argmin()]
+ Breaker1R=numpy.where(VelRange == Breaker1R)
+
+ Delta = self.Num_Bin/2 - Breaker1R[0]
+
+
+ '''Reacomodando SPCrange'''
- # Removing novalid data from the spectra
+ VelRange=numpy.roll(VelRange,-(int(self.Num_Bin/2)) ,axis=0)
+
+ VelRange[-(int(self.Num_Bin/2)):]+= Vmax
+
+ FrecRange=numpy.roll(FrecRange,-(int(self.Num_Bin/2)),axis=0)
+
+ FrecRange[-(int(self.Num_Bin/2)):]+= Fmax
+
+ TimeRange=numpy.roll(TimeRange,-(int(self.Num_Bin/2)),axis=0)
+
+ TimeRange[-(int(self.Num_Bin/2)):]+= Tmax
+
+ ''' ------------------ '''
+
+ Breaker2R=VelRange[numpy.abs(VelRange-(LimitR)).argmin()]
+ Breaker2R=numpy.where(VelRange == Breaker2R)
+
+
+ SPCroll = numpy.roll(self.spc,-(int(self.Num_Bin/2)) ,axis=1)
+
+ SPCcut = SPCroll.copy()
for i in range(self.Num_Chn):
- self.spc[i,novalid,:] = dataOut.noise[i]
- dataOut.data_pre[0] = self.spc
+
+ SPCcut[i,0:int(Breaker2R[0]),:] = dataOut.noise[i]
+ SPCcut[i,-int(Delta):,:] = dataOut.noise[i]
+
+ SPCcut[i]=SPCcut[i]- dataOut.noise[i]
+ SPCcut[ numpy.where( SPCcut<0 ) ] = 1e-20
+
+ SPCroll[i]=SPCroll[i]-dataOut.noise[i]
+ SPCroll[ numpy.where( SPCroll<0 ) ] = 1e-20
+
+ SPC_ch1 = SPCroll
+
+ SPC_ch2 = SPCcut
+
+ SPCparam = (SPC_ch1, SPC_ch2, self.spc)
+ dataOut.SPCparam = numpy.asarray(SPCparam)
+
+
+ dataOut.spcparam_range=numpy.zeros([self.Num_Chn,self.Num_Bin+1])
+
+ dataOut.spcparam_range[2]=VelRange
+ dataOut.spcparam_range[1]=TimeRange
+ dataOut.spcparam_range[0]=FrecRange
return dataOut
-
+
class GaussianFit(Operation):
-
+
'''
- Function that fit of one and two generalized gaussians (gg) based
- on the PSD shape across an "power band" identified from a cumsum of
+ Function that fit of one and two generalized gaussians (gg) based
+ on the PSD shape across an "power band" identified from a cumsum of
the measured spectrum - noise.
-
+
Input:
self.dataOut.data_pre : SelfSpectra
-
+
Output:
self.dataOut.SPCparam : SPC_ch1, SPC_ch2
-
+
'''
def __init__(self):
Operation.__init__(self)
self.i=0
-
-
- # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
- def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
+
+
+ def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the fft points
"""This routine will find a couple of generalized Gaussians to a power spectrum
- methods: generalized, squared
input: spc
output:
- noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
+ Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise
"""
- print ('Entering ',method,' double Gaussian fit')
+
self.spc = dataOut.data_pre[0].copy()
self.Num_Hei = self.spc.shape[2]
self.Num_Bin = self.spc.shape[1]
self.Num_Chn = self.spc.shape[0]
+ Vrange = dataOut.abscissaList
+
+ GauSPC = numpy.empty([self.Num_Chn,self.Num_Bin,self.Num_Hei])
+ SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
+ SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
+ SPC_ch1[:] = numpy.NaN
+ SPC_ch2[:] = numpy.NaN
+
start_time = time.time()
-
- pool = Pool(processes=self.Num_Chn)
- args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
- objs = [self for __ in range(self.Num_Chn)]
- attrs = list(zip(objs, args))
- DGauFitParam = pool.map(target, attrs)
- # Parameters:
- # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
- dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
-
- # Double Gaussian Curves
- gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
- gau0[:] = numpy.NaN
- gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
- gau1[:] = numpy.NaN
- x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
- for iCh in range(self.Num_Chn):
- N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
- N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
- A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
- A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
- v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
- v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
- s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
- s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
- if method == 'genealized':
- p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
- p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
- elif method == 'squared':
- p0 = 2.
- p1 = 2.
- gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
- gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
- dataOut.GaussFit0 = gau0
- dataOut.GaussFit1 = gau1
-
- print('Leaving ',method ,' double Gaussian fit')
- return dataOut
-
+
+ noise_ = dataOut.spc_noise[0].copy()
+
+
+ pool = Pool(processes=self.Num_Chn)
+ args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
+ objs = [self for __ in range(self.Num_Chn)]
+ attrs = list(zip(objs, args))
+ gauSPC = pool.map(target, attrs)
+ dataOut.SPCparam = numpy.asarray(SPCparam)
+
+ ''' Parameters:
+ 1. Amplitude
+ 2. Shift
+ 3. Width
+ 4. Power
+ '''
+
def FitGau(self, X):
- # print('Entering FitGau')
- # Assigning the variables
- Vrange, ch, wnoise, num_intg, SNRlimit = X
- # Noise Limits
- noisebl = wnoise * 0.9
- noisebh = wnoise * 1.1
- # Radar Velocity
- Va = max(Vrange)
- deltav = Vrange[1] - Vrange[0]
- x = numpy.arange(self.Num_Bin)
-
- # print ('stop 0')
-
- # 5 parameters, 2 Gaussians
- DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
- DGauFitParam[:] = numpy.NaN
-
- # SPCparam = []
- # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
- # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
- # SPC_ch1[:] = 0 #numpy.NaN
- # SPC_ch2[:] = 0 #numpy.NaN
- # print ('stop 1')
+
+ Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X
+
+ SPCparam = []
+ SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
+ SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
+ SPC_ch1[:] = 0#numpy.NaN
+ SPC_ch2[:] = 0#numpy.NaN
+
+
+
for ht in range(self.Num_Hei):
- # print (ht)
- # print ('stop 2')
- # Spectra at each range
+
+
spc = numpy.asarray(self.spc)[ch,:,ht]
- snr = ( spc.mean() - wnoise ) / wnoise
- snrdB = 10.*numpy.log10(snr)
-
- #print ('stop 3')
- if snrdB < SNRlimit :
- # snr = numpy.NaN
- # SPC_ch1[:,ht] = 0#numpy.NaN
- # SPC_ch1[:,ht] = 0#numpy.NaN
- # SPCparam = (SPC_ch1,SPC_ch2)
- # print ('SNR less than SNRth')
- continue
- # wnoise = hildebrand_sekhon(spc,num_intg)
- # print ('stop 2.01')
+
#############################################
# normalizing spc and noise
# This part differs from gg1
- # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
+ spc_norm_max = max(spc)
#spc = spc / spc_norm_max
- # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
+ pnoise = pnoise #/ spc_norm_max
#############################################
-
- # print ('stop 2.1')
+
fatspectra=1.0
- # noise per channel.... we might want to use the noise at each range
- # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
+ wnoise = noise_ #/ spc_norm_max
#wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
- #if wnoise>1.1*pnoise: # to be tested later
+ #if wnoise>1.1*pnoise: # to be tested later
# wnoise=pnoise
- # noisebl = wnoise*0.9
- # noisebh = wnoise*1.1
- spc = spc - wnoise # signal
-
- # print ('stop 2.2')
- minx = numpy.argmin(spc)
- #spcs=spc.copy()
- spcs = numpy.roll(spc,-minx)
- cum = numpy.cumsum(spcs)
- # tot_noise = wnoise * self.Num_Bin #64;
-
- # print ('stop 2.3')
- # snr = sum(spcs) / tot_noise
- # snrdB = 10.*numpy.log10(snr)
- #print ('stop 3')
- # if snrdB < SNRlimit :
- # snr = numpy.NaN
- # SPC_ch1[:,ht] = 0#numpy.NaN
- # SPC_ch1[:,ht] = 0#numpy.NaN
- # SPCparam = (SPC_ch1,SPC_ch2)
- # print ('SNR less than SNRth')
- # continue
-
-
+ noisebl=wnoise*0.9;
+ noisebh=wnoise*1.1
+ spc=spc-wnoise
+
+ minx=numpy.argmin(spc)
+ #spcs=spc.copy()
+ spcs=numpy.roll(spc,-minx)
+ cum=numpy.cumsum(spcs)
+ tot_noise=wnoise * self.Num_Bin #64;
+
+ snr = sum(spcs)/tot_noise
+ snrdB=10.*numpy.log10(snr)
+
+ if snrdB < SNRlimit :
+ snr = numpy.NaN
+ SPC_ch1[:,ht] = 0#numpy.NaN
+ SPC_ch1[:,ht] = 0#numpy.NaN
+ SPCparam = (SPC_ch1,SPC_ch2)
+ continue
+
+
#if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
# return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
- # print ('stop 4')
- cummax = max(cum)
- epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
- cumlo = cummax * epsi
- cumhi = cummax * (1-epsi)
- powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cumcumlo, cum-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
if oneG:
- choice = 0
+ choice=0
else:
- w1 = lsq2[0][1]; w2 = lsq2[0][5]
- a1 = lsq2[0][2]; a2 = lsq2[0][6]
- p1 = lsq2[0][3]; p2 = lsq2[0][7]
- s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
- s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
- gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each ggaussian with proper p scaling
-
+ w1=lsq2[0][1]; w2=lsq2[0][5]
+ a1=lsq2[0][2]; a2=lsq2[0][6]
+ p1=lsq2[0][3]; p2=lsq2[0][7]
+ s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
+ s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
+ gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
+
if gp1>gp2:
if a1>0.7*a2:
- choice = 1
+ choice=1
else:
- choice = 2
+ choice=2
elif gp2>gp1:
if a2>0.7*a1:
- choice = 2
+ choice=2
else:
- choice = 1
+ choice=1
else:
- choice = numpy.argmax([a1,a2])+1
+ choice=numpy.argmax([a1,a2])+1
#else:
#choice=argmin([std2a,std2b])+1
-
+
else: # with low SNR go to the most energetic peak
- choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
-
- # print ('stop 14')
- shift0 = lsq2[0][0]
- vel0 = Vrange[0] + shift0 * deltav
- shift1 = lsq2[0][4]
- # vel1=Vrange[0] + shift1 * deltav
-
- # max_vel = 1.0
- # Va = max(Vrange)
- # deltav = Vrange[1]-Vrange[0]
- # print ('stop 15')
+ choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
+
+
+ shift0=lsq2[0][0];
+ vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0])
+ shift1=lsq2[0][4];
+ vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0])
+
+ max_vel = 1.0
+
#first peak will be 0, second peak will be 1
- # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D.Scipión 19.03.2021
- if vel0 > -Va and vel0 < Va : #first peak is in the correct range
- shift0 = lsq2[0][0]
- width0 = lsq2[0][1]
- Amplitude0 = lsq2[0][2]
- p0 = lsq2[0][3]
-
- shift1 = lsq2[0][4]
- width1 = lsq2[0][5]
- Amplitude1 = lsq2[0][6]
- p1 = lsq2[0][7]
- noise = lsq2[0][8]
+ if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range
+ shift0=lsq2[0][0]
+ width0=lsq2[0][1]
+ Amplitude0=lsq2[0][2]
+ p0=lsq2[0][3]
+
+ shift1=lsq2[0][4]
+ width1=lsq2[0][5]
+ Amplitude1=lsq2[0][6]
+ p1=lsq2[0][7]
+ noise=lsq2[0][8]
else:
- shift1 = lsq2[0][0]
- width1 = lsq2[0][1]
- Amplitude1 = lsq2[0][2]
- p1 = lsq2[0][3]
-
- shift0 = lsq2[0][4]
- width0 = lsq2[0][5]
- Amplitude0 = lsq2[0][6]
- p0 = lsq2[0][7]
- noise = lsq2[0][8]
-
+ shift1=lsq2[0][0]
+ width1=lsq2[0][1]
+ Amplitude1=lsq2[0][2]
+ p1=lsq2[0][3]
+
+ shift0=lsq2[0][4]
+ width0=lsq2[0][5]
+ Amplitude0=lsq2[0][6]
+ p0=lsq2[0][7]
+ noise=lsq2[0][8]
+
if Amplitude0<0.05: # in case the peak is noise
- shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
+ shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN]
if Amplitude1<0.05:
- shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
-
- # print ('stop 16 ')
- # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
- # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
- # SPCparam = (SPC_ch1,SPC_ch2)
-
- DGauFitParam[0,ht,0] = noise
- DGauFitParam[0,ht,1] = noise
- DGauFitParam[1,ht,0] = Amplitude0
- DGauFitParam[1,ht,1] = Amplitude1
- DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
- DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
- DGauFitParam[3,ht,0] = width0 * deltav
- DGauFitParam[3,ht,1] = width1 * deltav
- DGauFitParam[4,ht,0] = p0
- DGauFitParam[4,ht,1] = p1
-
- # print (DGauFitParam.shape)
- # print ('Leaving FitGau')
- return DGauFitParam
- # return SPCparam
- # return GauSPC
-
+ shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN]
+
+
+        SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
+        SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
+ SPCparam = (SPC_ch1,SPC_ch2)
+
+
+ return GauSPC
+
def y_model1(self,x,state):
- shift0, width0, amplitude0, power0, noise = state
- model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
- model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
- model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
- return model0 + model0u + model0d + noise
-
- def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
- shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
- model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
- model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
- model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
+ shift0,width0,amplitude0,power0,noise=state
+ model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
- model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
- model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
- model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
- return model0 + model0u + model0d + model1 + model1u + model1d + noise
-
- def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is.
+ model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
+
+ model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
+ return model0+model0u+model0d+noise
+
+ def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
+ shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state
+ model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
+
+ model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
+
+ model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
+ model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1)
+
+ model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1)
+
+ model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1)
+ return model0+model0u+model0d+model1+model1u+model1d+noise
+
+ def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is.
return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
-
+
def misfit2(self,state,y_data,x,num_intg):
return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
-
-
+
+
class PrecipitationProc(Operation):
-
+
'''
Operator that estimates Reflectivity factor (Z), and estimates rainfall Rate (R)
-
- Input:
+
+ Input:
self.dataOut.data_pre : SelfSpectra
-
- Output:
-
- self.dataOut.data_output : Reflectivity factor, rainfall Rate
-
-
- Parameters affected:
+
+ Output:
+
+ self.dataOut.data_output : Reflectivity factor, rainfall Rate
+
+
+ Parameters affected:
'''
-
+
def __init__(self):
Operation.__init__(self)
self.i=0
-
- def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
- tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
-
- # print ('Entering PrecepitationProc ... ')
-
- if radar == "MIRA35C" :
-
+
+
+ def gaus(self,xSamples,Amp,Mu,Sigma):
+ return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
+
+
+
+ def Moments(self, ySamples, xSamples):
+ Pot = numpy.nansum( ySamples ) # Potencia, momento 0
+ yNorm = ySamples / Pot
+
+ Vr = numpy.nansum( yNorm * xSamples ) # Velocidad radial, mu, corrimiento doppler, primer momento
+ Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento
+ Desv = Sigma2**0.5 # Desv. Estandar, Ancho espectral
+
+ return numpy.array([Pot, Vr, Desv])
+
+ def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
+ tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km = 0.93, Altitude=3350):
+
+
+ Velrange = dataOut.spcparam_range[2]
+ FrecRange = dataOut.spcparam_range[0]
+
+ dV= Velrange[1]-Velrange[0]
+ dF= FrecRange[1]-FrecRange[0]
+
+ if radar == "MIRA35C" :
+
self.spc = dataOut.data_pre[0].copy()
self.Num_Hei = self.spc.shape[2]
self.Num_Bin = self.spc.shape[1]
self.Num_Chn = self.spc.shape[0]
Ze = self.dBZeMODE2(dataOut)
-
+
else:
-
- self.spc = dataOut.data_pre[0].copy()
-
- #NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX
- self.spc[:,:,0:7]= numpy.NaN
-
+
+ self.spc = dataOut.SPCparam[1].copy() #dataOut.data_pre[0].copy() #
+
+ """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
+
+ self.spc[:,:,0:7]= numpy.NaN
+
+ """##########################################"""
+
self.Num_Hei = self.spc.shape[2]
self.Num_Bin = self.spc.shape[1]
self.Num_Chn = self.spc.shape[0]
-
- VelRange = dataOut.spc_range[2]
-
+
''' Se obtiene la constante del RADAR '''
-
+
self.Pt = Pt
self.Gt = Gt
self.Gr = Gr
self.Lambda = Lambda
self.aL = aL
self.tauW = tauW
- self.ThetaT = ThetaT
+ self.ThetaT = ThetaT
self.ThetaR = ThetaR
- self.GSys = 10**(36.63/10) # Ganancia de los LNA 36.63 dB
- self.lt = 10**(1.67/10) # Perdida en cables Tx 1.67 dB
- self.lr = 10**(5.73/10) # Perdida en cables Rx 5.73 dB
-
+
Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
RadarConstant = 10e-26 * Numerator / Denominator #
- ExpConstant = 10**(40/10) #Constante Experimental
-
- SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
- for i in range(self.Num_Chn):
- SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
- SignalPower[numpy.where(SignalPower < 0)] = 1e-20
-
- SPCmean = numpy.mean(SignalPower, 0)
- Pr = SPCmean[:,:]/dataOut.normFactor
-
- # Declaring auxiliary variables
- Range = dataOut.heightList*1000. #Range in m
- # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
- rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
- zMtrx = rMtrx+Altitude
- # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
- VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
-
- # height dependence to air density Foote and Du Toit (1969)
- delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
- VMtrx = VelMtrx / delv_z #Normalized velocity
- VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
- # Diameter is related to the fall speed of falling drops
- D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
- # Only valid for D>= 0.16 mm
- D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
-
- #Calculate Radar Reflectivity ETAn
- ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
- ETAd = ETAn * 6.18 * exp( -0.6 * D_Vz ) * delv_z
- # Radar Cross Section
- sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
- # Drop Size Distribution
- DSD = ETAn / sigmaD
- # Equivalente Reflectivy
- Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
- Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
- # RainFall Rate
- RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
-
- # Censoring the data
- # Removing data with SNRth < 0dB se debe considerar el SNR por canal
- SNRth = 10**(SNRdBlimit/10) #-30dB
- novalid = numpy.where((dataOut.data_snr[0,:] 20:
+ popt01[1]=numpy.NaN
+
+
+ V_mean[R]=popt01[1]
+
+ Z[R] = numpy.nansum( N_dist[:,R] * (D_range[:,R])**6 )#*10**-18
+
+ RR[R] = 0.0006*numpy.pi * numpy.nansum( D_range[:,R]**3 * N_dist[:,R] * Velrange[0:self.Num_Bin] ) #Rainfall rate
+
+ Ze[R] = (numpy.nansum( ETAn[:,R]) * Lambda**4) / ( 10**-18*numpy.pi**5 * Km)
+
+
+
+ RR2 = (Z/200)**(1/1.6)
+ dBRR = 10*numpy.log10(RR)
+ dBRR2 = 10*numpy.log10(RR2)
+
+ dBZe = 10*numpy.log10(Ze)
+ dBZ = 10*numpy.log10(Z)
+
dataOut.data_output = RR[8]
dataOut.data_param = numpy.ones([3,self.Num_Hei])
dataOut.channelList = [0,1,2]
- dataOut.data_param[0]=10*numpy.log10(Ze_org)
- dataOut.data_param[1]=-W
+ dataOut.data_param[0]=dBZ
+ dataOut.data_param[1]=V_mean
dataOut.data_param[2]=RR
- # print ('Leaving PrecepitationProc ... ')
return dataOut
-
+
def dBZeMODE2(self, dataOut): # Processing for MIRA35C
-
+
NPW = dataOut.NPW
COFA = dataOut.COFA
-
+
SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
RadarConst = dataOut.RadarConst
#frequency = 34.85*10**9
-
+
ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
-
+
ETA = numpy.sum(SNR,1)
-
- ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
-
+
+        ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
+
Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
-
+
for r in range(self.Num_Hei):
-
+
Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
#Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
-
+
return Ze
-
+
# def GetRadarConstant(self):
-#
-# """
+#
+# """
# Constants:
-#
+#
# Pt: Transmission Power dB 5kW 5000
# Gt: Transmission Gain dB 24.7 dB 295.1209
# Gr: Reception Gain dB 18.5 dB 70.7945
@@ -838,416 +795,438 @@ class PrecipitationProc(Operation):
# tauW: Width of transmission pulse s 4us 4e-6
# ThetaT: Transmission antenna bean angle rad 0.1656317 rad 0.1656317
# ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
-#
+#
# """
-#
+#
# Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
# Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
# RadarConstant = Numerator / Denominator
-#
+#
# return RadarConstant
-
-
-
-class FullSpectralAnalysis(Operation):
-
+
+
+
+class FullSpectralAnalysis(Operation):
+
"""
- Function that implements Full Spectral Analysis technique.
-
- Input:
- self.dataOut.data_pre : SelfSpectra and CrossSpectra data
+ Function that implements Full Spectral Analisys technique.
+
+ Input:
+ self.dataOut.data_pre : SelfSpectra and CrossSPectra data
self.dataOut.groupList : Pairlist of channels
self.dataOut.ChanDist : Physical distance between receivers
-
-
- Output:
-
- self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
-
-
+
+
+ Output:
+
+ self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind
+
+
Parameters affected: Winds, height range, SNR
-
+
"""
- def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
- minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
-
+ def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRlimit=7):
+
+ self.indice=int(numpy.random.rand()*1000)
+
spc = dataOut.data_pre[0].copy()
cspc = dataOut.data_pre[1]
+
+ """NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX"""
+
+ SNRspc = spc.copy()
+ SNRspc[:,:,0:7]= numpy.NaN
+
+ """##########################################"""
+
+
+ nChannel = spc.shape[0]
+ nProfiles = spc.shape[1]
nHeights = spc.shape[2]
-
- # first_height = 0.75 #km (ref: data header 20170822)
- # resolution_height = 0.075 #km
- '''
- finding height range. check this when radar parameters are changed!
- '''
- if maxheight is not None:
- # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
- range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
- else:
- range_max = nHeights
- if minheight is not None:
- # range_min = int((minheight - first_height) / resolution_height) # theoretical
- range_min = int(13.26 * minheight - 5) # empirical, works better
- if range_min < 0:
- range_min = 0
- else:
- range_min = 0
-
+
pairsList = dataOut.groupList
if dataOut.ChanDist is not None :
ChanDist = dataOut.ChanDist
else:
ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
-
- # 4 variables: zonal, meridional, vertical, and average SNR
- data_param = numpy.zeros([4,nHeights]) * numpy.NaN
- velocityX = numpy.zeros([nHeights]) * numpy.NaN
- velocityY = numpy.zeros([nHeights]) * numpy.NaN
- velocityZ = numpy.zeros([nHeights]) * numpy.NaN
-
- dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
-
- '''***********************************************WIND ESTIMATION**************************************'''
+
+ FrecRange = dataOut.spc_range[0]
+
+ ySamples=numpy.ones([nChannel,nProfiles])
+ phase=numpy.ones([nChannel,nProfiles])
+ CSPCSamples=numpy.ones([nChannel,nProfiles],dtype=numpy.complex_)
+ coherence=numpy.ones([nChannel,nProfiles])
+ PhaseSlope=numpy.ones(nChannel)
+ PhaseInter=numpy.ones(nChannel)
+ data_SNR=numpy.zeros([nProfiles])
+
+ data = dataOut.data_pre
+ noise = dataOut.noise
+
+ dataOut.data_SNR = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0]
+
+ dataOut.data_SNR[numpy.where( dataOut.data_SNR <0 )] = 1e-20
+
+
+ data_output=numpy.ones([spc.shape[0],spc.shape[2]])*numpy.NaN
+
+ velocityX=[]
+ velocityY=[]
+ velocityV=[]
+ PhaseLine=[]
+
+ dbSNR = 10*numpy.log10(dataOut.data_SNR)
+ dbSNR = numpy.average(dbSNR,0)
+
for Height in range(nHeights):
+
+ [Vzon,Vmer,Vver, GaussCenter, PhaseSlope, FitGaussCSPC]= self.WindEstimation(spc, cspc, pairsList, ChanDist, Height, noise, dataOut.spc_range, dbSNR[Height], SNRlimit)
+ PhaseLine = numpy.append(PhaseLine, PhaseSlope)
+
+ if abs(Vzon)<100. and abs(Vzon)> 0.:
+ velocityX=numpy.append(velocityX, Vzon)#Vmag
+
+ else:
+ velocityX=numpy.append(velocityX, numpy.NaN)
+
+ if abs(Vmer)<100. and abs(Vmer) > 0.:
+ velocityY=numpy.append(velocityY, -Vmer)#Vang
+
+ else:
+ velocityY=numpy.append(velocityY, numpy.NaN)
+
+ if dbSNR[Height] > SNRlimit:
+ velocityV=numpy.append(velocityV, -Vver)#FirstMoment[Height])
+ else:
+ velocityV=numpy.append(velocityV, numpy.NaN)
- if Height >= range_min and Height < range_max:
- # error_code will be useful in future analysis
- [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
- ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
-
- if abs(Vzon) < 100. and abs(Vmer) < 100.:
- velocityX[Height] = Vzon
- velocityY[Height] = -Vmer
- velocityZ[Height] = Vver
- # Censoring data with SNR threshold
- dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
-
- data_param[0] = velocityX
- data_param[1] = velocityY
- data_param[2] = velocityZ
- data_param[3] = dbSNR
- dataOut.data_param = data_param
+
+ '''Nota: Cambiar el signo de numpy.array(velocityX) cuando se intente procesar datos de BLTR'''
+ data_output[0] = numpy.array(velocityX) #self.moving_average(numpy.array(velocityX) , N=1)
+ data_output[1] = numpy.array(velocityY) #self.moving_average(numpy.array(velocityY) , N=1)
+ data_output[2] = velocityV#FirstMoment
+
+ xFrec=FrecRange[0:spc.shape[1]]
+
+ dataOut.data_output=data_output
+
return dataOut
-
+
+
def moving_average(self,x, N=2):
- """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
-
+
def gaus(self,xSamples,Amp,Mu,Sigma):
- return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
-
+ return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
+
+
+
def Moments(self, ySamples, xSamples):
- Power = numpy.nanmean(ySamples) # Power, 0th Moment
- yNorm = ySamples / numpy.nansum(ySamples)
- RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
- Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
- StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Desv. Estandar, Ancho espectral
- return numpy.array([Power,RadVel,StdDev])
-
- def StopWindEstimation(self, error_code):
- Vzon = numpy.NaN
- Vmer = numpy.NaN
- Vver = numpy.NaN
- return Vzon, Vmer, Vver, error_code
-
- def AntiAliasing(self, interval, maxstep):
- """
- function to prevent errors from aliased values when computing phaseslope
- """
- antialiased = numpy.zeros(len(interval))
- copyinterval = interval.copy()
-
- antialiased[0] = copyinterval[0]
-
- for i in range(1,len(antialiased)):
- step = interval[i] - interval[i-1]
- if step > maxstep:
- copyinterval -= 2*numpy.pi
- antialiased[i] = copyinterval[i]
- elif step < maxstep*(-1):
- copyinterval += 2*numpy.pi
- antialiased[i] = copyinterval[i]
- else:
- antialiased[i] = copyinterval[i].copy()
-
- return antialiased
-
- def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
- """
- Function that Calculates Zonal, Meridional and Vertical wind velocities.
- Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
-
- Input:
- spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
- pairsList : Pairlist of channels
- ChanDist : array of xi_ij and eta_ij
- Height : height at which data is processed
- noise : noise in [channels] format for specific height
- Abbsisarange : range of the frequencies or velocities
- dbSNR, SNRlimit : signal to noise ratio in db, lower limit
-
- Output:
- Vzon, Vmer, Vver : wind velocities
- error_code : int that states where code is terminated
-
- 0 : no error detected
- 1 : Gaussian of mean spc exceeds widthlimit
- 2 : no Gaussian of mean spc found
- 3 : SNR to low or velocity to high -> prec. e.g.
- 4 : at least one Gaussian of cspc exceeds widthlimit
- 5 : zero out of three cspc Gaussian fits converged
- 6 : phase slope fit could not be found
- 7 : arrays used to fit phase have different length
- 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
-
- """
-
- error_code = 0
-
- nChan = spc.shape[0]
- nProf = spc.shape[1]
- nPair = cspc.shape[0]
-
- SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
- CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
- phase = numpy.zeros([nPair, nProf]) # phase between channels
- PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
- PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
- xFrec = AbbsisaRange[0][:-1] # frequency range
- xVel = AbbsisaRange[2][:-1] # velocity range
- xSamples = xFrec # the frequency range is taken
- delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
-
- # only consider velocities with in NegativeLimit and PositiveLimit
- if (NegativeLimit is None):
- NegativeLimit = numpy.min(xVel)
- if (PositiveLimit is None):
- PositiveLimit = numpy.max(xVel)
- xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
- xSamples_zoom = xSamples[xvalid]
+ Pot = numpy.nansum( ySamples ) # Potencia, momento 0
+ yNorm = ySamples / Pot
+ Vr = numpy.nansum( yNorm * xSamples ) # Velocidad radial, mu, corrimiento doppler, primer momento
+ Sigma2 = abs(numpy.nansum( yNorm * ( xSamples - Vr )**2 )) # Segundo Momento
+ Desv = Sigma2**0.5 # Desv. Estandar, Ancho espectral
+
+ return numpy.array([Pot, Vr, Desv])
+
+ def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit):
+
+
+ ySamples=numpy.ones([spc.shape[0],spc.shape[1]])
+ phase=numpy.ones([spc.shape[0],spc.shape[1]])
+ CSPCSamples=numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_)
+ coherence=numpy.ones([spc.shape[0],spc.shape[1]])
+ PhaseSlope=numpy.zeros(spc.shape[0])
+ PhaseInter=numpy.ones(spc.shape[0])
+ xFrec=AbbsisaRange[0][0:spc.shape[1]]
+ xVel =AbbsisaRange[2][0:spc.shape[1]]
+ Vv=numpy.empty(spc.shape[2])*0
+ SPCav = numpy.average(spc, axis=0)-numpy.average(noise) #spc[0]-noise[0]#
+
+ SPCmoments = self.Moments(SPCav[:,Height], xVel )
+ CSPCmoments = []
+ cspcNoise = numpy.empty(3)
+
'''Getting Eij and Nij'''
- Xi01, Xi02, Xi12 = ChanDist[:,0]
- Eta01, Eta02, Eta12 = ChanDist[:,1]
-
- # spwd limit - updated by D. Scipión 30.03.2021
- widthlimit = 10
- '''************************* SPC is normalized ********************************'''
- spc_norm = spc.copy()
- # For each channel
- for i in range(nChan):
- spc_sub = spc_norm[i,:] - noise[i] # only the signal power
- SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
-
- '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
-
- """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
- you only fit the curve and don't need the absolute value of height for calculation,
- only for estimation of width. for normalization of cross spectra, you need initial,
- unnormalized self-spectra With noise.
-
- Technically, you don't even need to normalize the self-spectra, as you only need the
- width of the peak. However, it was left this way. Note that the normalization has a flaw:
- due to subtraction of the noise, some values are below zero. Raw "spc" values should be
- >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
- """
- # initial conditions
- popt = [1e-10,0,1e-10]
- # Spectra average
- SPCMean = numpy.average(SPC_Samples,0)
- # Moments in frequency
- SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
-
- # Gauss Fit SPC in frequency domain
- if dbSNR > SNRlimit: # only if SNR > SNRth
+
+ Xi01=ChanDist[0][0]
+ Eta01=ChanDist[0][1]
+
+ Xi02=ChanDist[1][0]
+ Eta02=ChanDist[1][1]
+
+ Xi12=ChanDist[2][0]
+ Eta12=ChanDist[2][1]
+
+ z = spc.copy()
+ z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
+
+ for i in range(spc.shape[0]):
+
+ '''****** Line of Data SPC ******'''
+ zline=z[i,:,Height].copy() - noise[i] # Se resta ruido
+
+ '''****** SPC is normalized ******'''
+ SmoothSPC =self.moving_average(zline.copy(),N=1) # Se suaviza el ruido
+ FactNorm = SmoothSPC/numpy.nansum(SmoothSPC) # SPC Normalizado y suavizado
+
+ xSamples = xFrec # Se toma el rango de frecuncias
+ ySamples[i] = FactNorm # Se toman los valores de SPC normalizado
+
+ for i in range(spc.shape[0]):
+
+ '''****** Line of Data CSPC ******'''
+ cspcLine = ( cspc[i,:,Height].copy())# - noise[i] ) # no! Se resta el ruido
+ SmoothCSPC =self.moving_average(cspcLine,N=1) # Se suaviza el ruido
+ cspcNorm = SmoothCSPC/numpy.nansum(SmoothCSPC) # CSPC normalizado y suavizado
+
+ '''****** CSPC is normalized with respect to Briggs and Vincent ******'''
+ chan_index0 = pairsList[i][0]
+ chan_index1 = pairsList[i][1]
+
+ CSPCFactor= numpy.abs(numpy.nansum(ySamples[chan_index0]))**2 * numpy.abs(numpy.nansum(ySamples[chan_index1]))**2
+ CSPCNorm = cspcNorm / numpy.sqrt(CSPCFactor)
+
+ CSPCSamples[i] = CSPCNorm
+
+ coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
+
+ #coherence[i]= self.moving_average(coherence[i],N=1)
+
+ phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
+
+ CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPCSamples[0]), xSamples),
+ self.Moments(numpy.abs(CSPCSamples[1]), xSamples),
+ self.Moments(numpy.abs(CSPCSamples[2]), xSamples)])
+
+
+ popt=[1e-10,0,1e-10]
+ popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10]
+ FitGauss01, FitGauss02, FitGauss12 = numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0
+
+ CSPCMask01 = numpy.abs(CSPCSamples[0])
+ CSPCMask02 = numpy.abs(CSPCSamples[1])
+ CSPCMask12 = numpy.abs(CSPCSamples[2])
+
+ mask01 = ~numpy.isnan(CSPCMask01)
+ mask02 = ~numpy.isnan(CSPCMask02)
+ mask12 = ~numpy.isnan(CSPCMask12)
+
+ #mask = ~numpy.isnan(CSPCMask01)
+ CSPCMask01 = CSPCMask01[mask01]
+ CSPCMask02 = CSPCMask02[mask02]
+ CSPCMask12 = CSPCMask12[mask12]
+ #CSPCMask01 = numpy.ma.masked_invalid(CSPCMask01)
+
+
+
+ '''***Fit Gauss CSPC01***'''
+ if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3 :
try:
- popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
- if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
- return self.StopWindEstimation(error_code = 1)
- FitGauss = self.gaus(xSamples_zoom,*popt)
+ popt01,pcov = curve_fit(self.gaus,xSamples[mask01],numpy.abs(CSPCMask01),p0=CSPCmoments[0])
+ popt02,pcov = curve_fit(self.gaus,xSamples[mask02],numpy.abs(CSPCMask02),p0=CSPCmoments[1])
+ popt12,pcov = curve_fit(self.gaus,xSamples[mask12],numpy.abs(CSPCMask12),p0=CSPCmoments[2])
+ FitGauss01 = self.gaus(xSamples,*popt01)
+ FitGauss02 = self.gaus(xSamples,*popt02)
+ FitGauss12 = self.gaus(xSamples,*popt12)
+ except:
+ FitGauss01=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[0]))
+ FitGauss02=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[1]))
+ FitGauss12=numpy.ones(len(xSamples))*numpy.mean(numpy.abs(CSPCSamples[2]))
+
+
+ CSPCopt = numpy.vstack([popt01,popt02,popt12])
+
+ '''****** Getting fij width ******'''
+
+ yMean = numpy.average(ySamples, axis=0) # ySamples[0]
+
+ '''******* Getting fitting Gaussian *******'''
+ meanGauss = sum(xSamples*yMean) / len(xSamples) # Mu, velocidad radial (frecuencia)
+ sigma2 = sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) # Varianza, Ancho espectral (frecuencia)
+
+ yMoments = self.Moments(yMean, xSamples)
+
+ if dbSNR > SNRlimit and numpy.abs(SPCmoments[1])<3: # and abs(meanGauss/sigma2) > 0.00001:
+ try:
+ popt,pcov = curve_fit(self.gaus,xSamples,yMean,p0=yMoments)
+ FitGauss=self.gaus(xSamples,*popt)
+
except :#RuntimeError:
- return self.StopWindEstimation(error_code = 2)
+ FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
+
+
else:
- return self.StopWindEstimation(error_code = 3)
-
- '''***************************** CSPC Normalization *************************
- The Spc spectra are used to normalize the crossspectra. Peaks from precipitation
- influence the norm which is not desired. First, a range is identified where the
- wind peak is estimated -> sum_wind is sum of those frequencies. Next, the area
- around it gets cut off and values replaced by mean determined by the boundary
- data -> sum_noise (spc is not normalized here, thats why the noise is important)
-
- The sums are then added and multiplied by range/datapoints, because you need
- an integral and not a sum for normalization.
-
- A norm is found according to Briggs 92.
- '''
- # for each pair
- for i in range(nPair):
- cspc_norm = cspc[i,:].copy()
- chan_index0 = pairsList[i][0]
- chan_index1 = pairsList[i][1]
- CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
- phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
-
- CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
- self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
- self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
-
- popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
- FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
-
- '''*******************************FIT GAUSS CSPC************************************'''
- try:
- popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
- if popt01[2] > widthlimit: # CONDITION
- return self.StopWindEstimation(error_code = 4)
- popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
- if popt02[2] > widthlimit: # CONDITION
- return self.StopWindEstimation(error_code = 4)
- popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
- if popt12[2] > widthlimit: # CONDITION
- return self.StopWindEstimation(error_code = 4)
-
- FitGauss01 = self.gaus(xSamples_zoom, *popt01)
- FitGauss02 = self.gaus(xSamples_zoom, *popt02)
- FitGauss12 = self.gaus(xSamples_zoom, *popt12)
- except:
- return self.StopWindEstimation(error_code = 5)
-
-
- '''************* Getting Fij ***************'''
- # x-axis point of the gaussian where the center is located from GaussFit of spectra
- GaussCenter = popt[1]
- ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
- PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
-
- # Point where e^-1 is located in the gaussian
- PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
- FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to"Peminus1" in "FitGauss"
+ FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
+
+
+
+ '''****** Getting Fij ******'''
+ Fijcspc = CSPCopt[:,2]/2*3
+
+
+ GaussCenter = popt[1] #xFrec[GCpos]
+ #Punto en Eje X de la Gaussiana donde se encuentra el centro
+ ClosestCenter = xSamples[numpy.abs(xSamples-GaussCenter).argmin()]
+ PointGauCenter = numpy.where(xSamples==ClosestCenter)[0][0]
+
+ #Punto e^-1 hubicado en la Gaussiana
+ PeMinus1 = numpy.max(FitGauss)* numpy.exp(-1)
+ FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # El punto mas cercano a "Peminus1" dentro de "FitGauss"
PointFij = numpy.where(FitGauss==FijClosest)[0][0]
- Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
-
- '''********** Taking frequency ranges from mean SPCs **********'''
- GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
+
+ if xSamples[PointFij] > xSamples[PointGauCenter]:
+ Fij = xSamples[PointFij] - xSamples[PointGauCenter]
+
+ else:
+ Fij = xSamples[PointGauCenter] - xSamples[PointFij]
+
+
+ '''****** Taking frequency ranges from SPCs ******'''
+
+
+ #GaussCenter = popt[1] #Primer momento 01
+ GauWidth = popt[2] *3/2 #Ancho de banda de Gau01
Range = numpy.empty(2)
Range[0] = GaussCenter - GauWidth
- Range[1] = GaussCenter + GauWidth
- # Point in x-axis where the bandwidth is located (min:max)
- ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
- ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
- PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
- PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
- Range = numpy.array([ PointRangeMin, PointRangeMax ])
- FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
-
- '''************************** Getting Phase Slope ***************************'''
- for i in range(nPair):
- if len(FrecRange) > 5:
- PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
+ Range[1] = GaussCenter + GauWidth
+ #Punto en Eje X de la Gaussiana donde se encuentra ancho de banda (min:max)
+ ClosRangeMin = xSamples[numpy.abs(xSamples-Range[0]).argmin()]
+ ClosRangeMax = xSamples[numpy.abs(xSamples-Range[1]).argmin()]
+
+ PointRangeMin = numpy.where(xSamples==ClosRangeMin)[0][0]
+ PointRangeMax = numpy.where(xSamples==ClosRangeMax)[0][0]
+
+ Range=numpy.array([ PointRangeMin, PointRangeMax ])
+
+ FrecRange = xFrec[ Range[0] : Range[1] ]
+ VelRange = xVel[ Range[0] : Range[1] ]
+
+
+ '''****** Getting SCPC Slope ******'''
+
+ for i in range(spc.shape[0]):
+
+ if len(FrecRange)>5 and len(FrecRange)4:
+ Vver=popt[1]
+ else:
+ Vver=numpy.NaN
+ FitGaussCSPC = numpy.array([FitGauss01,FitGauss02,FitGauss12])
+
+
+ return Vzon, Vmer, Vver, GaussCenter, PhaseSlope, FitGaussCSPC
+
class SpectralMoments(Operation):
-
+
'''
Function SpectralMoments()
-
+
Calculates moments (power, mean, standard deviation) and SNR of the signal
-
+
Type of dataIn: Spectra
-
+
Configuration Parameters:
-
+
dirCosx : Cosine director in X axis
dirCosy : Cosine director in Y axis
-
+
elevation :
azimuth :
-
+
Input:
- channelList : simple channel list to select e.g. [2,3,7]
+ channelList : simple channel list to select e.g. [2,3,7]
self.dataOut.data_pre : Spectral data
self.dataOut.abscissaList : List of frequencies
self.dataOut.noise : Noise level per channel
-
+
Affected:
self.dataOut.moments : Parameters per channel
- self.dataOut.data_snr : SNR per channel
-
+ self.dataOut.data_SNR : SNR per channel
+
'''
-
+
def run(self, dataOut):
-
+
+ #dataOut.data_pre = dataOut.data_pre[0]
data = dataOut.data_pre[0]
absc = dataOut.abscissaList[:-1]
noise = dataOut.noise
nChannel = data.shape[0]
data_param = numpy.zeros((nChannel, 4, data.shape[2]))
-
+
for ind in range(nChannel):
data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
-
+
dataOut.moments = data_param[:,1:,:]
- dataOut.data_snr = data_param[:,0]
- dataOut.data_pow = data_param[:,1]
- dataOut.data_dop = data_param[:,2]
- dataOut.data_width = data_param[:,3]
-
+ dataOut.data_SNR = data_param[:,0]
+ dataOut.data_POW = data_param[:,1]
+ dataOut.data_DOP = data_param[:,2]
+ dataOut.data_WIDTH = data_param[:,3]
return dataOut
-
- def __calculateMoments(self, oldspec, oldfreq, n0,
+
+ def __calculateMoments(self, oldspec, oldfreq, n0,
nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
-
+
if (nicoh is None): nicoh = 1
- if (graph is None): graph = 0
+ if (graph is None): graph = 0
if (smooth is None): smooth = 0
elif (self.smooth < 3): smooth = 0
@@ -1258,105 +1237,98 @@ class SpectralMoments(Operation):
if (aliasing is None): aliasing = 0
if (oldfd is None): oldfd = 0
if (wwauto is None): wwauto = 0
-
+
if (n0 < 1.e-20): n0 = 1.e-20
-
+
freq = oldfreq
vec_power = numpy.zeros(oldspec.shape[1])
vec_fd = numpy.zeros(oldspec.shape[1])
vec_w = numpy.zeros(oldspec.shape[1])
vec_snr = numpy.zeros(oldspec.shape[1])
-
- # oldspec = numpy.ma.masked_invalid(oldspec)
+
+ oldspec = numpy.ma.masked_invalid(oldspec)
for ind in range(oldspec.shape[1]):
-
+
spec = oldspec[:,ind]
aux = spec*fwindow
max_spec = aux.max()
- m = aux.tolist().index(max_spec)
-
- # Smooth
- if (smooth == 0):
- spec2 = spec
- else:
- spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
-
- # Moments Estimation
- bb = spec2[numpy.arange(m,spec2.size)]
+ m = list(aux).index(max_spec)
+
+ #Smooth
+ if (smooth == 0): spec2 = spec
+ else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
+
+ # Calculo de Momentos
+ bb = spec2[list(range(m,spec2.size))]
+            bb = (bb m):
- ss1 = m
-
- valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
-
- signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
- total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
- power = ((spec2[valid] - n0) * fwindow[valid]).sum()
- fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
- w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
- snr = (spec2.mean()-n0)/n0
- if (snr < 1.e-20) :
+
+ if (ss.size == 0): ss1 = 1
+ else: ss1 = max(ss) + 1
+
+ if (ss1 > m): ss1 = m
+
+ valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
+ power = ((spec2[valid] - n0)*fwindow[valid]).sum()
+ fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
+ w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
+ snr = (spec2.mean()-n0)/n0
+
+ if (snr < 1.e-20) :
snr = 1.e-20
-
- # vec_power[ind] = power #D. Scipión replaced with the line below
- vec_power[ind] = total_power
+
+ vec_power[ind] = power
vec_fd[ind] = fd
vec_w[ind] = w
vec_snr[ind] = snr
-
- return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
-
+
+ moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
+ return moments
+
#------------------ Get SA Parameters --------------------------
-
+
def GetSAParameters(self):
#SA en frecuencia
pairslist = self.dataOut.groupList
num_pairs = len(pairslist)
-
+
vel = self.dataOut.abscissaList
spectra = self.dataOut.data_pre
cspectra = self.dataIn.data_cspc
- delta_v = vel[1] - vel[0]
-
+ delta_v = vel[1] - vel[0]
+
#Calculating the power spectrum
spc_pow = numpy.sum(spectra, 3)*delta_v
#Normalizing Spectra
norm_spectra = spectra/spc_pow
#Calculating the norm_spectra at peak
- max_spectra = numpy.max(norm_spectra, 3)
-
+ max_spectra = numpy.max(norm_spectra, 3)
+
#Normalizing Cross Spectra
norm_cspectra = numpy.zeros(cspectra.shape)
-
+
for i in range(num_chan):
norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
-
+
max_cspectra = numpy.max(norm_cspectra,2)
max_cspectra_index = numpy.argmax(norm_cspectra, 2)
-
+
for i in range(num_pairs):
cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
#------------------- Get Lags ----------------------------------
-
+
class SALags(Operation):
'''
Function GetMoments()
@@ -1366,281 +1338,1192 @@ class SALags(Operation):
self.dataOut.abscissaList
self.dataOut.noise
self.dataOut.normFactor
- self.dataOut.data_snr
+ self.dataOut.data_SNR
self.dataOut.groupList
self.dataOut.nChannels
-
+
Affected:
self.dataOut.data_param
-
+
'''
- def run(self, dataOut):
+ def run(self, dataOut):
data_acf = dataOut.data_pre[0]
data_ccf = dataOut.data_pre[1]
normFactor_acf = dataOut.normFactor[0]
normFactor_ccf = dataOut.normFactor[1]
pairs_acf = dataOut.groupList[0]
pairs_ccf = dataOut.groupList[1]
-
+
nHeights = dataOut.nHeights
absc = dataOut.abscissaList
noise = dataOut.noise
- SNR = dataOut.data_snr
+ SNR = dataOut.data_SNR
nChannels = dataOut.nChannels
# pairsList = dataOut.groupList
# pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
for l in range(len(pairs_acf)):
data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
-
+
for l in range(len(pairs_ccf)):
data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
-
+
dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
return
-
+
# def __getPairsAutoCorr(self, pairsList, nChannels):
-#
+#
# pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
-#
-# for l in range(len(pairsList)):
+#
+# for l in range(len(pairsList)):
# firstChannel = pairsList[l][0]
# secondChannel = pairsList[l][1]
-#
-# #Obteniendo pares de Autocorrelacion
+#
+# #Obteniendo pares de Autocorrelacion
# if firstChannel == secondChannel:
# pairsAutoCorr[firstChannel] = int(l)
-#
+#
# pairsAutoCorr = pairsAutoCorr.astype(int)
-#
+#
# pairsCrossCorr = range(len(pairsList))
# pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
-#
+#
# return pairsAutoCorr, pairsCrossCorr
-
+
def __calculateTaus(self, data_acf, data_ccf, lagRange):
-
+
lag0 = data_acf.shape[1]/2
#Funcion de Autocorrelacion
mean_acf = stats.nanmean(data_acf, axis = 0)
-
+
#Obtencion Indice de TauCross
ind_ccf = data_ccf.argmax(axis = 1)
#Obtencion Indice de TauAuto
ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
ccf_lag0 = data_ccf[:,lag0,:]
-
+
for i in range(ccf_lag0.shape[0]):
ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
-
+
#Obtencion de TauCross y TauAuto
tau_ccf = lagRange[ind_ccf]
tau_acf = lagRange[ind_acf]
-
+
Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
-
+
tau_ccf[Nan1,Nan2] = numpy.nan
tau_acf[Nan1,Nan2] = numpy.nan
tau = numpy.vstack((tau_ccf,tau_acf))
-
+
return tau
-
+
def __calculateLag1Phase(self, data, lagTRange):
data1 = stats.nanmean(data, axis = 0)
lag1 = numpy.where(lagTRange == 0)[0][0] + 1
phase = numpy.angle(data1[lag1,:])
-
+
return phase
+def fit_func( x, a0, a1, a2): #, a3, a4, a5):
+ z = (x - a1) / a2
+ y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
+ return y
+
class SpectralFitting(Operation):
'''
Function GetMoments()
-
+
Input:
Output:
Variables modified:
'''
+ def __calculateMoments(self,oldspec, oldfreq, n0, nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
+
+ if (nicoh is None): nicoh = 1
+ if (graph is None): graph = 0
+ if (smooth is None): smooth = 0
+ elif (self.smooth < 3): smooth = 0
+
+ if (type1 is None): type1 = 0
+ if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
+ if (snrth is None): snrth = -3
+ if (dc is None): dc = 0
+ if (aliasing is None): aliasing = 0
+ if (oldfd is None): oldfd = 0
+ if (wwauto is None): wwauto = 0
+
+ if (n0 < 1.e-20): n0 = 1.e-20
+
+ freq = oldfreq
+ vec_power = numpy.zeros(oldspec.shape[1])
+ vec_fd = numpy.zeros(oldspec.shape[1])
+ vec_w = numpy.zeros(oldspec.shape[1])
+ vec_snr = numpy.zeros(oldspec.shape[1])
+
+ oldspec = numpy.ma.masked_invalid(oldspec)
+
+ for ind in range(oldspec.shape[1]):
+
+ spec = oldspec[:,ind]
+ aux = spec*fwindow
+ max_spec = aux.max()
+ m = list(aux).index(max_spec)
+
+ #Smooth
+ if (smooth == 0): spec2 = spec
+ else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
+
+ # Calculo de Momentos
+ bb = spec2[list(range(m,spec2.size))]
+            bb = (bb<n0).nonzero()
+            bb = bb[0]
+
+            ss = spec2[list(range(0,m + 1))]
+            ss = (ss<n0).nonzero()
+            ss = ss[0]
+
+            if (bb.size == 0):
+                bb0 = spec.size - 1 - m
+            else:
+                bb0 = bb[0] - 1
+                if (bb0 < 0):
+                    bb0 = 0
+
+            if (ss.size == 0): ss1 = 1
+            else: ss1 = max(ss) + 1
+
+            if (ss1 > m): ss1 = m
+
+ valid = numpy.asarray(list(range(int(m + bb0 - ss1 + 1)))) + ss1
+ power = ((spec2[valid] - n0)*fwindow[valid]).sum()
+ fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
+ w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
+ snr = (spec2.mean()-n0)/n0
+
+ if (snr < 1.e-20) :
+ snr = 1.e-20
+
+ vec_power[ind] = power
+ vec_fd[ind] = fd
+ vec_w[ind] = w
+ vec_snr[ind] = snr
+
+ moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
+ return moments
+
+ #def __DiffCoherent(self,snrth, spectra, cspectra, nProf, heights,nChan, nHei, nPairs, channels, noise, crosspairs):
+ def __DiffCoherent(self, spectra, cspectra, dataOut, noise, snrth, coh_th, hei_th):
+
+ import matplotlib.pyplot as plt
+ nProf = dataOut.nProfiles
+ heights = dataOut.heightList
+ nHei = len(heights)
+ channels = dataOut.channelList
+ nChan = len(channels)
+ crosspairs = dataOut.groupList
+ nPairs = len(crosspairs)
+ #Separar espectros incoherentes de coherentes snr > 20 dB'
+ snr_th = 10**(snrth/10.0)
+ my_incoh_spectra = numpy.zeros([nChan, nProf,nHei], dtype='float')
+ my_incoh_cspectra = numpy.zeros([nPairs,nProf, nHei], dtype='complex')
+ my_incoh_aver = numpy.zeros([nChan, nHei])
+ my_coh_aver = numpy.zeros([nChan, nHei])
+
+ coh_spectra = numpy.zeros([nChan, nProf, nHei], dtype='float')
+ coh_cspectra = numpy.zeros([nPairs, nProf, nHei], dtype='complex')
+ coh_aver = numpy.zeros([nChan, nHei])
+
+ incoh_spectra = numpy.zeros([nChan, nProf, nHei], dtype='float')
+ incoh_cspectra = numpy.zeros([nPairs, nProf, nHei], dtype='complex')
+ incoh_aver = numpy.zeros([nChan, nHei])
+ power = numpy.sum(spectra, axis=1)
+
+ if coh_th == None : coh_th = numpy.array([0.75,0.65,0.15]) # 0.65
+ if hei_th == None : hei_th = numpy.array([60,300,650])
+ for ic in range(2):
+ pair = crosspairs[ic]
+ #si el SNR es mayor que el SNR threshold los datos se toman coherentes
+ s_n0 = power[pair[0],:]/noise[pair[0]]
+ s_n1 = power[pair[1],:]/noise[pair[1]]
+
+ valid1 =(s_n0>=snr_th).nonzero()
+ valid2 = (s_n1>=snr_th).nonzero()
+ #valid = valid2 + valid1 #numpy.concatenate((valid1,valid2), axis=None)
+ valid1 = numpy.array(valid1[0])
+ valid2 = numpy.array(valid2[0])
+ valid = valid1
+ for iv in range(len(valid2)):
+ #for ivv in range(len(valid1)) :
+ indv = numpy.array((valid1 == valid2[iv]).nonzero())
+ if len(indv[0]) == 0 :
+ valid = numpy.concatenate((valid,valid2[iv]), axis=None)
+ if len(valid)>0:
+ my_coh_aver[pair[0],valid]=1
+ my_coh_aver[pair[1],valid]=1
+ # si la coherencia es mayor a la coherencia threshold los datos se toman
+ #print my_coh_aver[0,:]
+ coh = numpy.squeeze(numpy.nansum(cspectra[ic,:,:], axis=0)/numpy.sqrt(numpy.nansum(spectra[pair[0],:,:], axis=0)*numpy.nansum(spectra[pair[1],:,:], axis=0)))
+ #print('coh',numpy.absolute(coh))
+ for ih in range(len(hei_th)):
+ hvalid = (heights>hei_th[ih]).nonzero()
+ hvalid = hvalid[0]
+ if len(hvalid)>0:
+ valid = (numpy.absolute(coh[hvalid])>coh_th[ih]).nonzero()
+ valid = valid[0]
+ #print('hvalid:',hvalid)
+ #print('valid', valid)
+ if len(valid)>0:
+ my_coh_aver[pair[0],hvalid[valid]] =1
+ my_coh_aver[pair[1],hvalid[valid]] =1
+
+ coh_echoes = (my_coh_aver[pair[0],:] == 1).nonzero()
+ incoh_echoes = (my_coh_aver[pair[0],:] != 1).nonzero()
+ incoh_echoes = incoh_echoes[0]
+ if len(incoh_echoes) > 0:
+ my_incoh_spectra[pair[0],:,incoh_echoes] = spectra[pair[0],:,incoh_echoes]
+ my_incoh_spectra[pair[1],:,incoh_echoes] = spectra[pair[1],:,incoh_echoes]
+ my_incoh_cspectra[ic,:,incoh_echoes] = cspectra[ic,:,incoh_echoes]
+ my_incoh_aver[pair[0],incoh_echoes] = 1
+ my_incoh_aver[pair[1],incoh_echoes] = 1
- def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
+
+ for ic in range(2):
+ pair = crosspairs[ic]
+
+ valid1 =(my_coh_aver[pair[0],:]==1 ).nonzero()
+ valid2 = (my_coh_aver[pair[1],:]==1).nonzero()
+ valid1 = numpy.array(valid1[0])
+ valid2 = numpy.array(valid2[0])
+ valid = valid1
+ #print valid1 , valid2
+ for iv in range(len(valid2)):
+ #for ivv in range(len(valid1)) :
+ indv = numpy.array((valid1 == valid2[iv]).nonzero())
+ if len(indv[0]) == 0 :
+ valid = numpy.concatenate((valid,valid2[iv]), axis=None)
+ #print valid
+ #valid = numpy.concatenate((valid1,valid2), axis=None)
+ valid1 =(my_coh_aver[pair[0],:] !=1 ).nonzero()
+ valid2 = (my_coh_aver[pair[1],:] !=1).nonzero()
+ valid1 = numpy.array(valid1[0])
+ valid2 = numpy.array(valid2[0])
+ incoh_echoes = valid1
+ #print valid1, valid2
+ #incoh_echoes= numpy.concatenate((valid1,valid2), axis=None)
+ for iv in range(len(valid2)):
+ #for ivv in range(len(valid1)) :
+ indv = numpy.array((valid1 == valid2[iv]).nonzero())
+ if len(indv[0]) == 0 :
+ incoh_echoes = numpy.concatenate(( incoh_echoes,valid2[iv]), axis=None)
+ #print incoh_echoes
+ if len(valid)>0:
+ #print pair
+ coh_spectra[pair[0],:,valid] = spectra[pair[0],:,valid]
+ coh_spectra[pair[1],:,valid] = spectra[pair[1],:,valid]
+ coh_cspectra[ic,:,valid] = cspectra[ic,:,valid]
+ coh_aver[pair[0],valid]=1
+ coh_aver[pair[1],valid]=1
+ if len(incoh_echoes)>0:
+ incoh_spectra[pair[0],:,incoh_echoes] = spectra[pair[0],:,incoh_echoes]
+ incoh_spectra[pair[1],:,incoh_echoes] = spectra[pair[1],:,incoh_echoes]
+ incoh_cspectra[ic,:,incoh_echoes] = cspectra[ic,:,incoh_echoes]
+ incoh_aver[pair[0],incoh_echoes]=1
+ incoh_aver[pair[1],incoh_echoes]=1
+ #plt.imshow(spectra[0,:,:],vmin=20000000)
+ #plt.show()
+ #my_incoh_aver = my_incoh_aver+1
+
+ #spec = my_incoh_spectra.copy()
+ #cspec = my_incoh_cspectra.copy()
+ #print('######################', spec)
+ #print(self.numpy)
+ #return spec, cspec,coh_aver
+ return my_incoh_spectra ,my_incoh_cspectra,my_incoh_aver,my_coh_aver, incoh_spectra, coh_spectra, incoh_cspectra, coh_cspectra, incoh_aver, coh_aver
+
+ def __CleanCoherent(self,snrth, spectra, cspectra, coh_aver,dataOut, noise,clean_coh_echoes,index):
+
+ import matplotlib.pyplot as plt
+ nProf = dataOut.nProfiles
+ heights = dataOut.heightList
+ nHei = len(heights)
+ channels = dataOut.channelList
+ nChan = len(channels)
+ crosspairs = dataOut.groupList
+ nPairs = len(crosspairs)
+
+ #data = dataOut.data_pre[0]
+ absc = dataOut.abscissaList[:-1]
+ #noise = dataOut.noise
+ #nChannel = data.shape[0]
+ data_param = numpy.zeros((nChan, 4, spectra.shape[2]))
+
+
+ #plt.plot(absc)
+ #plt.show()
+ clean_coh_spectra = spectra.copy()
+ clean_coh_cspectra = cspectra.copy()
+ clean_coh_aver = coh_aver.copy()
+
+ spwd_th=[10,6] #spwd_th[0] --> For satellites ; spwd_th[1] --> For special events like SUN.
+ coh_th = 0.75
+
+ rtime0 = [6,18] # periodo sin ESF
+ rtime1 = [10.5,13.5] # periodo con alta coherencia y alto ancho espectral (esperado): SOL.
+
+ time = index*5./60
+ if clean_coh_echoes == 1 :
+ for ind in range(nChan):
+ data_param[ind,:,:] = self.__calculateMoments( spectra[ind,:,:] , absc , noise[ind] )
+ #print data_param[:,3]
+ spwd = data_param[:,3]
+ #print spwd.shape
+ # SPECB_JULIA,header=anal_header,jspectra=spectra,vel=velocities,hei=heights, num_aver=1, mode_fit=0,smoothing=smoothing,jvelr=velr,jspwd=spwd,jsnr=snr,jnoise=noise,jstdvnoise=stdvnoise
+ #spwd1=[ 1.65607, 1.43416, 0.500373, 0.208361, 0.000000, 26.7767, 22.5936, 26.7530, 20.6962, 29.1098, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 28.0300, 27.0511, 27.8810, 26.3126, 27.8445, 24.6181, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000]
+ #spwd=numpy.array([spwd1,spwd1,spwd1,spwd1])
+ #print spwd.shape, heights.shape,coh_aver.shape
+ # para obtener spwd
+ for ic in range(nPairs):
+ pair = crosspairs[ic]
+ coh = numpy.squeeze(numpy.sum(cspectra[ic,:,:], axis=1)/numpy.sqrt(numpy.sum(spectra[pair[0],:,:], axis=1)*numpy.sum(spectra[pair[1],:,:], axis=1)))
+ for ih in range(nHei) :
+ # Considering heights higher than 200km in order to avoid removing phenomena like EEJ.
+ if heights[ih] >= 200 and coh_aver[pair[0],ih] == 1 and coh_aver[pair[1],ih] == 1 :
+ # Checking coherence
+ if (numpy.abs(coh[ih]) <= coh_th) or (time >= rtime0[0] and time <= rtime0[1]) :
+ # Checking spectral widths
+ if (spwd[pair[0],ih] > spwd_th[0]) or (spwd[pair[1],ih] > spwd_th[0]) :
+ # satelite
+ clean_coh_spectra[pair,ih,:] = 0.0
+ clean_coh_cspectra[ic,ih,:] = 0.0
+ clean_coh_aver[pair,ih] = 0
+ else :
+ if ((spwd[pair[0],ih] < spwd_th[1]) or (spwd[pair[1],ih] < spwd_th[1])) :
+ # Especial event like sun.
+ clean_coh_spectra[pair,ih,:] = 0.0
+ clean_coh_cspectra[ic,ih,:] = 0.0
+ clean_coh_aver[pair,ih] = 0
+
+ return clean_coh_spectra, clean_coh_cspectra, clean_coh_aver
+
+ isConfig = False
+ __dataReady = False
+ bloques = None
+ bloque0 = None
+ def __init__(self):
+ Operation.__init__(self)
+ self.i=0
+ self.isConfig = False
+
+ def setup(self,nChan,nProf,nHei,nBlocks):
+ self.__dataReady = False
+ self.bloques = numpy.zeros([2, nProf, nHei,nBlocks], dtype= complex)
+ self.bloque0 = numpy.zeros([nChan, nProf, nHei, nBlocks])
+
+ #def CleanRayleigh(self,dataOut,spectra,cspectra,out_spectra,out_cspectra,sat_spectra,sat_cspectra,crosspairs,heights, channels, nProf,nHei,nChan,nPairs,nIncohInt,nBlocks):
+ def CleanRayleigh(self,dataOut,spectra,cspectra,save_drifts):
+ #import matplotlib.pyplot as plt
+ #for k in range(149):
+
+ # self.bloque0[:,:,:,k] = spectra[:,:,0:nHei]
+ # self.bloques[:,:,:,k] = cspectra[:,:,0:nHei]
+ #if self.i==nBlocks:
+ # self.i==0
+ rfunc = cspectra.copy() #self.bloques
+ n_funct = len(rfunc[0,:,0,0])
+ val_spc = spectra*0.0 #self.bloque0*0.0
+ val_cspc = cspectra*0.0 #self.bloques*0.0
+ in_sat_spectra = spectra.copy() #self.bloque0
+ in_sat_cspectra = cspectra.copy() #self.bloques
+
+ #print( rfunc.shape)
+ min_hei = 200
+ nProf = dataOut.nProfiles
+ heights = dataOut.heightList
+ nHei = len(heights)
+ channels = dataOut.channelList
+ nChan = len(channels)
+ crosspairs = dataOut.groupList
+ nPairs = len(crosspairs)
+ hval=(heights >= min_hei).nonzero()
+ ih=hval[0]
+ #print numpy.absolute(rfunc[:,0,0,14])
+ for ih in range(hval[0][0],nHei):
+ for ifreq in range(nProf):
+ for ii in range(n_funct):
+
+ func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih]))
+ #print numpy.amin(func2clean)
+ val = (numpy.isfinite(func2clean)==True).nonzero()
+ if len(val)>0:
+ min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
+ if min_val <= -40 : min_val = -40
+ max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
+ if max_val >= 200 : max_val = 200
+ #print min_val, max_val
+ step = 1
+ #Getting bins and the histogram
+ x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
+ y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
+ mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
+ sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
+ parg = [numpy.amax(y_dist),mean,sigma]
+ try :
+ gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
+ mode = gauss_fit[1]
+ stdv = gauss_fit[2]
+ except:
+ mode = mean
+ stdv = sigma
+# if ih == 14 and ii == 0 and ifreq ==0 :
+# print x_dist.shape, y_dist.shape
+# print x_dist, y_dist
+# print min_val, max_val, binstep
+# print func2clean
+# print mean,sigma
+# mean1,std = norm.fit(y_dist)
+# print mean1, std, gauss_fit
+# print fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
+ # 7.84616 53.9307 3.61863
+ #stdv = 3.61863 # 2.99089
+ #mode = 53.9307 #7.79008
+
+ #Removing echoes greater than mode + 3*stdv
+ factor_stdv = 2.5
+ noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
+
+ if len(noval[0]) > 0:
+ novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
+ cross_pairs = crosspairs[ii]
+ #Getting coherent echoes which are removed.
+ if len(novall[0]) > 0:
+ #val_spc[(0,1),novall[a],ih] = 1
+ #val_spc[,(2,3),novall[a],ih] = 1
+ val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
+ val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
+ val_cspc[novall[0],ii,ifreq,ih] = 1
+ #print("OUT NOVALL 1")
+ #Removing coherent from ISR data
+# if ih == 17 and ii == 0 and ifreq ==0 :
+# print spectra[:,cross_pairs[0],ifreq,ih]
+ spectra[noval,cross_pairs[0],ifreq,ih] = numpy.nan
+ spectra[noval,cross_pairs[1],ifreq,ih] = numpy.nan
+ cspectra[noval,ii,ifreq,ih] = numpy.nan
+# if ih == 17 and ii == 0 and ifreq ==0 :
+# print spectra[:,cross_pairs[0],ifreq,ih]
+# print noval, len(noval[0])
+# print novall, len(novall[0])
+# print factor_stdv*stdv
+# print func2clean-mode
+# print val_spc[:,cross_pairs[0],ifreq,ih]
+# print spectra[:,cross_pairs[0],ifreq,ih]
+ #no sale es para savedrifts >2
+ ''' channels = channels
+ cross_pairs = cross_pairs
+ #print("OUT NOVALL 2")
+
+ vcross0 = (cross_pairs[0] == channels[ii]).nonzero()
+ vcross1 = (cross_pairs[1] == channels[ii]).nonzero()
+ vcross = numpy.concatenate((vcross0,vcross1),axis=None)
+ #print('vcros =', vcross)
+
+ #Getting coherent echoes which are removed.
+ if len(novall) > 0:
+ #val_spc[novall,ii,ifreq,ih] = 1
+ val_spc[ii,ifreq,ih,novall] = 1
+ if len(vcross) > 0:
+ val_cspc[vcross,ifreq,ih,novall] = 1
+
+ #Removing coherent from ISR data.
+ self.bloque0[ii,ifreq,ih,noval] = numpy.nan
+ if len(vcross) > 0:
+ self.bloques[vcross,ifreq,ih,noval] = numpy.nan
+ '''
+ #Getting average of the spectra and cross-spectra from incoherent echoes.
+ out_spectra = numpy.zeros([nChan,nProf,nHei], dtype=float) #+numpy.nan
+ out_cspectra = numpy.zeros([nPairs,nProf,nHei], dtype=complex) #+numpy.nan
+ for ih in range(nHei):
+ for ifreq in range(nProf):
+ for ich in range(nChan):
+ tmp = spectra[:,ich,ifreq,ih]
+ valid = (numpy.isfinite(tmp[:])==True).nonzero()
+# if ich == 0 and ifreq == 0 and ih == 17 :
+# print tmp
+# print valid
+# print len(valid[0])
+ #print('TMP',tmp)
+ if len(valid[0]) >0 :
+ out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
+ #for icr in range(nPairs):
+ for icr in range(nPairs):
+ tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
+ valid = (numpy.isfinite(tmp)==True).nonzero()
+ if len(valid[0]) > 0:
+ out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
+ # print('##########################################################')
+ #Removing fake coherent echoes (at least 4 points around the point)
+
+ val_spectra = numpy.sum(val_spc,0)
+ val_cspectra = numpy.sum(val_cspc,0)
+
+ val_spectra = self.REM_ISOLATED_POINTS(val_spectra,4)
+ val_cspectra = self.REM_ISOLATED_POINTS(val_cspectra,4)
+
+ for i in range(nChan):
+ for j in range(nProf):
+ for k in range(nHei):
+ if numpy.isfinite(val_spectra[i,j,k]) and val_spectra[i,j,k] < 1 :
+ val_spc[:,i,j,k] = 0.0
+ for i in range(nPairs):
+ for j in range(nProf):
+ for k in range(nHei):
+ if numpy.isfinite(val_cspectra[i,j,k]) and val_cspectra[i,j,k] < 1 :
+ val_cspc[:,i,j,k] = 0.0
+# val_spc = numpy.reshape(val_spc, (len(spectra[:,0,0,0]),nProf*nHei*nChan))
+# if numpy.isfinite(val_spectra)==str(True):
+# noval = (val_spectra<1).nonzero()
+# if len(noval) > 0:
+# val_spc[:,noval] = 0.0
+# val_spc = numpy.reshape(val_spc, (149,nChan,nProf,nHei))
+
+ #val_cspc = numpy.reshape(val_spc, (149,nChan*nHei*nProf))
+ #if numpy.isfinite(val_cspectra)==str(True):
+ # noval = (val_cspectra<1).nonzero()
+ # if len(noval) > 0:
+ # val_cspc[:,noval] = 0.0
+ # val_cspc = numpy.reshape(val_cspc, (149,nChan,nProf,nHei))
+
+ tmp_sat_spectra = spectra.copy()
+ tmp_sat_spectra = tmp_sat_spectra*numpy.nan
+ tmp_sat_cspectra = cspectra.copy()
+ tmp_sat_cspectra = tmp_sat_cspectra*numpy.nan
+
+# fig = plt.figure(figsize=(6,5))
+# left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
+# ax = fig.add_axes([left, bottom, width, height])
+# cp = ax.contour(10*numpy.log10(numpy.absolute(spectra[0,0,:,:])))
+# ax.clabel(cp, inline=True,fontsize=10)
+# plt.show()
+
+ val = (val_spc > 0).nonzero()
+ if len(val[0]) > 0:
+ tmp_sat_spectra[val] = in_sat_spectra[val]
+
+ val = (val_cspc > 0).nonzero()
+ if len(val[0]) > 0:
+ tmp_sat_cspectra[val] = in_sat_cspectra[val]
+
+ #Getting average of the spectra and cross-spectra from incoherent echoes.
+ sat_spectra = numpy.zeros((nChan,nProf,nHei), dtype=float)
+ sat_cspectra = numpy.zeros((nPairs,nProf,nHei), dtype=complex)
+ for ih in range(nHei):
+ for ifreq in range(nProf):
+ for ich in range(nChan):
+ tmp = numpy.squeeze(tmp_sat_spectra[:,ich,ifreq,ih])
+ valid = (numpy.isfinite(tmp)).nonzero()
+ if len(valid[0]) > 0:
+ sat_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
+
+ for icr in range(nPairs):
+ tmp = numpy.squeeze(tmp_sat_cspectra[:,icr,ifreq,ih])
+ valid = (numpy.isfinite(tmp)).nonzero()
+ if len(valid[0]) > 0:
+ sat_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
+ #self.__dataReady= True
+ #sat_spectra, sat_cspectra= sat_spectra, sat_cspectra
+ #if not self.__dataReady:
+ #return None, None
+ return out_spectra, out_cspectra,sat_spectra,sat_cspectra
+ def REM_ISOLATED_POINTS(self,array,rth):
+# import matplotlib.pyplot as plt
+ if rth == None : rth = 4
+
+ num_prof = len(array[0,:,0])
+ num_hei = len(array[0,0,:])
+ n2d = len(array[:,0,0])
+
+ for ii in range(n2d) :
+ #print ii,n2d
+ tmp = array[ii,:,:]
+ #print tmp.shape, array[ii,101,:],array[ii,102,:]
+
+# fig = plt.figure(figsize=(6,5))
+# left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
+# ax = fig.add_axes([left, bottom, width, height])
+# x = range(num_prof)
+# y = range(num_hei)
+# cp = ax.contour(y,x,tmp)
+# ax.clabel(cp, inline=True,fontsize=10)
+# plt.show()
+
+ #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
+ tmp = numpy.reshape(tmp,num_prof*num_hei)
+ indxs1 = (numpy.isfinite(tmp)==True).nonzero()
+ indxs2 = (tmp > 0).nonzero()
+
+ indxs1 = (indxs1[0])
+ indxs2 = indxs2[0]
+ #indxs1 = numpy.array(indxs1[0])
+ #indxs2 = numpy.array(indxs2[0])
+ indxs = None
+ #print indxs1 , indxs2
+ for iv in range(len(indxs2)):
+ indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
+ #print len(indxs2), indv
+ if len(indv[0]) > 0 :
+ indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
+# print indxs
+ indxs = indxs[1:]
+ #print indxs, len(indxs)
+ if len(indxs) < 4 :
+ array[ii,:,:] = 0.
+ return
+
+ xpos = numpy.mod(indxs ,num_hei)
+ ypos = (indxs / num_hei)
+ sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
+ #print sx
+ xpos = xpos[sx]
+ ypos = ypos[sx]
+
+ # *********************************** Cleaning isolated points **********************************
+ ic = 0
+ while True :
+ r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
+ #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
+ #plt.plot(r)
+ #plt.show()
+ no_coh1 = (numpy.isfinite(r)==True).nonzero()
+ no_coh2 = (r <= rth).nonzero()
+ #print r, no_coh1, no_coh2
+ no_coh1 = numpy.array(no_coh1[0])
+ no_coh2 = numpy.array(no_coh2[0])
+ no_coh = None
+ #print valid1 , valid2
+ for iv in range(len(no_coh2)):
+ indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
+ if len(indv[0]) > 0 :
+ no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
+ no_coh = no_coh[1:]
+ #print len(no_coh), no_coh
+ if len(no_coh) < 4 :
+ #print xpos[ic], ypos[ic], ic
+# plt.plot(r)
+# plt.show()
+ xpos[ic] = numpy.nan
+ ypos[ic] = numpy.nan
+
+ ic = ic + 1
+ if (ic == len(indxs)) :
+ break
+ #print( xpos, ypos)
+
+ indxs = (numpy.isfinite(list(xpos))==True).nonzero()
+ #print indxs[0]
+ if len(indxs[0]) < 4 :
+ array[ii,:,:] = 0.
+ return
+
+ xpos = xpos[indxs[0]]
+ ypos = ypos[indxs[0]]
+ for i in range(0,len(ypos)):
+ ypos[i]=int(ypos[i])
+ junk = tmp
+ tmp = junk*0.0
+
+ tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
+ array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
+
+ #print array.shape
+ #tmp = numpy.reshape(tmp,(num_prof,num_hei))
+ #print tmp.shape
+
+# fig = plt.figure(figsize=(6,5))
+# left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
+# ax = fig.add_axes([left, bottom, width, height])
+# x = range(num_prof)
+# y = range(num_hei)
+# cp = ax.contour(y,x,array[ii,:,:])
+# ax.clabel(cp, inline=True,fontsize=10)
+# plt.show()
+ return array
+ def moments(self,doppler,yarray,npoints):
+ ytemp = yarray
+ #val = WHERE(ytemp GT 0,cval)
+ #if cval == 0 : val = range(npoints-1)
+ val = (ytemp > 0).nonzero()
+ val = val[0]
+ #print('hvalid:',hvalid)
+ #print('valid', valid)
+ if len(val) == 0 : val = range(npoints-1)
+
+ ynew = 0.5*(ytemp[val[0]]+ytemp[val[len(val)-1]])
+ ytemp[len(ytemp):] = [ynew]
+
+ index = 0
+ index = numpy.argmax(ytemp)
+ ytemp = numpy.roll(ytemp,int(npoints/2)-1-index)
+ ytemp = ytemp[0:npoints-1]
+
+ fmom = numpy.sum(doppler*ytemp)/numpy.sum(ytemp)+(index-(npoints/2-1))*numpy.abs(doppler[1]-doppler[0])
+ smom = numpy.sum(doppler*doppler*ytemp)/numpy.sum(ytemp)
+ return [fmom,numpy.sqrt(smom)]
+ # **********************************************************************************************
+ index = 0
+ fint = 0
+ buffer = 0
+ buffer2 = 0
+ buffer3 = 0
+ def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
+ #print (dataOut.utctime)
+ import matplotlib.pyplot as plt
+ #nGroups = groupArray.shape[0]
+ nChannels = dataOut.nChannels
+ nHeights= dataOut.heightList.size
+ nProf = dataOut.nProfiles
+
+ tini=time.localtime(dataOut.utctime)
+ if (tini.tm_min % 5) == 0 and (tini.tm_sec < 5 and self.fint==0):
+# print tini.tm_min
+ self.index = 0
+ jspc = self.buffer
+ jcspc = self.buffer2
+ jnoise = self.buffer3
+
+ self.buffer = dataOut.data_spc
+ self.buffer2 = dataOut.data_cspc
+ self.buffer3 = dataOut.noise
+ self.fint = 1
+ #print self.buffer[0,:,0]
+
+ if numpy.any(jspc) :
+ #print (len(jspc), jspc.shape)
+ #print jspc[len(jspc)-4,:,0]
+ jspc= numpy.reshape(jspc,(int(len(jspc)/4),nChannels,nProf,nHeights))
+ jcspc= numpy.reshape(jcspc,(int(len(jcspc)/2),2,nProf,nHeights))
+ jnoise= numpy.reshape(jnoise,(int(len(jnoise)/4),nChannels))
+ #print jspc[len(jspc)-1,0,:,0]
+ else:
+ dataOut.flagNoData = True
+ return dataOut
+
+ else :
+ #print tini.tm_min
+ #self.fint = 0
+ if (tini.tm_min % 5) == 0 : self.fint = 1
+ else : self.fint = 0
+ self.index += 1
+ #print( len(self.buffer))
+
+ if numpy.any(self.buffer):
+ self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
+ self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
+ self.buffer3 = numpy.concatenate((self.buffer3,dataOut.noise), axis=0)
+ else:
+ self.buffer = dataOut.data_spc
+ self.buffer2 = dataOut.data_cspc
+ self.buffer3 = dataOut.noise
+ #print self.index, self.fint
+ #print self.buffer2.shape
+ dataOut.flagNoData = True
+ return dataOut
+# if self.index == 0 and self.fint == 1 :
+# if jspc != None:
+# print len(jspc), jspc.shape
+# jspc= numpy.reshape(jspc,(4,128,63,len(jspc)/4))
+# print jspc.shape
+# dataOut.flagNoData = True
+# return dataOut
if path != None:
sys.path.append(path)
- self.dataOut.library = importlib.import_module(file)
-
+ self.library = importlib.import_module(file)
+
#To be inserted as a parameter
groupArray = numpy.array(groupList)
-# groupArray = numpy.array([[0,1],[2,3]])
- self.dataOut.groupList = groupArray
-
+ #groupArray = numpy.array([[0,1],[2,3]])
+ dataOut.groupList = groupArray
+
nGroups = groupArray.shape[0]
- nChannels = self.dataIn.nChannels
- nHeights=self.dataIn.heightList.size
-
+ nChannels = dataOut.nChannels
+ nHeights= dataOut.heightList.size
+ # print self.index
#Parameters Array
- self.dataOut.data_param = None
-
+ dataOut.data_param = None
+ dataOut.data_paramC = None
+
#Set constants
- constants = self.dataOut.library.setConstants(self.dataIn)
- self.dataOut.constants = constants
- M = self.dataIn.normFactor
- N = self.dataIn.nFFTPoints
- ippSeconds = self.dataIn.ippSeconds
- K = self.dataIn.nIncohInt
- pairsArray = numpy.array(self.dataIn.pairsList)
-
+ constants = self.library.setConstants(dataOut)
+ dataOut.constants = constants
+ M = dataOut.normFactor
+ N = dataOut.nFFTPoints
+ ippSeconds = dataOut.ippSeconds
+ K = dataOut.nIncohInt
+ pairsArray = numpy.array(dataOut.pairsList)
+
+ snrth= 20
+ spectra = dataOut.data_spc
+ cspectra = dataOut.data_cspc
+ nProf = dataOut.nProfiles
+ heights = dataOut.heightList
+ nHei = len(heights)
+
+ channels = dataOut.channelList
+ nChan = len(channels)
+ nIncohInt = dataOut.nIncohInt
+ crosspairs = dataOut.groupList
+ noise = dataOut.noise
+ #print( nProf,heights)
+ #print( jspc.shape, jspc.shape[0])
+ #print noise
+ #print jnoise[len(jnoise)-1,:], numpy.nansum(jnoise,axis=0)/len(jnoise)
+ jnoise = jnoise/N
+ noise = numpy.nansum(jnoise,axis=0)#/len(jnoise)
+ #print( noise)
+ power = numpy.sum(spectra, axis=1)
+ #print power[0,:]
+ #print("CROSSPAIRS",crosspairs)
+ nPairs = len(crosspairs)
+ #print(numpy.shape(dataOut.data_spc))
+ absc = dataOut.abscissaList[:-1]
+ #print absc.shape
+ #nBlocks=149
+ #print('spectra', spectra.shape)
+ #print('noise print', crosspairs)
+ #print('spectra', spectra.shape)
+ #print('cspectra', cspectra.shape)
+ #print numpy.array(dataOut.data_pre[1]).shape
+ #spec, cspec = self.__DiffCoherent(snrth, spectra, cspectra, nProf, heights,nChan, nHei, nPairs, channels, noise*nProf, crosspairs)
+
+ if not self.isConfig:
+ #self.setup(nChan,nProf,nHei=35,nBlocks=nBlocks)
+ self.isConfig = True
+
+ #print ("configure todo")
+# dataOut.flagNoData = True
+ index = tini.tm_hour*12+tini.tm_min/5
+ #print index
+ jspc = jspc/N/N
+ jcspc = jcspc/N/N
+ #dataOut.data_spc,dataOut.data_cspc = self.CleanRayleigh(dataOut,jspc,jcspc,crosspairs,heights,channels,nProf,nHei,nChan,nPairs,nIncohInt,nBlocks=nBlocks)
+ tmp_spectra,tmp_cspectra,sat_spectra,sat_cspectra = self.CleanRayleigh(dataOut,jspc,jcspc,2)
+ jspectra = tmp_spectra*len(jspc[:,0,0,0])
+ jcspectra = tmp_cspectra*len(jspc[:,0,0,0])
+ #incoh_spectra, incoh_cspectra,coh_aver = self.__DiffCoherent(snrth, dataOut.data_spc, dataOut.data_cspc, nProf, heights,nChan, nHei, nPairs, channels, noise*nProf, crosspairs)
+ my_incoh_spectra ,my_incoh_cspectra,my_incoh_aver,my_coh_aver, incoh_spectra, coh_spectra, incoh_cspectra, coh_cspectra, incoh_aver, coh_aver = self.__DiffCoherent(jspectra, jcspectra, dataOut, noise, snrth, None, None)
+ clean_coh_spectra, clean_coh_cspectra, clean_coh_aver = self.__CleanCoherent(snrth, coh_spectra, coh_cspectra, coh_aver, dataOut, noise,1,index)
+ dataOut.data_spc = incoh_spectra
+ dataOut.data_cspc = incoh_cspectra
+ #dataOut.data_spc = tmp_spectra
+ #dataOut.data_cspc = tmp_cspectra
+
+ clean_num_aver = incoh_aver*len(jspc[:,0,0,0])
+ coh_num_aver = clean_coh_aver*len(jspc[:,0,0,0])
+ #plt.plot( tmp_spectra[0,:,17])
+ #plt.show()
+# plt.plot( incoh_spectra[0,64,:])
+# plt.show()
+
+# plt.imshow(dataOut.data_spc[0,:,:],vmin=20000000)
+# plt.show()
#List of possible combinations
listComb = itertools.combinations(numpy.arange(groupArray.shape[1]),2)
indCross = numpy.zeros(len(list(listComb)), dtype = 'int')
-
+ #print("listComb",listComb)
+
if getSNR:
listChannels = groupArray.reshape((groupArray.size))
listChannels.sort()
- noise = self.dataIn.getNoise()
- self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
-
- for i in range(nGroups):
+ #noise = dataOut.getNoise()
+ #print noise
+ #print(numpy.shape(noise))
+ #dataOut.data_spc, dataOut.data_cspc = self.__DiffCoherent(snrth, spectra, cspectra, nProf, heights, nHei, nChan, channels, noise, nPairs, crosspairs)
+ dataOut.data_SNR = self.__getSNR(dataOut.data_spc[listChannels,:,:], noise[listChannels])
+ #dataOut.data_SNR = self.__getSNR(jspectra[listChannels,:,:], noise[listChannels])
+
+ if dataOut.data_paramC is None:
+ dataOut.data_paramC = numpy.zeros((nGroups*4, nHeights,2))*numpy.nan
+ for i in range(nGroups):
coord = groupArray[i,:]
-
#Input data array
- data = self.dataIn.data_spc[coord,:,:]/(M*N)
+ data = dataOut.data_spc[coord,:,:]/(M*N)
data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
-
+
#Cross Spectra data array for Covariance Matrixes
ind = 0
for pairs in listComb:
pairsSel = numpy.array([coord[x],coord[y]])
indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
ind += 1
- dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
- dataCross = dataCross**2/K
-
+ dataCross = dataOut.data_cspc[indCross,:,:]/(M*N)
+ dataCross = dataCross**2
+ #print dataOut.data_SNR.shape
+
+ nhei = nHeights
+ poweri = numpy.sum(dataOut.data_spc[:,1:nProf-0,:],axis=1)/clean_num_aver[:,:]
+ if i == 0 : my_noises = numpy.zeros(4,dtype=float) #FLTARR(4)
+ n0i = numpy.nanmin(poweri[0+i*2,0:nhei-0])/(nProf-1)
+ n1i = numpy.nanmin(poweri[1+i*2,0:nhei-0])/(nProf-1)
+
+ n0 = n0i
+ n1= n1i
+ my_noises[2*i+0] = n0
+ my_noises[2*i+1] = n1
+ snrth = -16.0
+ snrth = 10**(snrth/10.0)
+
for h in range(nHeights):
-
- #Input
+# print("I ", "H", i,h )
+ ##Input
d = data[:,h]
-
+ smooth = clean_num_aver[i+1,h] #dataOut.data_spc[:,1:nProf-0,:]
+ signalpn0 = (dataOut.data_spc[i*2,1:(nProf-0),h])/smooth
+ signalpn1 = (dataOut.data_spc[i*2+1,1:(nProf-0),h])/smooth
+ signal0 = signalpn0-n0
+ signal1 = signalpn1-n1
+ snr0 = numpy.sum(signal0/n0)/(nProf-1)
+ snr1 = numpy.sum(signal1/n1)/(nProf-1)
+ #print clean_num_aver[coord,h]
+ if snr0 > snrth and snr1 > snrth and clean_num_aver[i+1,h] > 0 :
#Covariance Matrix
- D = numpy.diag(d**2/K)
- ind = 0
- for pairs in listComb:
+ #print h, d.shape
+ D = numpy.diag(d**2)
+ ind = 0
+ for pairs in listComb:
#Coordinates in Covariance Matrix
- x = pairs[0]
- y = pairs[1]
+ x = pairs[0]
+ y = pairs[1]
#Channel Index
- S12 = dataCross[ind,:,h]
- D12 = numpy.diag(S12)
+ S12 = dataCross[ind,:,h]
+ D12 = numpy.diag(S12)
#Completing Covariance Matrix with Cross Spectras
- D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
- D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
- ind += 1
- Dinv=numpy.linalg.inv(D)
- L=numpy.linalg.cholesky(Dinv)
- LT=L.T
-
- dp = numpy.dot(LT,d)
+ D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
+ D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
+ ind += 1
+ diagD = numpy.zeros(256)
+ if h == 17 :
+ for ii in range(256): diagD[ii] = D[ii,ii]
+ #plt.plot(diagD)
+ #plt.show()
+
+# print hprint
+ #Dinv=numpy.linalg.inv(D)
+ #L=numpy.linalg.cholesky(Dinv)
+ try:
+ Dinv=numpy.linalg.inv(D)
+ L=numpy.linalg.cholesky(Dinv)
+ except:
+ Dinv = D*numpy.nan
+ L= D*numpy.nan
+ LT=L.T
+ dp = numpy.dot(LT,d)
+
#Initial values
- data_spc = self.dataIn.data_spc[coord,:,h]
-
- if (h>0)and(error1[3]<5):
- p0 = self.dataOut.data_param[i,:,h-1]
- else:
- p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
+ data_spc = dataOut.data_spc[coord,:,h]
- try:
+ if (h>0)and(error1[3]<5):
+ p0 = dataOut.data_param[i,:,h-1]
+ else:
+ #print("INSIDE ELSE")
+ #print(data_spc.shape,constants,i)
+ p0 = numpy.array(self.library.initialValuesFunction(data_spc, constants))# sin el i(data_spc, constants, i)
+ #print ("WAIT_p0",p0)
+ try:
#Least Squares
- minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
-# minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
+ #print (dp,LT,constants)
+ #value =self.__residFunction(p0,dp,LT,constants)
+ #print ("valueREADY",value.shape, type(value))
+ #optimize.leastsq(value)
+ minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
+
+ # print(minp)
+ #minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
#Chi square error
- error0 = numpy.sum(infodict['fvec']**2)/(2*N)
+ #print(minp,covp.infodict,mesg,ier)
+ #print("REALIZA OPTIMIZ")
+ error0 = numpy.sum(infodict['fvec']**2)/(2*N)
#Error with Jacobian
- error1 = self.dataOut.library.errorFunction(minp,constants,LT)
- except:
+ error1 = self.library.errorFunction(minp,constants,LT)
+# print self.__residFunction(p0,dp,LT, constants)
+# print infodict['fvec']
+# print self.__residFunction(minp,dp,LT,constants)
+
+ except:
+ minp = p0*numpy.nan
+ error0 = numpy.nan
+ error1 = p0*numpy.nan
+ #print ("EXCEPT 0000000000")
+# s_sq = (self.__residFunction(minp,dp,LT,constants)).sum()/(len(dp)-len(p0))
+# covp = covp*s_sq
+# #print("TRY___________________________________________1")
+# error = []
+# for ip in range(len(minp)):
+# try:
+# error.append(numpy.absolute(covp[ip][ip])**0.5)
+# except:
+# error.append( 0.00 )
+ else :
+ data_spc = dataOut.data_spc[coord,:,h]
+ p0 = numpy.array(self.library.initialValuesFunction(data_spc, constants))
minp = p0*numpy.nan
error0 = numpy.nan
- error1 = p0*numpy.nan
-
+ error1 = p0*numpy.nan
#Save
- if self.dataOut.data_param is None:
- self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
- self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
-
- self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
- self.dataOut.data_param[i,:,h] = minp
- return
-
+ if dataOut.data_param is None:
+ dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
+ dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
+
+ dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
+ dataOut.data_param[i,:,h] = minp
+ #print(minp)
+ #print("FIN")
+ #print ("DATA",minp.shape)
+
+ #plt.plot(dataOut.data_param[0,3,:])
+ #print(dataOut.data_param[:,3,:])
+ #dataOut.data_errorC = numpy.zeros((nGroups, nHeights,1))*numpy.nan
+ for ht in range(nHeights-1) :
+ smooth = coh_num_aver[i+1,ht] #datc[0,ht,0,beam]
+ dataOut.data_paramC[4*i,ht,1] = smooth
+ signalpn0 = (coh_spectra[i*2 ,1:(nProf-0),ht])/smooth #coh_spectra
+ signalpn1 = (coh_spectra[i*2+1,1:(nProf-0),ht])/smooth
+
+ #val0 = WHERE(signalpn0 > 0,cval0)
+ val0 = (signalpn0 > 0).nonzero()
+ val0 = val0[0]
+ #print('hvalid:',hvalid)
+ #print('valid', valid)
+ if len(val0) == 0 : val0_npoints = nProf
+ else : val0_npoints = len(val0)
+
+ #val1 = WHERE(signalpn1 > 0,cval1)
+ val1 = (signalpn1 > 0).nonzero()
+ val1 = val1[0]
+ if len(val1) == 0 : val1_npoints = nProf
+ else : val1_npoints = len(val1)
+
+ dataOut.data_paramC[0+4*i,ht,0] = numpy.sum((signalpn0/val0_npoints))/n0
+ dataOut.data_paramC[1+4*i,ht,0] = numpy.sum((signalpn1/val1_npoints))/n1
+
+ signal0 = (signalpn0-n0) # > 0
+ vali = (signal0 < 0).nonzero()
+ vali = vali[0]
+ if len(vali) > 0 : signal0[vali] = 0
+ signal1 = (signalpn1-n1) #> 0
+ vali = (signal1 < 0).nonzero()
+ vali = vali[0]
+ if len(vali) > 0 : signal1[vali] = 0
+ snr0 = numpy.sum(signal0/n0)/(nProf-1)
+ snr1 = numpy.sum(signal1/n1)/(nProf-1)
+ doppler = absc[1:]
+ if snr0 >= snrth and snr1 >= snrth and smooth :
+ signalpn0_n0 = signalpn0
+ signalpn0_n0[val0] = signalpn0[val0] - n0
+ mom0 = self.moments(doppler,signalpn0-n0,nProf)
+# sigtmp= numpy.transpose(numpy.tile(signalpn0, [4,1]))
+# momt= self.__calculateMoments( sigtmp, doppler , n0 )
+ signalpn1_n1 = signalpn1
+ signalpn1_n1[val1] = signalpn1[val1] - n1
+ mom1 = self.moments(doppler,signalpn1_n1,nProf)
+ dataOut.data_paramC[2+4*i,ht,0] = (mom0[0]+mom1[0])/2.
+ dataOut.data_paramC[3+4*i,ht,0] = (mom0[1]+mom1[1])/2.
+# if graph == 1 :
+# window, 13
+# plot,doppler,signalpn0
+# oplot,doppler,signalpn1,linest=1
+# oplot,mom0(0)*doppler/doppler,signalpn0
+# oplot,mom1(0)*doppler/doppler,signalpn1
+# print,interval/12.,beam,45+ht*15,snr0,snr1,mom0(0),mom1(0),mom0(1),mom1(1)
+ #ENDIF
+ #ENDIF
+ #ENDFOR End height
+ #plt.show()
+ #print dataOut.data_param[i,3,:]
+# if self.__dataReady:
+# dataOut.flagNoData = False
+ #print dataOut.data_error[:,3,:]
+ dataOut.data_spc = jspectra
+ if getSNR:
+ listChannels = groupArray.reshape((groupArray.size))
+ listChannels.sort()
+
+ dataOut.data_SNR = self.__getSNR(dataOut.data_spc[listChannels,:,:], my_noises[listChannels])
+ return dataOut
+
+
def __residFunction(self, p, dp, LT, constants):
- fm = self.dataOut.library.modelFunction(p, constants)
+ fm = self.library.modelFunction(p, constants)
fmp=numpy.dot(LT,fm)
-
+ #print ("DONE",dp -fmp)
+ #print ("ok")
return dp-fmp
def __getSNR(self, z, noise):
-
+
avg = numpy.average(z, axis=1)
SNR = (avg.T-noise)/noise
SNR = SNR.T
return SNR
-
- def __chisq(p,chindex,hindex):
+
+ def __chisq(self,p,chindex,hindex):
#similar to Resid but calculates CHI**2
[LT,d,fm]=setupLTdfm(p,chindex,hindex)
dp=numpy.dot(LT,d)
fmp=numpy.dot(LT,fm)
chisq=numpy.dot((dp-fmp).T,(dp-fmp))
return chisq
-
+
class WindProfiler(Operation):
-
+
__isConfig = False
-
+
__initime = None
__lastdatatime = None
__integrationtime = None
-
+
__buffer = None
-
+
__dataReady = False
-
+
__firstdata = None
-
+
n = None
-
- def __init__(self):
+
+ def __init__(self):
Operation.__init__(self)
-
+
def __calculateCosDir(self, elev, azim):
zen = (90 - elev)*numpy.pi/180
azim = azim*numpy.pi/180
- cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
+ cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
-
+
signX = numpy.sign(numpy.cos(azim))
signY = numpy.sign(numpy.sin(azim))
-
+
cosDirX = numpy.copysign(cosDirX, signX)
cosDirY = numpy.copysign(cosDirY, signY)
return cosDirX, cosDirY
-
+
def __calculateAngles(self, theta_x, theta_y, azimuth):
-
+
dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
zenith_arr = numpy.arccos(dir_cosw)
azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
-
+
dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
-
+
return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
-
-#
+
+#
if horOnly:
A = numpy.c_[dir_cosu,dir_cosv]
else:
@@ -1654,37 +2537,37 @@ class WindProfiler(Operation):
listPhi = phi.tolist()
maxid = listPhi.index(max(listPhi))
minid = listPhi.index(min(listPhi))
-
- rango = list(range(len(phi)))
+
+ rango = list(range(len(phi)))
# rango = numpy.delete(rango,maxid)
-
+
heiRang1 = heiRang*math.cos(phi[maxid])
heiRangAux = heiRang*math.cos(phi[minid])
indOut = (heiRang1 < heiRangAux[0]).nonzero()
heiRang1 = numpy.delete(heiRang1,indOut)
-
+
velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
SNR1 = numpy.zeros([len(phi),len(heiRang1)])
-
+
for i in rango:
x = heiRang*math.cos(phi[i])
y1 = velRadial[i,:]
f1 = interpolate.interp1d(x,y1,kind = 'cubic')
-
+
x1 = heiRang1
y11 = f1(x1)
-
+
y2 = SNR[i,:]
f2 = interpolate.interp1d(x,y2,kind = 'cubic')
y21 = f2(x1)
-
+
velRadial1[i,:] = y11
SNR1[i,:] = y21
-
+
return heiRang1, velRadial1, SNR1
def __calculateVelUVW(self, A, velRadial):
-
+
#Operacion Matricial
# velUVW = numpy.zeros((velRadial.shape[1],3))
# for ind in range(velRadial.shape[1]):
@@ -1692,27 +2575,27 @@ class WindProfiler(Operation):
# velUVW = velUVW.transpose()
velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
velUVW[:,:] = numpy.dot(A,velRadial)
-
-
+
+
return velUVW
-
+
# def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
-
+
def techniqueDBS(self, kwargs):
"""
Function that implements Doppler Beam Swinging (DBS) technique.
-
+
Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
Direction correction (if necessary), Ranges and SNR
-
+
Output: Winds estimation (Zonal, Meridional and Vertical)
-
+
Parameters affected: Winds, height range, SNR
"""
velRadial0 = kwargs['velRadial']
heiRang = kwargs['heightList']
SNR0 = kwargs['SNR']
-
+
if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
theta_x = numpy.array(kwargs['dirCosx'])
theta_y = numpy.array(kwargs['dirCosy'])
@@ -1720,7 +2603,7 @@ class WindProfiler(Operation):
elev = numpy.array(kwargs['elevation'])
azim = numpy.array(kwargs['azimuth'])
theta_x, theta_y = self.__calculateCosDir(elev, azim)
- azimuth = kwargs['correctAzimuth']
+ azimuth = kwargs['correctAzimuth']
if 'horizontalOnly' in kwargs:
horizontalOnly = kwargs['horizontalOnly']
else: horizontalOnly = False
@@ -1735,22 +2618,22 @@ class WindProfiler(Operation):
param = param[arrayChannel,:,:]
theta_x = theta_x[arrayChannel]
theta_y = theta_y[arrayChannel]
-
- azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
- heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
+
+ azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
+ heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
-
+
#Calculo de Componentes de la velocidad con DBS
winds = self.__calculateVelUVW(A,velRadial1)
-
+
return winds, heiRang1, SNR1
-
+
def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
-
+
nPairs = len(pairs_ccf)
posx = numpy.asarray(posx)
posy = numpy.asarray(posy)
-
+
#Rotacion Inversa para alinear con el azimuth
if azimuth!= None:
azimuth = azimuth*math.pi/180
@@ -1759,126 +2642,126 @@ class WindProfiler(Operation):
else:
posx1 = posx
posy1 = posy
-
+
#Calculo de Distancias
distx = numpy.zeros(nPairs)
disty = numpy.zeros(nPairs)
dist = numpy.zeros(nPairs)
ang = numpy.zeros(nPairs)
-
+
for i in range(nPairs):
distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
- disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
+ disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
ang[i] = numpy.arctan2(disty[i],distx[i])
-
+
return distx, disty, dist, ang
- #Calculo de Matrices
+ #Calculo de Matrices
# nPairs = len(pairs)
# ang1 = numpy.zeros((nPairs, 2, 1))
# dist1 = numpy.zeros((nPairs, 2, 1))
-#
+#
# for j in range(nPairs):
# dist1[j,0,0] = dist[pairs[j][0]]
# dist1[j,1,0] = dist[pairs[j][1]]
# ang1[j,0,0] = ang[pairs[j][0]]
# ang1[j,1,0] = ang[pairs[j][1]]
-#
+#
# return distx,disty, dist1,ang1
-
+
def __calculateVelVer(self, phase, lagTRange, _lambda):
Ts = lagTRange[1] - lagTRange[0]
velW = -_lambda*phase/(4*math.pi*Ts)
-
+
return velW
-
+
def __calculateVelHorDir(self, dist, tau1, tau2, ang):
nPairs = tau1.shape[0]
nHeights = tau1.shape[1]
- vel = numpy.zeros((nPairs,3,nHeights))
+ vel = numpy.zeros((nPairs,3,nHeights))
dist1 = numpy.reshape(dist, (dist.size,1))
-
+
angCos = numpy.cos(ang)
angSin = numpy.sin(ang)
-
- vel0 = dist1*tau1/(2*tau2**2)
+
+ vel0 = dist1*tau1/(2*tau2**2)
vel[:,0,:] = (vel0*angCos).sum(axis = 1)
vel[:,1,:] = (vel0*angSin).sum(axis = 1)
-
+
ind = numpy.where(numpy.isinf(vel))
vel[ind] = numpy.nan
-
+
return vel
-
+
# def __getPairsAutoCorr(self, pairsList, nChannels):
-#
+#
# pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
-#
-# for l in range(len(pairsList)):
+#
+# for l in range(len(pairsList)):
# firstChannel = pairsList[l][0]
# secondChannel = pairsList[l][1]
-#
-# #Obteniendo pares de Autocorrelacion
+#
+# #Obteniendo pares de Autocorrelacion
# if firstChannel == secondChannel:
# pairsAutoCorr[firstChannel] = int(l)
-#
+#
# pairsAutoCorr = pairsAutoCorr.astype(int)
-#
+#
# pairsCrossCorr = range(len(pairsList))
# pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
-#
+#
# return pairsAutoCorr, pairsCrossCorr
-
+
# def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
def techniqueSA(self, kwargs):
-
- """
+
+ """
Function that implements Spaced Antenna (SA) technique.
-
+
Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
Direction correction (if necessary), Ranges and SNR
-
+
Output: Winds estimation (Zonal, Meridional and Vertical)
-
+
Parameters affected: Winds
"""
position_x = kwargs['positionX']
position_y = kwargs['positionY']
azimuth = kwargs['azimuth']
-
+
if 'correctFactor' in kwargs:
correctFactor = kwargs['correctFactor']
else:
correctFactor = 1
-
+
groupList = kwargs['groupList']
pairs_ccf = groupList[1]
tau = kwargs['tau']
_lambda = kwargs['_lambda']
-
+
#Cross Correlation pairs obtained
# pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
# pairsArray = numpy.array(pairsList)[pairsCrossCorr]
# pairsSelArray = numpy.array(pairsSelected)
# pairs = []
-#
+#
# #Wind estimation pairs obtained
# for i in range(pairsSelArray.shape[0]/2):
# ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
# ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
# pairs.append((ind1,ind2))
-
+
indtau = tau.shape[0]/2
tau1 = tau[:indtau,:]
tau2 = tau[indtau:-1,:]
# tau1 = tau1[pairs,:]
# tau2 = tau2[pairs,:]
phase1 = tau[-1,:]
-
+
#---------------------------------------------------------------------
- #Metodo Directo
+ #Metodo Directo
distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
winds = stats.nanmean(winds, axis=0)
@@ -1894,97 +2777,97 @@ class WindProfiler(Operation):
winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
winds = correctFactor*winds
return winds
-
+
def __checkTime(self, currentTime, paramInterval, outputInterval):
-
+
dataTime = currentTime + paramInterval
deltaTime = dataTime - self.__initime
-
+
if deltaTime >= outputInterval or deltaTime < 0:
self.__dataReady = True
- return
-
+ return
+
def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
'''
Function that implements winds estimation technique with detected meteors.
-
+
Input: Detected meteors, Minimum meteor quantity to wind estimation
-
+
Output: Winds estimation (Zonal and Meridional)
-
+
Parameters affected: Winds
- '''
+ '''
#Settings
nInt = (heightMax - heightMin)/2
nInt = int(nInt)
- winds = numpy.zeros((2,nInt))*numpy.nan
-
+ winds = numpy.zeros((2,nInt))*numpy.nan
+
#Filter errors
error = numpy.where(arrayMeteor[:,-1] == 0)[0]
finalMeteor = arrayMeteor[error,:]
-
+
#Meteor Histogram
finalHeights = finalMeteor[:,2]
hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
nMeteorsPerI = hist[0]
heightPerI = hist[1]
-
+
#Sort of meteors
indSort = finalHeights.argsort()
finalMeteor2 = finalMeteor[indSort,:]
-
+
# Calculating winds
ind1 = 0
- ind2 = 0
-
+ ind2 = 0
+
for i in range(nInt):
nMet = nMeteorsPerI[i]
ind1 = ind2
ind2 = ind1 + nMet
-
+
meteorAux = finalMeteor2[ind1:ind2,:]
-
+
if meteorAux.shape[0] >= meteorThresh:
vel = meteorAux[:, 6]
zen = meteorAux[:, 4]*numpy.pi/180
azim = meteorAux[:, 3]*numpy.pi/180
-
+
n = numpy.cos(zen)
# m = (1 - n**2)/(1 - numpy.tan(azim)**2)
# l = m*numpy.tan(azim)
l = numpy.sin(zen)*numpy.sin(azim)
m = numpy.sin(zen)*numpy.cos(azim)
-
+
A = numpy.vstack((l, m)).transpose()
A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
windsAux = numpy.dot(A1, vel)
-
+
winds[0,i] = windsAux[0]
winds[1,i] = windsAux[1]
-
+
return winds, heightPerI[:-1]
-
+
def techniqueNSM_SA(self, **kwargs):
metArray = kwargs['metArray']
heightList = kwargs['heightList']
timeList = kwargs['timeList']
-
+
rx_location = kwargs['rx_location']
groupList = kwargs['groupList']
azimuth = kwargs['azimuth']
dfactor = kwargs['dfactor']
k = kwargs['k']
-
+
azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
d = dist*dfactor
#Phase calculation
metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
-
+
metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
-
+
velEst = numpy.zeros((heightList.size,2))*numpy.nan
azimuth1 = azimuth1*numpy.pi/180
-
+
for i in range(heightList.size):
h = heightList[i]
indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
@@ -1997,71 +2880,71 @@ class WindProfiler(Operation):
A = numpy.asmatrix(A)
A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
velHor = numpy.dot(A1,velAux)
-
+
velEst[i,:] = numpy.squeeze(velHor)
return velEst
-
+
def __getPhaseSlope(self, metArray, heightList, timeList):
meteorList = []
#utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
#Putting back together the meteor matrix
utctime = metArray[:,0]
uniqueTime = numpy.unique(utctime)
-
+
phaseDerThresh = 0.5
ippSeconds = timeList[1] - timeList[0]
sec = numpy.where(timeList>1)[0][0]
nPairs = metArray.shape[1] - 6
nHeights = len(heightList)
-
+
for t in uniqueTime:
metArray1 = metArray[utctime==t,:]
# phaseDerThresh = numpy.pi/4 #reducir Phase thresh
tmet = metArray1[:,1].astype(int)
hmet = metArray1[:,2].astype(int)
-
+
metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
metPhase[:,:] = numpy.nan
metPhase[:,hmet,tmet] = metArray1[:,6:].T
-
+
#Delete short trails
metBool = ~numpy.isnan(metPhase[0,:,:])
heightVect = numpy.sum(metBool, axis = 1)
metBool[heightVect phaseDerThresh))
metPhase[phDerAux] = numpy.nan
-
+
#--------------------------METEOR DETECTION -----------------------------------------
indMet = numpy.where(numpy.any(metBool,axis=1))[0]
-
+
for p in numpy.arange(nPairs):
phase = metPhase[p,:,:]
phDer = metDer[p,:,:]
-
+
for h in indMet:
height = heightList[h]
phase1 = phase[h,:] #82
phDer1 = phDer[h,:]
-
+
phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
-
+
indValid = numpy.where(~numpy.isnan(phase1))[0]
initMet = indValid[0]
endMet = 0
-
+
for i in range(len(indValid)-1):
-
+
#Time difference
inow = indValid[i]
inext = indValid[i+1]
idiff = inext - inow
#Phase difference
- phDiff = numpy.abs(phase1[inext] - phase1[inow])
-
+ phDiff = numpy.abs(phase1[inext] - phase1[inow])
+
if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
sizeTrail = inow - initMet + 1
if sizeTrail>3*sec: #Too short meteors
@@ -2077,43 +2960,43 @@ class WindProfiler(Operation):
vel = slope#*height*1000/(k*d)
estAux = numpy.array([utctime,p,height, vel, rsq])
meteorList.append(estAux)
- initMet = inext
+ initMet = inext
metArray2 = numpy.array(meteorList)
-
+
return metArray2
-
+
def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
-
+
azimuth1 = numpy.zeros(len(pairslist))
dist = numpy.zeros(len(pairslist))
-
+
for i in range(len(rx_location)):
ch0 = pairslist[i][0]
ch1 = pairslist[i][1]
-
+
diffX = rx_location[ch0][0] - rx_location[ch1][0]
diffY = rx_location[ch0][1] - rx_location[ch1][1]
azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
dist[i] = numpy.sqrt(diffX**2 + diffY**2)
-
+
azimuth1 -= azimuth0
return azimuth1, dist
-
+
def techniqueNSM_DBS(self, **kwargs):
metArray = kwargs['metArray']
heightList = kwargs['heightList']
- timeList = kwargs['timeList']
+ timeList = kwargs['timeList']
azimuth = kwargs['azimuth']
theta_x = numpy.array(kwargs['theta_x'])
theta_y = numpy.array(kwargs['theta_y'])
-
+
utctime = metArray[:,0]
cmet = metArray[:,1].astype(int)
hmet = metArray[:,3].astype(int)
SNRmet = metArray[:,4]
vmet = metArray[:,5]
spcmet = metArray[:,6]
-
+
nChan = numpy.max(cmet) + 1
nHeights = len(heightList)
@@ -2129,20 +3012,20 @@ class WindProfiler(Operation):
thisH = (h1met>=hmin) & (h1met8) & (vmet<50) & (spcmet<10)
indthisH = numpy.where(thisH)
-
+
if numpy.size(indthisH) > 3:
-
+
vel_aux = vmet[thisH]
chan_aux = cmet[thisH]
cosu_aux = dir_cosu[chan_aux]
cosv_aux = dir_cosv[chan_aux]
cosw_aux = dir_cosw[chan_aux]
-
- nch = numpy.size(numpy.unique(chan_aux))
+
+ nch = numpy.size(numpy.unique(chan_aux))
if nch > 1:
A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
velEst[i,:] = numpy.dot(A,vel_aux)
-
+
return velEst
def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
@@ -2152,40 +3035,40 @@ class WindProfiler(Operation):
absc = dataOut.abscissaList[:-1]
# noise = dataOut.noise
heightList = dataOut.heightList
- SNR = dataOut.data_snr
-
+ SNR = dataOut.data_SNR
+
if technique == 'DBS':
-
- kwargs['velRadial'] = param[:,1,:] #Radial velocity
+
+ kwargs['velRadial'] = param[:,1,:] #Radial velocity
kwargs['heightList'] = heightList
kwargs['SNR'] = SNR
-
- dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
+
+ dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
dataOut.utctimeInit = dataOut.utctime
dataOut.outputInterval = dataOut.paramInterval
-
+
elif technique == 'SA':
-
+
#Parameters
# position_x = kwargs['positionX']
# position_y = kwargs['positionY']
# azimuth = kwargs['azimuth']
-#
+#
# if kwargs.has_key('crosspairsList'):
# pairs = kwargs['crosspairsList']
# else:
-# pairs = None
-#
+# pairs = None
+#
# if kwargs.has_key('correctFactor'):
# correctFactor = kwargs['correctFactor']
# else:
# correctFactor = 1
-
+
# tau = dataOut.data_param
# _lambda = dataOut.C/dataOut.frequency
# pairsList = dataOut.groupList
# nChannels = dataOut.nChannels
-
+
kwargs['groupList'] = dataOut.groupList
kwargs['tau'] = dataOut.data_param
kwargs['_lambda'] = dataOut.C/dataOut.frequency
@@ -2193,30 +3076,30 @@ class WindProfiler(Operation):
dataOut.data_output = self.techniqueSA(kwargs)
dataOut.utctimeInit = dataOut.utctime
dataOut.outputInterval = dataOut.timeInterval
-
- elif technique == 'Meteors':
+
+ elif technique == 'Meteors':
dataOut.flagNoData = True
self.__dataReady = False
-
+
if 'nHours' in kwargs:
nHours = kwargs['nHours']
- else:
+ else:
nHours = 1
-
+
if 'meteorsPerBin' in kwargs:
meteorThresh = kwargs['meteorsPerBin']
else:
meteorThresh = 6
-
+
if 'hmin' in kwargs:
hmin = kwargs['hmin']
else: hmin = 70
if 'hmax' in kwargs:
hmax = kwargs['hmax']
else: hmax = 110
-
+
dataOut.outputInterval = nHours*3600
-
+
if self.__isConfig == False:
# self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
#Get Initial LTC time
@@ -2224,29 +3107,29 @@ class WindProfiler(Operation):
self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
self.__isConfig = True
-
+
if self.__buffer is None:
self.__buffer = dataOut.data_param
self.__firstdata = copy.copy(dataOut)
else:
self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
-
+
self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
-
+
if self.__dataReady:
dataOut.utctimeInit = self.__initime
-
+
self.__initime += dataOut.outputInterval #to erase time offset
-
+
dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
dataOut.flagNoData = False
self.__buffer = None
-
+
elif technique == 'Meteors1':
dataOut.flagNoData = True
self.__dataReady = False
-
+
if 'nMins' in kwargs:
nMins = kwargs['nMins']
else: nMins = 20
@@ -2261,7 +3144,7 @@ class WindProfiler(Operation):
if 'mode' in kwargs:
mode = kwargs['mode']
if 'theta_x' in kwargs:
- theta_x = kwargs['theta_x']
+ theta_x = kwargs['theta_x']
if 'theta_y' in kwargs:
theta_y = kwargs['theta_y']
else: mode = 'SA'
@@ -2274,10 +3157,10 @@ class WindProfiler(Operation):
freq = 50e6
lamb = C/freq
k = 2*numpy.pi/lamb
-
+
timeList = dataOut.abscissaList
heightList = dataOut.heightList
-
+
if self.__isConfig == False:
dataOut.outputInterval = nMins*60
# self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
@@ -2288,20 +3171,20 @@ class WindProfiler(Operation):
self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
self.__isConfig = True
-
+
if self.__buffer is None:
self.__buffer = dataOut.data_param
self.__firstdata = copy.copy(dataOut)
else:
self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
-
+
self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
-
+
if self.__dataReady:
dataOut.utctimeInit = self.__initime
self.__initime += dataOut.outputInterval #to erase time offset
-
+
metArray = self.__buffer
if mode == 'SA':
dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
@@ -2312,74 +3195,234 @@ class WindProfiler(Operation):
self.__buffer = None
return
-
+
class EWDriftsEstimation(Operation):
-
- def __init__(self):
- Operation.__init__(self)
-
+
+ def __init__(self):
+ Operation.__init__(self)
+
def __correctValues(self, heiRang, phi, velRadial, SNR):
listPhi = phi.tolist()
maxid = listPhi.index(max(listPhi))
minid = listPhi.index(min(listPhi))
-
- rango = list(range(len(phi)))
+
+ rango = list(range(len(phi)))
# rango = numpy.delete(rango,maxid)
-
+
heiRang1 = heiRang*math.cos(phi[maxid])
heiRangAux = heiRang*math.cos(phi[minid])
indOut = (heiRang1 < heiRangAux[0]).nonzero()
heiRang1 = numpy.delete(heiRang1,indOut)
-
+
velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
SNR1 = numpy.zeros([len(phi),len(heiRang1)])
-
+
for i in rango:
x = heiRang*math.cos(phi[i])
y1 = velRadial[i,:]
- f1 = interpolate.interp1d(x,y1,kind = 'cubic')
-
- x1 = heiRang1
+ vali= (numpy.isfinite(y1)==True).nonzero()
+ y1=y1[vali]
+ x = x[vali]
+ f1 = interpolate.interp1d(x,y1,kind = 'cubic',bounds_error=False)
+
+ #heiRang1 = x*math.cos(phi[maxid])
+ x1 = heiRang1
y11 = f1(x1)
-
+
y2 = SNR[i,:]
- f2 = interpolate.interp1d(x,y2,kind = 'cubic')
+ #print 'snr ', y2
+ x = heiRang*math.cos(phi[i])
+ vali= (y2 != -1).nonzero()
+ y2 = y2[vali]
+ x = x[vali]
+ #print 'snr ',y2
+ f2 = interpolate.interp1d(x,y2,kind = 'cubic',bounds_error=False)
y21 = f2(x1)
-
+
velRadial1[i,:] = y11
SNR1[i,:] = y21
-
+
return heiRang1, velRadial1, SNR1
+
+
def run(self, dataOut, zenith, zenithCorrection):
+ import matplotlib.pyplot as plt
heiRang = dataOut.heightList
velRadial = dataOut.data_param[:,3,:]
- SNR = dataOut.data_snr
-
+ velRadialm = dataOut.data_param[:,2:4,:]*-1
+
+ rbufc=dataOut.data_paramC[:,:,0]
+ ebufc=dataOut.data_paramC[:,:,1]
+ SNR = dataOut.data_SNR
+ velRerr = dataOut.data_error[:,4,:]
+ moments=numpy.vstack(([velRadialm[0,:]],[velRadialm[0,:]],[velRadialm[1,:]],[velRadialm[1,:]]))
+ dataOut.moments=moments
+ # Coherent
+ smooth_wC = ebufc[0,:]
+ p_w0C = rbufc[0,:]
+ p_w1C = rbufc[1,:]
+ w_wC = rbufc[2,:]*-1 #*radial_sign(radial EQ 1)
+ t_wC = rbufc[3,:]
+ my_nbeams = 2
+
+# plt.plot(w_wC)
+# plt.show()
zenith = numpy.array(zenith)
- zenith -= zenithCorrection
+ zenith -= zenithCorrection
zenith *= numpy.pi/180
-
- heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
-
+ if zenithCorrection != 0 :
+ heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
+ else :
+ heiRang1 = heiRang
+ velRadial1 = velRadial
+ SNR1 = SNR
+
alp = zenith[0]
- bet = zenith[1]
+ bet = zenith[1]
+ #t_w(bad) = t_wC(bad)
+ #t_w_err(bad)=!values.f_nan
+
w_w = velRadial1[0,:]
w_e = velRadial1[1,:]
-
- w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
- u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
-
- winds = numpy.vstack((u,w))
-
+ w_w_err = velRerr[0,:]
+ w_e_err = velRerr[1,:]
+ #plt.plot(w_w)
+ #plt.show()
+ #plt.plot(w_e)
+ #plt.show()
+ # bad = where((chisq_w GT 2.5 AND abs(w_w_err) GT 1. AND finite(w_wC))
+# OR abs(w_w) GT 200. OR (NOT finite(w_w)-254) OR ABS(w_w_err) GT 100, cbad)
+ val = (numpy.isfinite(w_w)==False).nonzero()
+ val = val[0]
+ bad = val
+ if len(bad) > 0 :
+ w_w[bad] = w_wC[bad]
+ w_w_err[bad]= numpy.nan
+ if my_nbeams == 2:
+ smooth_eC=ebufc[4,:]
+ p_e0C = rbufc[4,:]
+ p_e1C = rbufc[5,:]
+ w_eC = rbufc[6,:]*-1
+ t_eC = rbufc[7,:]
+ val = (numpy.isfinite(w_e)==False).nonzero()
+ val = val[0]
+ bad = val
+ if len(bad) > 0 :
+ w_e[bad] = w_eC[bad]
+ w_e_err[bad]= numpy.nan
+
+ w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
+ u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
+ #plt.plot(w)
+ #plt.show()
+ #error
+ w_err = numpy.sqrt((w_w_err*numpy.sin(bet))**2.+(w_e_err*numpy.sin(alp))**2.)/ numpy.absolute(numpy.cos(alp)*numpy.sin(bet)-numpy.cos(bet)*numpy.sin(alp))
+ u_err = numpy.sqrt((w_w_err*numpy.cos(bet))**2.+(w_e_err*numpy.cos(alp))**2.)/ numpy.absolute(numpy.cos(alp)*numpy.sin(bet)-numpy.cos(bet)*numpy.sin(alp))
+
+ winds = numpy.vstack((w,u))
+
dataOut.heightList = heiRang1
dataOut.data_output = winds
- dataOut.data_snr = SNR1
-
+ #dataOut.data_SNR = SNR1
+ snr1 = 10*numpy.log10(SNR1[0])
+ dataOut.data_snr1 = numpy.reshape(snr1,(1,snr1.shape[0]))
dataOut.utctimeInit = dataOut.utctime
dataOut.outputInterval = dataOut.timeInterval
- return
+
+ hei_aver0 = 218
+ jrange = 450 #900 para HA drifts
+ deltah = 15.0 #dataOut.spacing(0)
+ h0 = 0.0 #dataOut.first_height(0)
+ heights = dataOut.heightList
+ nhei = len(heights)
+
+ range1 = numpy.arange(nhei) * deltah + h0
+
+ #jhei = WHERE(range1 GE hei_aver0 , jcount)
+ jhei = (range1 >= hei_aver0).nonzero()
+ if len(jhei[0]) > 0 :
+ h0_index = jhei[0][0] # Initial height for getting averages 218km
+
+ mynhei = 7
+ nhei_avg = int(jrange/deltah)
+ h_avgs = int(nhei_avg/mynhei)
+ nhei_avg = h_avgs*(mynhei-1)+mynhei
+
+ navgs = numpy.zeros(mynhei,dtype='float')
+ delta_h = numpy.zeros(mynhei,dtype='float')
+ range_aver = numpy.zeros(mynhei,dtype='float')
+ for ih in range( mynhei-1 ):
+ range_aver[ih] = numpy.sum(range1[h0_index+h_avgs*ih:h0_index+h_avgs*(ih+1)-0])/h_avgs
+ navgs[ih] = h_avgs
+ delta_h[ih] = deltah*h_avgs
+
+ range_aver[mynhei-1] = numpy.sum(range1[h0_index:h0_index+6*h_avgs-0])/(6*h_avgs)
+ navgs[mynhei-1] = 6*h_avgs
+ delta_h[mynhei-1] = deltah*6*h_avgs
+
+ wA = w[h0_index:h0_index+nhei_avg-0]
+ wA_err = w_err[h0_index:h0_index+nhei_avg-0]
+ #print(wA, wA_err)
+ for i in range(5) :
+ vals = wA[i*h_avgs:(i+1)*h_avgs-0]
+ errs = wA_err[i*h_avgs:(i+1)*h_avgs-0]
+ avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
+ sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
+ wA[6*h_avgs+i] = avg
+ wA_err[6*h_avgs+i] = sigma
+
+
+ vals = wA[0:6*h_avgs-0]
+ errs=wA_err[0:6*h_avgs-0]
+ avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2)
+ sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
+ wA[nhei_avg-1] = avg
+ wA_err[nhei_avg-1] = sigma
+
+ wA = wA[6*h_avgs:nhei_avg-0]
+ wA_err=wA_err[6*h_avgs:nhei_avg-0]
+ if my_nbeams == 2 :
+
+ uA = u[h0_index:h0_index+nhei_avg]
+ uA_err=u_err[h0_index:h0_index+nhei_avg]
+
+ for i in range(5) :
+ vals = uA[i*h_avgs:(i+1)*h_avgs-0]
+ errs=uA_err[i*h_avgs:(i+1)*h_avgs-0]
+ avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
+ sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
+ uA[6*h_avgs+i] = avg
+ uA_err[6*h_avgs+i]=sigma
+
+ vals = uA[0:6*h_avgs-0]
+ errs = uA_err[0:6*h_avgs-0]
+ avg = numpy.nansum(vals/errs**2.)/numpy.nansum(1./errs**2.)
+ sigma = numpy.sqrt(1./numpy.nansum(1./errs**2.))
+ uA[nhei_avg-1] = avg
+ uA_err[nhei_avg-1] = sigma
+ uA = uA[6*h_avgs:nhei_avg-0]
+ uA_err = uA_err[6*h_avgs:nhei_avg-0]
+
+ dataOut.drifts_avg = numpy.vstack((wA,uA))
+ #print(dataOut.drifts_avg)
+ tini=time.localtime(dataOut.utctime)
+ datefile= str(tini[0]).zfill(4)+str(tini[1]).zfill(2)+str(tini[2]).zfill(2)
+ nfile = '/home/pcondor/Database/ewdriftsschain2019/jro'+datefile+'drifts_sch3.txt'
+ #print(dataOut.drifts_avg)
+ f1 = open(nfile,'a')
+ #print(nfile)
+ #f.write(datefile)
+ #numpy.savetxt(f,[datefile,datefile],fmt='%10s')
+ datedriftavg=str(tini[0])+' '+str(tini[1])+' '+str(tini[2])+' '+str(tini[3])+' '+str(tini[4])
+ driftavgstr=str(dataOut.drifts_avg)
+ #f1.write(datedriftavg)
+ #f1.write(driftavgstr)
+ numpy.savetxt(f1,numpy.column_stack([tini[0],tini[1],tini[2],tini[3],tini[4]]),fmt='%4i')
+ numpy.savetxt(f1,dataOut.drifts_avg,fmt='%10.2f')
+ f1.close()
+ return dataOut
#--------------- Non Specular Meteor ----------------
@@ -2389,11 +3432,11 @@ class NonSpecularMeteorDetection(Operation):
data_acf = dataOut.data_pre[0]
data_ccf = dataOut.data_pre[1]
pairsList = dataOut.groupList[1]
-
+
lamb = dataOut.C/dataOut.frequency
tSamp = dataOut.ippSeconds*dataOut.nCohInt
paramInterval = dataOut.paramInterval
-
+
nChannels = data_acf.shape[0]
nLags = data_acf.shape[1]
nProfiles = data_acf.shape[2]
@@ -2403,7 +3446,7 @@ class NonSpecularMeteorDetection(Operation):
heightList = dataOut.heightList
ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
utctime = dataOut.utctime
-
+
dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
#------------------------ SNR --------------------------------------
@@ -2415,7 +3458,7 @@ class NonSpecularMeteorDetection(Operation):
SNR[i] = (power[i]-noise[i])/noise[i]
SNRm = numpy.nanmean(SNR, axis = 0)
SNRdB = 10*numpy.log10(SNR)
-
+
if mode == 'SA':
dataOut.groupList = dataOut.groupList[1]
nPairs = data_ccf.shape[0]
@@ -2423,22 +3466,22 @@ class NonSpecularMeteorDetection(Operation):
phase = numpy.zeros(data_ccf[:,0,:,:].shape)
# phase1 = numpy.copy(phase)
coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
-
+
for p in range(nPairs):
ch0 = pairsList[p][0]
ch1 = pairsList[p][1]
ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
- phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
-# phase1[p,:,:] = numpy.angle(ccf) #median filter
- coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
-# coh1[p,:,:] = numpy.abs(ccf) #median filter
+ phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
+# phase1[p,:,:] = numpy.angle(ccf) #median filter
+ coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
+# coh1[p,:,:] = numpy.abs(ccf) #median filter
coh = numpy.nanmax(coh1, axis = 0)
# struc = numpy.ones((5,1))
# coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
#---------------------- Radial Velocity ----------------------------
phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
-
+
if allData:
boolMetFin = ~numpy.isnan(SNRm)
# coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
@@ -2446,31 +3489,31 @@ class NonSpecularMeteorDetection(Operation):
#------------------------ Meteor mask ---------------------------------
# #SNR mask
# boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
-#
+#
# #Erase small objects
-# boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
-#
+# boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
+#
# auxEEJ = numpy.sum(boolMet1,axis=0)
# indOver = auxEEJ>nProfiles*0.8 #Use this later
# indEEJ = numpy.where(indOver)[0]
# indNEEJ = numpy.where(~indOver)[0]
-#
+#
# boolMetFin = boolMet1
-#
+#
# if indEEJ.size > 0:
-# boolMet1[:,indEEJ] = False #Erase heights with EEJ
-#
+# boolMet1[:,indEEJ] = False #Erase heights with EEJ
+#
# boolMet2 = coh > cohThresh
# boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
-#
+#
# #Final Meteor mask
# boolMetFin = boolMet1|boolMet2
-
+
#Coherence mask
boolMet1 = coh > 0.75
struc = numpy.ones((30,1))
boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
-
+
#Derivative mask
derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
boolMet2 = derPhase < 0.2
@@ -2487,7 +3530,7 @@ class NonSpecularMeteorDetection(Operation):
tmet = coordMet[0]
hmet = coordMet[1]
-
+
data_param = numpy.zeros((tmet.size, 6 + nPairs))
data_param[:,0] = utctime
data_param[:,1] = tmet
@@ -2496,7 +3539,7 @@ class NonSpecularMeteorDetection(Operation):
data_param[:,4] = velRad[tmet,hmet]
data_param[:,5] = coh[tmet,hmet]
data_param[:,6:] = phase[:,tmet,hmet].T
-
+
elif mode == 'DBS':
dataOut.groupList = numpy.arange(nChannels)
@@ -2504,7 +3547,7 @@ class NonSpecularMeteorDetection(Operation):
phase = numpy.angle(data_acf[:,1,:,:])
# phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
velRad = phase*lamb/(4*numpy.pi*tSamp)
-
+
#Spectral width
# acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
# acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
@@ -2519,24 +3562,24 @@ class NonSpecularMeteorDetection(Operation):
#SNR
boolMet1 = (SNRdB>SNRthresh) #SNR mask
boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
-
+
#Radial velocity
boolMet2 = numpy.abs(velRad) < 20
boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
-
+
#Spectral Width
boolMet3 = spcWidth < 30
boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
# boolMetFin = self.__erase_small(boolMet1, 10,5)
boolMetFin = boolMet1&boolMet2&boolMet3
-
+
#Creating data_param
coordMet = numpy.where(boolMetFin)
cmet = coordMet[0]
tmet = coordMet[1]
hmet = coordMet[2]
-
+
data_param = numpy.zeros((tmet.size, 7))
data_param[:,0] = utctime
data_param[:,1] = cmet
@@ -2545,7 +3588,7 @@ class NonSpecularMeteorDetection(Operation):
data_param[:,4] = SNR[cmet,tmet,hmet].T
data_param[:,5] = velRad[cmet,tmet,hmet].T
data_param[:,6] = spcWidth[cmet,tmet,hmet].T
-
+
# self.dataOut.data_param = data_int
if len(data_param) == 0:
dataOut.flagNoData = True
@@ -2555,21 +3598,21 @@ class NonSpecularMeteorDetection(Operation):
def __erase_small(self, binArray, threshX, threshY):
labarray, numfeat = ndimage.measurements.label(binArray)
binArray1 = numpy.copy(binArray)
-
+
for i in range(1,numfeat + 1):
auxBin = (labarray==i)
auxSize = auxBin.sum()
-
+
x,y = numpy.where(auxBin)
widthX = x.max() - x.min()
widthY = y.max() - y.min()
-
+
#width X: 3 seg -> 12.5*3
- #width Y:
-
+ #width Y:
+
if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
binArray1[auxBin] = False
-
+
return binArray1
#--------------- Specular Meteor ----------------
@@ -2579,36 +3622,36 @@ class SMDetection(Operation):
Function DetectMeteors()
Project developed with paper:
HOLDSWORTH ET AL. 2004
-
+
Input:
self.dataOut.data_pre
-
+
centerReceiverIndex: From the channels, which is the center receiver
-
+
hei_ref: Height reference for the Beacon signal extraction
tauindex:
predefinedPhaseShifts: Predefined phase offset for the voltge signals
-
+
cohDetection: Whether to user Coherent detection or not
cohDet_timeStep: Coherent Detection calculation time step
cohDet_thresh: Coherent Detection phase threshold to correct phases
-
+
noise_timeStep: Noise calculation time step
noise_multiple: Noise multiple to define signal threshold
-
+
multDet_timeLimit: Multiple Detection Removal time limit in seconds
multDet_rangeLimit: Multiple Detection Removal range limit in km
-
+
phaseThresh: Maximum phase difference between receiver to be consider a meteor
- SNRThresh: Minimum SNR threshold of the meteor signal to be consider a meteor
-
+ SNRThresh: Minimum SNR threshold of the meteor signal to be considered a meteor
+
hmin: Minimum Height of the meteor to use it in the further wind estimations
hmax: Maximum Height of the meteor to use it in the further wind estimations
azimuth: Azimuth angle correction
-
+
Affected:
self.dataOut.data_param
-
+
Rejection Criteria (Errors):
0: No error; analysis OK
1: SNR < SNR threshold
@@ -2627,9 +3670,9 @@ class SMDetection(Operation):
14: height ambiguous echo: more then one possible height within 70 to 110 km
15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
16: oscilatory echo, indicating event most likely not an underdense echo
-
+
17: phase difference in meteor Reestimation
-
+
Data Storage:
Meteors for Wind Estimation (8):
Utc Time | Range Height
@@ -2637,75 +3680,75 @@ class SMDetection(Operation):
VelRad errorVelRad
Phase0 Phase1 Phase2 Phase3
TypeError
-
- '''
-
+
+ '''
+
def run(self, dataOut, hei_ref = None, tauindex = 0,
phaseOffsets = None,
- cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
+ cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
noise_timeStep = 4, noise_multiple = 4,
multDet_timeLimit = 1, multDet_rangeLimit = 3,
phaseThresh = 20, SNRThresh = 5,
hmin = 50, hmax=150, azimuth = 0,
channelPositions = None) :
-
-
+
+
#Getting Pairslist
if channelPositions is None:
# channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
meteorOps = SMOperations()
pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
- heiRang = dataOut.heightList
+ heiRang = dataOut.getHeiRange()
#Get Beacon signal - No Beacon signal anymore
# newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
-#
+#
# if hei_ref != None:
# newheis = numpy.where(self.dataOut.heightList>hei_ref)
-#
-
-
+#
+
+
#****************REMOVING HARDWARE PHASE DIFFERENCES***************
# see if the user put in pre defined phase shifts
voltsPShift = dataOut.data_pre.copy()
-
+
# if predefinedPhaseShifts != None:
# hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
-#
+#
# # elif beaconPhaseShifts:
# # #get hardware phase shifts using beacon signal
# # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
# # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
-#
+#
# else:
-# hardwarePhaseShifts = numpy.zeros(5)
-#
+# hardwarePhaseShifts = numpy.zeros(5)
+#
# voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
# for i in range(self.dataOut.data_pre.shape[0]):
# voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
#******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
-
+
#Remove DC
voltsDC = numpy.mean(voltsPShift,1)
voltsDC = numpy.mean(voltsDC,1)
for i in range(voltsDC.shape[0]):
voltsPShift[i] = voltsPShift[i] - voltsDC[i]
-
- #Don't considerate last heights, theyre used to calculate Hardware Phase Shift
+
+ #Don't consider the last heights; they're used to calculate the Hardware Phase Shift
# voltsPShift = voltsPShift[:,:,:newheis[0][0]]
-
+
#************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
#Coherent Detection
if cohDetection:
#use coherent detection to get the net power
cohDet_thresh = cohDet_thresh*numpy.pi/180
voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
-
+
#Non-coherent detection!
powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
#********** END OF COH/NON-COH POWER CALCULATION**********************
-
+
#********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
#Get noise
noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
@@ -2715,17 +3758,17 @@ class SMDetection(Operation):
#Meteor echoes detection
listMeteors = self.__findMeteors(powerNet, signalThresh)
#******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
-
+
#************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
#Parameters
- heiRange = dataOut.heightList
+ heiRange = dataOut.getHeiRange()
rangeInterval = heiRange[1] - heiRange[0]
rangeLimit = multDet_rangeLimit/rangeInterval
timeLimit = multDet_timeLimit/dataOut.timeInterval
#Multiple detection removals
listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
#************ END OF REMOVE MULTIPLE DETECTIONS **********************
-
+
#********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
#Parameters
phaseThresh = phaseThresh*numpy.pi/180
@@ -2736,40 +3779,40 @@ class SMDetection(Operation):
#Estimation of decay times (Errors N 7, 8, 11)
listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
#******************* END OF METEOR REESTIMATION *******************
-
+
#********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
#Calculating Radial Velocity (Error N 15)
radialStdThresh = 10
- listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
+ listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
if len(listMeteors4) > 0:
#Setting New Array
date = dataOut.utctime
arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
-
+
#Correcting phase offset
if phaseOffsets != None:
phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
-
+
#Second Pairslist
pairsList = []
pairx = (0,1)
pairy = (2,3)
pairsList.append(pairx)
pairsList.append(pairy)
-
+
jph = numpy.array([0,0,0,0])
h = (hmin,hmax)
arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
-
+
# #Calculate AOA (Error N 3, 4)
# #JONES ET AL. 1998
# error = arrayParameters[:,-1]
# AOAthresh = numpy.pi/8
# phases = -arrayParameters[:,9:13]
# arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
-#
+#
# #Calculate Heights (Error N 13 and 14)
# error = arrayParameters[:,-1]
# Ranges = arrayParameters[:,2]
@@ -2777,73 +3820,73 @@ class SMDetection(Operation):
# arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
# error = arrayParameters[:,-1]
#********************* END OF PARAMETERS CALCULATION **************************
-
- #***************************+ PASS DATA TO NEXT STEP **********************
+
+ #***************************+ PASS DATA TO NEXT STEP **********************
# arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
dataOut.data_param = arrayParameters
-
+
if arrayParameters is None:
dataOut.flagNoData = True
else:
dataOut.flagNoData = True
-
+
return
-
+
def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
-
+
minIndex = min(newheis[0])
maxIndex = max(newheis[0])
-
+
voltage = voltage0[:,:,minIndex:maxIndex+1]
nLength = voltage.shape[1]/n
nMin = 0
nMax = 0
phaseOffset = numpy.zeros((len(pairslist),n))
-
+
for i in range(n):
nMax += nLength
phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
phaseCCF = numpy.mean(phaseCCF, axis = 2)
- phaseOffset[:,i] = phaseCCF.transpose()
+ phaseOffset[:,i] = phaseCCF.transpose()
nMin = nMax
# phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
-
+
#Remove Outliers
factor = 2
wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
dw = numpy.std(wt,axis = 1)
dw = dw.reshape((dw.size,1))
- ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
+ ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
phaseOffset[ind] = numpy.nan
- phaseOffset = stats.nanmean(phaseOffset, axis=1)
-
+ phaseOffset = stats.nanmean(phaseOffset, axis=1)
+
return phaseOffset
-
+
def __shiftPhase(self, data, phaseShift):
#this will shift the phase of a complex number
- dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
+ dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
return dataShifted
-
+
def __estimatePhaseDifference(self, array, pairslist):
nChannel = array.shape[0]
nHeights = array.shape[2]
numPairs = len(pairslist)
# phaseCCF = numpy.zeros((nChannel, 5, nHeights))
phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
-
+
#Correct phases
derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
-
- if indDer[0].shape[0] > 0:
+
+ if indDer[0].shape[0] > 0:
for i in range(indDer[0].shape[0]):
signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
-
+
# for j in range(numSides):
# phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
# phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
-#
+#
#Linear
phaseInt = numpy.zeros((numPairs,1))
angAllCCF = phaseCCF[:,[0,1,3,4],0]
@@ -2853,16 +3896,16 @@ class SMDetection(Operation):
#Phase Differences
phaseDiff = phaseInt - phaseCCF[:,2,:]
phaseArrival = phaseInt.reshape(phaseInt.size)
-
+
#Dealias
phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
# indAlias = numpy.where(phaseArrival > numpy.pi)
# phaseArrival[indAlias] -= 2*numpy.pi
# indAlias = numpy.where(phaseArrival < -numpy.pi)
# phaseArrival[indAlias] += 2*numpy.pi
-
+
return phaseDiff, phaseArrival
-
+
def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
#this function will run the coherent detection used in Holdworth et al. 2004 and return the net power
#find the phase shifts of each channel over 1 second intervals
@@ -2872,25 +3915,25 @@ class SMDetection(Operation):
numHeights = volts.shape[2]
nChannel = volts.shape[0]
voltsCohDet = volts.copy()
-
+
pairsarray = numpy.array(pairslist)
indSides = pairsarray[:,1]
# indSides = numpy.array(range(nChannel))
# indSides = numpy.delete(indSides, indCenter)
-#
+#
# listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
listBlocks = numpy.array_split(volts, numBlocks, 1)
-
+
startInd = 0
endInd = 0
-
+
for i in range(numBlocks):
startInd = endInd
- endInd = endInd + listBlocks[i].shape[1]
-
+ endInd = endInd + listBlocks[i].shape[1]
+
arrayBlock = listBlocks[i]
# arrayBlockCenter = listCenter[i]
-
+
#Estimate the Phase Difference
phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
#Phase Difference RMS
@@ -2902,21 +3945,21 @@ class SMDetection(Operation):
for j in range(indSides.size):
arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
voltsCohDet[:,startInd:endInd,:] = arrayBlock
-
+
return voltsCohDet
-
+
def __calculateCCF(self, volts, pairslist ,laglist):
-
+
nHeights = volts.shape[2]
- nPoints = volts.shape[1]
+ nPoints = volts.shape[1]
voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
-
+
for i in range(len(pairslist)):
volts1 = volts[pairslist[i][0]]
- volts2 = volts[pairslist[i][1]]
-
+ volts2 = volts[pairslist[i][1]]
+
for t in range(len(laglist)):
- idxT = laglist[t]
+ idxT = laglist[t]
if idxT >= 0:
vStacked = numpy.vstack((volts2[idxT:,:],
numpy.zeros((idxT, nHeights),dtype='complex')))
@@ -2924,10 +3967,10 @@ class SMDetection(Operation):
vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
volts2[:(nPoints + idxT),:]))
voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
-
+
vStacked = None
return voltsCCF
-
+
def __getNoise(self, power, timeSegment, timeInterval):
numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
numBlocks = int(power.shape[0]/numProfPerBlock)
@@ -2936,100 +3979,100 @@ class SMDetection(Operation):
listPower = numpy.array_split(power, numBlocks, 0)
noise = numpy.zeros((power.shape[0], power.shape[1]))
noise1 = numpy.zeros((power.shape[0], power.shape[1]))
-
+
startInd = 0
endInd = 0
-
+
for i in range(numBlocks): #split por canal
startInd = endInd
- endInd = endInd + listPower[i].shape[0]
-
+ endInd = endInd + listPower[i].shape[0]
+
arrayBlock = listPower[i]
noiseAux = numpy.mean(arrayBlock, 0)
# noiseAux = numpy.median(noiseAux)
# noiseAux = numpy.mean(arrayBlock)
- noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
-
+ noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
+
noiseAux1 = numpy.mean(arrayBlock)
- noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
-
+ noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
+
return noise, noise1
-
+
def __findMeteors(self, power, thresh):
nProf = power.shape[0]
nHeights = power.shape[1]
listMeteors = []
-
+
for i in range(nHeights):
powerAux = power[:,i]
threshAux = thresh[:,i]
-
+
indUPthresh = numpy.where(powerAux > threshAux)[0]
indDNthresh = numpy.where(powerAux <= threshAux)[0]
-
+
j = 0
-
+
while (j < indUPthresh.size - 2):
if (indUPthresh[j + 2] == indUPthresh[j] + 2):
indDNAux = numpy.where(indDNthresh > indUPthresh[j])
indDNthresh = indDNthresh[indDNAux]
-
+
if (indDNthresh.size > 0):
indEnd = indDNthresh[0] - 1
indInit = indUPthresh[j]
-
+
meteor = powerAux[indInit:indEnd + 1]
indPeak = meteor.argmax() + indInit
FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
-
+
listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHEQUEAR!!!!!
j = numpy.where(indUPthresh == indEnd)[0] + 1
else: j+=1
else: j+=1
-
+
return listMeteors
-
+
def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
-
- arrayMeteors = numpy.asarray(listMeteors)
+
+ arrayMeteors = numpy.asarray(listMeteors)
listMeteors1 = []
-
+
while arrayMeteors.shape[0] > 0:
FLAs = arrayMeteors[:,4]
maxFLA = FLAs.argmax()
listMeteors1.append(arrayMeteors[maxFLA,:])
-
+
MeteorInitTime = arrayMeteors[maxFLA,1]
MeteorEndTime = arrayMeteors[maxFLA,3]
MeteorHeight = arrayMeteors[maxFLA,0]
-
+
#Check neighborhood
maxHeightIndex = MeteorHeight + rangeLimit
minHeightIndex = MeteorHeight - rangeLimit
minTimeIndex = MeteorInitTime - timeLimit
maxTimeIndex = MeteorEndTime + timeLimit
-
+
#Check Heights
indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
-
+
arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
-
+
return listMeteors1
-
+
def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
numHeights = volts.shape[2]
nChannel = volts.shape[0]
-
+
thresholdPhase = thresh[0]
thresholdNoise = thresh[1]
thresholdDB = float(thresh[2])
-
+
thresholdDB1 = 10**(thresholdDB/10)
pairsarray = numpy.array(pairslist)
indSides = pairsarray[:,1]
-
+
pairslist1 = list(pairslist)
pairslist1.append((0,1))
pairslist1.append((3,4))
@@ -3038,31 +4081,31 @@ class SMDetection(Operation):
listPowerSeries = []
listVoltageSeries = []
#volts has the war data
-
+
if frequency == 30e6:
timeLag = 45*10**-3
else:
timeLag = 15*10**-3
lag = numpy.ceil(timeLag/timeInterval)
-
+
for i in range(len(listMeteors)):
-
+
###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
meteorAux = numpy.zeros(16)
-
+
#Loading meteor Data (mHeight, mStart, mPeak, mEnd)
mHeight = listMeteors[i][0]
mStart = listMeteors[i][1]
mPeak = listMeteors[i][2]
mEnd = listMeteors[i][3]
-
+
#get the volt data between the start and end times of the meteor
meteorVolts = volts[:,mStart:mEnd+1,mHeight]
meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
-
+
#3.6. Phase Difference estimation
phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
-
+
#3.7. Phase difference removal & meteor start, peak and end times reestimated
#meteorVolts0.- all Channels, all Profiles
meteorVolts0 = volts[:,:,mHeight]
@@ -3070,15 +4113,15 @@ class SMDetection(Operation):
meteorNoise = noise[:,mHeight]
meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
-
+
#Times reestimation
mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
if mStart1.size > 0:
mStart1 = mStart1[-1] + 1
-
- else:
+
+ else:
mStart1 = mPeak
-
+
mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
if mEndDecayTime1.size == 0:
@@ -3086,7 +4129,7 @@ class SMDetection(Operation):
else:
mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
# mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
-
+
#meteorVolts1.- all Channels, from start to end
meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
@@ -3095,17 +4138,17 @@ class SMDetection(Operation):
meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
##################### END PARAMETERS REESTIMATION #########################
-
+
##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
# if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
- if meteorVolts2.shape[1] > 0:
+ if meteorVolts2.shape[1] > 0:
#Phase Difference re-estimation
phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
# phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
-
+
#Phase Difference RMS
phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
@@ -3120,27 +4163,27 @@ class SMDetection(Operation):
#Vectorize
meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
meteorAux[7:11] = phaseDiffint[0:4]
-
+
#Rejection Criterions
if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
meteorAux[-1] = 17
elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
meteorAux[-1] = 1
-
-
- else:
+
+
+ else:
meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
PowerSeries = 0
-
+
listMeteors1.append(meteorAux)
listPowerSeries.append(PowerSeries)
listVoltageSeries.append(meteorVolts1)
-
- return listMeteors1, listPowerSeries, listVoltageSeries
-
+
+ return listMeteors1, listPowerSeries, listVoltageSeries
+
def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
-
+
threshError = 10
#Depending if it is 30 or 50 MHz
if frequency == 30e6:
@@ -3148,22 +4191,22 @@ class SMDetection(Operation):
else:
timeLag = 15*10**-3
lag = numpy.ceil(timeLag/timeInterval)
-
+
listMeteors1 = []
-
+
for i in range(len(listMeteors)):
meteorPower = listPower[i]
meteorAux = listMeteors[i]
-
+
if meteorAux[-1] == 0:
- try:
+ try:
indmax = meteorPower.argmax()
indlag = indmax + lag
-
+
y = meteorPower[indlag:]
x = numpy.arange(0, y.size)*timeLag
-
+
#first guess
a = y[0]
tau = timeLag
@@ -3172,26 +4215,26 @@ class SMDetection(Operation):
y1 = self.__exponential_function(x, *popt)
#error estimation
error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
-
+
decayTime = popt[1]
riseTime = indmax*timeInterval
meteorAux[11:13] = [decayTime, error]
-
+
#Table items 7, 8 and 11
if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
- meteorAux[-1] = 7
+ meteorAux[-1] = 7
elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than than twice rise time
meteorAux[-1] = 8
if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
- meteorAux[-1] = 11
-
-
+ meteorAux[-1] = 11
+
+
except:
- meteorAux[-1] = 11
-
-
+ meteorAux[-1] = 11
+
+
listMeteors1.append(meteorAux)
-
+
return listMeteors1
#Exponential Function
@@ -3199,9 +4242,9 @@ class SMDetection(Operation):
def __exponential_function(self, x, a, tau):
y = a*numpy.exp(-x/tau)
return y
-
+
def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
-
+
pairslist1 = list(pairslist)
pairslist1.append((0,1))
pairslist1.append((3,4))
@@ -3211,33 +4254,33 @@ class SMDetection(Operation):
c = 3e8
lag = numpy.ceil(timeLag/timeInterval)
freq = 30e6
-
+
listMeteors1 = []
-
+
for i in range(len(listMeteors)):
meteorAux = listMeteors[i]
if meteorAux[-1] == 0:
mStart = listMeteors[i][1]
- mPeak = listMeteors[i][2]
+ mPeak = listMeteors[i][2]
mLag = mPeak - mStart + lag
-
+
#get the volt data between the start and end times of the meteor
meteorVolts = listVolts[i]
meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
#Get CCF
allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
-
+
#Method 2
slopes = numpy.zeros(numPairs)
time = numpy.array([-2,-1,1,2])*timeInterval
angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
-
+
#Correct phases
derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
-
- if indDer[0].shape[0] > 0:
+
+ if indDer[0].shape[0] > 0:
for i in range(indDer[0].shape[0]):
signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
@@ -3246,51 +4289,51 @@ class SMDetection(Operation):
for j in range(numPairs):
fit = stats.linregress(time, angAllCCF[j,:])
slopes[j] = fit[0]
-
+
#Remove Outlier
# indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
# slopes = numpy.delete(slopes,indOut)
# indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
# slopes = numpy.delete(slopes,indOut)
-
+
radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
meteorAux[-2] = radialError
meteorAux[-3] = radialVelocity
-
+
#Setting Error
#Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
- if numpy.abs(radialVelocity) > 200:
+ if numpy.abs(radialVelocity) > 200:
meteorAux[-1] = 15
#Number 12: Poor fit to CCF variation for estimation of radial drift velocity
elif radialError > radialStdThresh:
meteorAux[-1] = 12
-
+
listMeteors1.append(meteorAux)
return listMeteors1
-
+
def __setNewArrays(self, listMeteors, date, heiRang):
-
+
#New arrays
arrayMeteors = numpy.array(listMeteors)
arrayParameters = numpy.zeros((len(listMeteors), 13))
-
+
#Date inclusion
# date = re.findall(r'\((.*?)\)', date)
# date = date[0].split(',')
# date = map(int, date)
-#
+#
# if len(date)<6:
# date.append(0)
-#
+#
# date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
# arrayDate = numpy.tile(date, (len(listMeteors), 1))
arrayDate = numpy.tile(date, (len(listMeteors)))
-
+
#Meteor array
# arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
# arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
-
+
#Parameters Array
arrayParameters[:,0] = arrayDate #Date
arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
@@ -3298,13 +4341,13 @@ class SMDetection(Operation):
arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
-
+
return arrayParameters
-
+
class CorrectSMPhases(Operation):
-
+
def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
-
+
arrayParameters = dataOut.data_param
pairsList = []
pairx = (0,1)
@@ -3312,49 +4355,49 @@ class CorrectSMPhases(Operation):
pairsList.append(pairx)
pairsList.append(pairy)
jph = numpy.zeros(4)
-
+
phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
# arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
-
+
meteorOps = SMOperations()
if channelPositions is None:
# channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
-
+
pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
h = (hmin,hmax)
-
+
arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
-
+
dataOut.data_param = arrayParameters
return
class SMPhaseCalibration(Operation):
-
+
__buffer = None
__initime = None
__dataReady = False
-
+
__isConfig = False
-
+
def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
-
+
dataTime = currentTime + paramInterval
deltaTime = dataTime - initTime
-
+
if deltaTime >= outputInterval or deltaTime < 0:
return True
-
+
return False
-
+
def __getGammas(self, pairs, d, phases):
gammas = numpy.zeros(2)
-
+
for i in range(len(pairs)):
-
+
pairi = pairs[i]
phip3 = phases[:,pairi[0]]
@@ -3368,7 +4411,7 @@ class SMPhaseCalibration(Operation):
jgamma = numpy.angle(numpy.exp(1j*jgamma))
# jgamma[jgamma>numpy.pi] -= 2*numpy.pi
# jgamma[jgamma<-numpy.pi] += 2*numpy.pi
-
+
#Revised distribution
jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
@@ -3377,39 +4420,39 @@ class SMPhaseCalibration(Operation):
rmin = -0.5*numpy.pi
rmax = 0.5*numpy.pi
phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
-
+
meteorsY = phaseHisto[0]
phasesX = phaseHisto[1][:-1]
width = phasesX[1] - phasesX[0]
phasesX += width/2
-
+
#Gaussian aproximation
bpeak = meteorsY.argmax()
peak = meteorsY.max()
jmin = bpeak - 5
jmax = bpeak + 5 + 1
-
+
if jmin<0:
jmin = 0
jmax = 6
elif jmax > meteorsY.size:
jmin = meteorsY.size - 6
jmax = meteorsY.size
-
+
x0 = numpy.array([peak,bpeak,50])
coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
-
+
#Gammas
gammas[i] = coeff[0][1]
-
+
return gammas
-
+
def __residualFunction(self, coeffs, y, t):
-
+
return y - self.__gauss_function(t, coeffs)
def __gauss_function(self, t, coeffs):
-
+
return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
@@ -3430,16 +4473,16 @@ class SMPhaseCalibration(Operation):
max_xangle = range_angle[iz]/2 + center_xangle
min_yangle = -range_angle[iz]/2 + center_yangle
max_yangle = range_angle[iz]/2 + center_yangle
-
+
inc_x = (max_xangle-min_xangle)/nstepsx
inc_y = (max_yangle-min_yangle)/nstepsy
-
+
alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
penalty = numpy.zeros((nstepsx,nstepsy))
jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
jph = numpy.zeros(nchan)
-
+
# Iterations looking for the offset
for iy in range(int(nstepsy)):
for ix in range(int(nstepsx)):
@@ -3447,46 +4490,46 @@ class SMPhaseCalibration(Operation):
d2 = d[pairsList[1][1]]
d5 = d[pairsList[0][0]]
d4 = d[pairsList[0][1]]
-
+
alp2 = alpha_y[iy] #gamma 1
- alp4 = alpha_x[ix] #gamma 0
-
+ alp4 = alpha_x[ix] #gamma 0
+
alp3 = -alp2*d3/d2 - gammas[1]
alp5 = -alp4*d5/d4 - gammas[0]
# jph[pairy[1]] = alpha_y[iy]
-# jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
-
+# jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
+
# jph[pairx[1]] = alpha_x[ix]
# jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
jph[pairsList[0][1]] = alp4
jph[pairsList[0][0]] = alp5
jph[pairsList[1][0]] = alp3
- jph[pairsList[1][1]] = alp2
+ jph[pairsList[1][1]] = alp2
jph_array[:,ix,iy] = jph
# d = [2.0,2.5,2.5,2.0]
- #falta chequear si va a leer bien los meteoros
+ #falta chequear si va a leer bien los meteoros
meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
error = meteorsArray1[:,-1]
ind1 = numpy.where(error==0)[0]
penalty[ix,iy] = ind1.size
-
+
i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
phOffset = jph_array[:,i,j]
-
+
center_xangle = phOffset[pairx[1]]
center_yangle = phOffset[pairy[1]]
-
+
phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
- phOffset = phOffset*180/numpy.pi
+ phOffset = phOffset*180/numpy.pi
return phOffset
-
-
+
+
def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
-
+
dataOut.flagNoData = True
- self.__dataReady = False
+ self.__dataReady = False
dataOut.outputInterval = nHours*3600
-
+
if self.__isConfig == False:
# self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
#Get Initial LTC time
@@ -3494,19 +4537,19 @@ class SMPhaseCalibration(Operation):
self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
self.__isConfig = True
-
+
if self.__buffer is None:
self.__buffer = dataOut.data_param.copy()
else:
self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
-
+
self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
-
+
if self.__dataReady:
dataOut.utctimeInit = self.__initime
self.__initime += dataOut.outputInterval #to erase time offset
-
+
freq = dataOut.frequency
c = dataOut.C #m/s
lamb = c/freq
@@ -3528,13 +4571,13 @@ class SMPhaseCalibration(Operation):
pairs.append((1,0))
else:
pairs.append((0,1))
-
+
if distances[3] > distances[2]:
pairs.append((3,2))
else:
pairs.append((2,3))
# distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
-
+
meteorsArray = self.__buffer
error = meteorsArray[:,-1]
boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
@@ -3542,7 +4585,7 @@ class SMPhaseCalibration(Operation):
meteorsArray = meteorsArray[ind1,:]
meteorsArray[:,-1] = 0
phases = meteorsArray[:,8:12]
-
+
#Calculate Gammas
gammas = self.__getGammas(pairs, distances, phases)
# gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
@@ -3552,22 +4595,22 @@ class SMPhaseCalibration(Operation):
dataOut.data_output = -phasesOff
dataOut.flagNoData = False
self.__buffer = None
-
-
+
+
return
-
+
class SMOperations():
-
+
def __init__(self):
-
+
return
-
+
def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
-
+
arrayParameters = arrayParameters0.copy()
hmin = h[0]
hmax = h[1]
-
+
#Calculate AOA (Error N 3, 4)
#JONES ET AL. 1998
AOAthresh = numpy.pi/8
@@ -3575,72 +4618,72 @@ class SMOperations():
phases = -arrayParameters[:,8:12] + jph
# phases = numpy.unwrap(phases)
arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
-
+
#Calculate Heights (Error N 13 and 14)
error = arrayParameters[:,-1]
Ranges = arrayParameters[:,1]
zenith = arrayParameters[:,4]
arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
-
+
#----------------------- Get Final data ------------------------------------
# error = arrayParameters[:,-1]
# ind1 = numpy.where(error==0)[0]
# arrayParameters = arrayParameters[ind1,:]
-
+
return arrayParameters
-
+
def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
-
+
arrayAOA = numpy.zeros((phases.shape[0],3))
cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
-
+
arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
arrayAOA[:,2] = cosDirError
-
+
azimuthAngle = arrayAOA[:,0]
zenithAngle = arrayAOA[:,1]
-
+
#Setting Error
indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
error[indError] = 0
#Number 3: AOA not fesible
indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
- error[indInvalid] = 3
+ error[indInvalid] = 3
#Number 4: Large difference in AOAs obtained from different antenna baselines
indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
- error[indInvalid] = 4
+ error[indInvalid] = 4
return arrayAOA, error
-
+
def __getDirectionCosines(self, arrayPhase, pairsList, distances):
-
+
#Initializing some variables
ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
ang_aux = ang_aux.reshape(1,ang_aux.size)
-
+
cosdir = numpy.zeros((arrayPhase.shape[0],2))
cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
-
-
+
+
for i in range(2):
ph0 = arrayPhase[:,pairsList[i][0]]
ph1 = arrayPhase[:,pairsList[i][1]]
d0 = distances[pairsList[i][0]]
d1 = distances[pairsList[i][1]]
-
- ph0_aux = ph0 + ph1
+
+ ph0_aux = ph0 + ph1
ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
# ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
-# ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
+# ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
#First Estimation
cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
-
+
#Most-Accurate Second Estimation
phi1_aux = ph0 - ph1
phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
#Direction Cosine 1
cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
-
+
#Searching the correct Direction Cosine
cosdir0_aux = cosdir0[:,i]
cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
@@ -3649,59 +4692,59 @@ class SMOperations():
indcos = cosDiff.argmin(axis = 1)
#Saving Value obtained
cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
-
+
return cosdir0, cosdir
-
+
def __calculateAOA(self, cosdir, azimuth):
cosdirX = cosdir[:,0]
cosdirY = cosdir[:,1]
-
+
zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
-
+
return angles
-
+
def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
-
+
Ramb = 375 #Ramb = c/(2*PRF)
Re = 6371 #Earth Radius
heights = numpy.zeros(Ranges.shape)
-
+
R_aux = numpy.array([0,1,2])*Ramb
R_aux = R_aux.reshape(1,R_aux.size)
Ranges = Ranges.reshape(Ranges.size,1)
-
+
Ri = Ranges + R_aux
hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
-
+
#Check if there is a height between 70 and 110 km
h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
ind_h = numpy.where(h_bool == 1)[0]
-
+
hCorr = hi[ind_h, :]
ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
-
+
hCorr = hi[ind_hCorr][:len(ind_h)]
heights[ind_h] = hCorr
-
+
#Setting Error
#Number 13: Height unresolvable echo: not valid height within 70 to 110 km
- #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
+ #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
error[indError] = 0
- indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
+ indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
error[indInvalid2] = 14
indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
- error[indInvalid1] = 13
-
+ error[indInvalid1] = 13
+
return heights, error
-
+
def getPhasePairs(self, channelPositions):
chanPos = numpy.array(channelPositions)
listOper = list(itertools.combinations(list(range(5)),2))
-
+
distances = numpy.zeros(4)
axisX = []
axisY = []
@@ -3709,15 +4752,15 @@ class SMOperations():
distY = numpy.zeros(3)
ix = 0
iy = 0
-
+
pairX = numpy.zeros((2,2))
pairY = numpy.zeros((2,2))
-
+
for i in range(len(listOper)):
pairi = listOper[i]
-
+
posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
-
+
if posDif[0] == 0:
axisY.append(pairi)
distY[iy] = posDif[1]
@@ -3726,7 +4769,7 @@ class SMOperations():
axisX.append(pairi)
distX[ix] = posDif[0]
ix += 1
-
+
for i in range(2):
if i==0:
dist0 = distX
@@ -3734,7 +4777,7 @@ class SMOperations():
else:
dist0 = distY
axis0 = axisY
-
+
side = numpy.argsort(dist0)[:-1]
axis0 = numpy.array(axis0)[side,:]
chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
@@ -3742,7 +4785,7 @@ class SMOperations():
side = axis1[axis1 != chanC]
diff1 = chanPos[chanC,i] - chanPos[side[0],i]
diff2 = chanPos[chanC,i] - chanPos[side[1],i]
- if diff1<0:
+ if diff1<0:
chan2 = side[0]
d2 = numpy.abs(diff1)
chan1 = side[1]
@@ -3752,7 +4795,7 @@ class SMOperations():
d2 = numpy.abs(diff2)
chan1 = side[0]
d1 = numpy.abs(diff1)
-
+
if i==0:
chanCX = chanC
chan1X = chan1
@@ -3764,10 +4807,10 @@ class SMOperations():
chan2Y = chan2
distances[2:4] = numpy.array([d1,d2])
# axisXsides = numpy.reshape(axisX[ix,:],4)
-#
+#
# channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
# channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
-#
+#
# ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
# ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
# channel25X = int(pairX[0,ind25X])
@@ -3776,59 +4819,59 @@ class SMOperations():
# ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
# channel25Y = int(pairY[0,ind25Y])
# channel20Y = int(pairY[1,ind20Y])
-
+
# pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
- pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
-
+ pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
+
return pairslist, distances
# def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
-#
+#
# arrayAOA = numpy.zeros((phases.shape[0],3))
# cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
-#
+#
# arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
# cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
# arrayAOA[:,2] = cosDirError
-#
+#
# azimuthAngle = arrayAOA[:,0]
# zenithAngle = arrayAOA[:,1]
-#
+#
# #Setting Error
# #Number 3: AOA not fesible
# indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
-# error[indInvalid] = 3
+# error[indInvalid] = 3
# #Number 4: Large difference in AOAs obtained from different antenna baselines
# indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
-# error[indInvalid] = 4
+# error[indInvalid] = 4
# return arrayAOA, error
-#
+#
# def __getDirectionCosines(self, arrayPhase, pairsList):
-#
+#
# #Initializing some variables
# ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
# ang_aux = ang_aux.reshape(1,ang_aux.size)
-#
+#
# cosdir = numpy.zeros((arrayPhase.shape[0],2))
# cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
-#
-#
+#
+#
# for i in range(2):
# #First Estimation
# phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
# #Dealias
# indcsi = numpy.where(phi0_aux > numpy.pi)
-# phi0_aux[indcsi] -= 2*numpy.pi
+# phi0_aux[indcsi] -= 2*numpy.pi
# indcsi = numpy.where(phi0_aux < -numpy.pi)
-# phi0_aux[indcsi] += 2*numpy.pi
+# phi0_aux[indcsi] += 2*numpy.pi
# #Direction Cosine 0
# cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
-#
+#
# #Most-Accurate Second Estimation
# phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
# phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
# #Direction Cosine 1
# cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
-#
+#
# #Searching the correct Direction Cosine
# cosdir0_aux = cosdir0[:,i]
# cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
@@ -3837,50 +4880,51 @@ class SMOperations():
# indcos = cosDiff.argmin(axis = 1)
# #Saving Value obtained
# cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
-#
+#
# return cosdir0, cosdir
-#
+#
# def __calculateAOA(self, cosdir, azimuth):
# cosdirX = cosdir[:,0]
# cosdirY = cosdir[:,1]
-#
+#
# zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
# azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
# angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
-#
+#
# return angles
-#
+#
# def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
-#
+#
# Ramb = 375 #Ramb = c/(2*PRF)
# Re = 6371 #Earth Radius
# heights = numpy.zeros(Ranges.shape)
-#
+#
# R_aux = numpy.array([0,1,2])*Ramb
# R_aux = R_aux.reshape(1,R_aux.size)
-#
+#
# Ranges = Ranges.reshape(Ranges.size,1)
-#
+#
# Ri = Ranges + R_aux
# hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
-#
+#
# #Check if there is a height between 70 and 110 km
# h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
# ind_h = numpy.where(h_bool == 1)[0]
-#
+#
# hCorr = hi[ind_h, :]
# ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
-#
-# hCorr = hi[ind_hCorr]
+#
+# hCorr = hi[ind_hCorr]
# heights[ind_h] = hCorr
-#
+#
# #Setting Error
# #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
-# #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
-#
-# indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
+# #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
+#
+# indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
# error[indInvalid2] = 14
# indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
-# error[indInvalid1] = 13
-#
-# return heights, error
+# error[indInvalid1] = 13
+#
+# return heights, error
+
diff --git a/schainpy/model/proc/jroproc_spectra.py b/schainpy/model/proc/jroproc_spectra.py
index fde1262..135cb93 100644
--- a/schainpy/model/proc/jroproc_spectra.py
+++ b/schainpy/model/proc/jroproc_spectra.py
@@ -31,7 +31,7 @@ class SpectraProc(ProcessingUnit):
self.dataOut = Spectra()
self.id_min = None
self.id_max = None
- self.setupReq = False #Agregar a todas las unidades de proc
+ self.setupReq = False # Agregar a todas las unidades de proc
def __updateSpecFromVoltage(self):
@@ -122,12 +122,12 @@ class SpectraProc(ProcessingUnit):
if self.dataIn.type == "Spectra":
self.dataOut.copy(self.dataIn)
if shift_fft:
- #desplaza a la derecha en el eje 2 determinadas posiciones
- shift = int(self.dataOut.nFFTPoints/2)
+ # desplaza a la derecha en el eje 2 determinadas posiciones
+ shift = int(self.dataOut.nFFTPoints / 2)
self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
if self.dataOut.data_cspc is not None:
- #desplaza a la derecha en el eje 2 determinadas posiciones
+ # desplaza a la derecha en el eje 2 determinadas posiciones
self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
if pairsList:
self.__selectPairs(pairsList)
@@ -215,7 +215,7 @@ class SpectraProc(ProcessingUnit):
return
- def selectFFTs(self, minFFT, maxFFT ):
+ def selectFFTs(self, minFFT, maxFFT):
"""
Selecciona un bloque de datos en base a un grupo de valores de puntos FFTs segun el rango
minFFT<= FFT <= maxFFT
@@ -264,7 +264,7 @@ class SpectraProc(ProcessingUnit):
heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
# determina indices
- nheis = int(self.dataOut.radarControllerHeaderObj.txB /
+ nheis = int(self.dataOut.radarControllerHeaderObj.txB /
(self.dataOut.heightList[1] - self.dataOut.heightList[0]))
avg_dB = 10 * \
numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
@@ -274,16 +274,16 @@ class SpectraProc(ProcessingUnit):
if val >= beacon_dB[0]:
beacon_heiIndexList.append(avg_dB.tolist().index(val))
- #data_spc = data_spc[:,:,beacon_heiIndexList]
+ # data_spc = data_spc[:,:,beacon_heiIndexList]
data_cspc = None
if self.dataOut.data_cspc is not None:
data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
- #data_cspc = data_cspc[:,:,beacon_heiIndexList]
+ # data_cspc = data_cspc[:,:,beacon_heiIndexList]
data_dc = None
if self.dataOut.data_dc is not None:
data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
- #data_dc = data_dc[:,beacon_heiIndexList]
+ # data_dc = data_dc[:,beacon_heiIndexList]
self.dataOut.data_spc = data_spc
self.dataOut.data_cspc = data_cspc
@@ -302,24 +302,24 @@ class SpectraProc(ProcessingUnit):
raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
if (maxIndex >= self.dataOut.nProfiles):
- maxIndex = self.dataOut.nProfiles-1
+ maxIndex = self.dataOut.nProfiles - 1
- #Spectra
- data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
+ # Spectra
+ data_spc = self.dataOut.data_spc[:, minIndex:maxIndex + 1, :]
data_cspc = None
if self.dataOut.data_cspc is not None:
- data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
+ data_cspc = self.dataOut.data_cspc[:, minIndex:maxIndex + 1, :]
data_dc = None
if self.dataOut.data_dc is not None:
- data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
+ data_dc = self.dataOut.data_dc[minIndex:maxIndex + 1, :]
self.dataOut.data_spc = data_spc
self.dataOut.data_cspc = data_cspc
self.dataOut.data_dc = data_dc
- self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
+ self.dataOut.ippSeconds = self.dataOut.ippSeconds * (self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
@@ -452,7 +452,7 @@ class removeDC(Operation):
xx = numpy.zeros([4, 4])
for fil in range(4):
- xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
+ xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
xx_aux = xx_inv[0, :]
@@ -488,22 +488,22 @@ class removeInterference(Operation):
realCspc = numpy.abs(cspc)
for i in range(cspc.shape[0]):
- LinePower= numpy.sum(realCspc[i], axis=0)
- Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
- SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
- InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
- InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
- InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
+ LinePower = numpy.sum(realCspc[i], axis=0)
+ Threshold = numpy.amax(LinePower) - numpy.sort(LinePower)[len(Heights) - int(len(Heights) * 0.1)]
+ SelectedHeights = Heights[ numpy.where(LinePower < Threshold) ]
+ InterferenceSum = numpy.sum(realCspc[i, :, SelectedHeights], axis=0)
+ InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.98)]
+ InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.99)]
- InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
- #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
- if len(InterferenceRange)<int(cspc.shape[1]*0.3):
- cspc[i,InterferenceRange,:] = numpy.NaN
+ InterferenceRange = numpy.where(([InterferenceSum > InterferenceThresholdMin])) # , InterferenceSum < InterferenceThresholdMax]) )
+ # InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
+ if len(InterferenceRange) < int(cspc.shape[1] * 0.3):
+ cspc[i, InterferenceRange, :] = numpy.NaN
self.dataOut.data_cspc = cspc
- def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
+ def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
jspectra = self.dataOut.data_spc
jcspectra = self.dataOut.data_cspc
@@ -557,7 +557,7 @@ class removeInterference(Operation):
# tmp_noise = jnoise[ich] / num_prof
tmp_noise = jnoise[ich]
junkspc_interf = junkspc_interf - tmp_noise
- #junkspc_interf[:,comp_mask_prof] = 0
+ # junkspc_interf[:,comp_mask_prof] = 0
jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
jspc_interf = jspc_interf.transpose()
@@ -599,20 +599,20 @@ class removeInterference(Operation):
if cinterfid > 0:
for ip in range(cinterfid * (interf == 2) - 1):
- ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
+ ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
(1 + 1 / numpy.sqrt(num_incoh))).nonzero()
cind = len(ind)
if (cind > 0):
jspectra[ich, interfid[ip], ind] = tmp_noise * \
- (1 + (numpy.random.uniform(cind) - 0.5) /
+ (1 + (numpy.random.uniform(cind) - 0.5) /
numpy.sqrt(num_incoh))
ind = numpy.array([-2, -1, 1, 2])
xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
xx = xx_inv[:, 0]
@@ -621,7 +621,7 @@ class removeInterference(Operation):
jspectra[ich, mask_prof[maxid], :] = numpy.dot(
yy.transpose(), xx)
- indAux = (jspectra[ich, :, :] < tmp_noise *
+ indAux = (jspectra[ich, :, :] < tmp_noise *
(1 - 1 / numpy.sqrt(num_incoh))).nonzero()
jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
(1 - 1 / numpy.sqrt(num_incoh))
@@ -671,7 +671,7 @@ class removeInterference(Operation):
xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
xx = xx_inv[:, 0]
@@ -686,12 +686,12 @@ class removeInterference(Operation):
return 1
- def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
+ def run(self, dataOut, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
self.dataOut = dataOut
if mode == 1:
- self.removeInterference(interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None)
+ self.removeInterference(interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None)
elif mode == 2:
self.removeInterference2()
@@ -888,11 +888,11 @@ class dopplerFlip(Operation):
freq_dc = int(num_profiles / 2)
# Flip con for
for j in range(num_profiles):
- jspectra_tmp[num_profiles-j-1]= jspectra[j]
+ jspectra_tmp[num_profiles - j - 1] = jspectra[j]
# Intercambio perfil de DC con perfil inmediato anterior
- jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
- jspectra_tmp[freq_dc]= jspectra[freq_dc]
+ jspectra_tmp[freq_dc - 1] = jspectra[freq_dc - 1]
+ jspectra_tmp[freq_dc] = jspectra[freq_dc]
# canal modificado es re-escrito en el arreglo de canales
self.dataOut.data_spc[2] = jspectra_tmp
- return self.dataOut
\ No newline at end of file
+ return self.dataOut
diff --git a/schainpy/model/proc/jroproc_spectra_acf.py b/schainpy/model/proc/jroproc_spectra_acf.py
index 709922d..6814d0c 100644
--- a/schainpy/model/proc/jroproc_spectra_acf.py
+++ b/schainpy/model/proc/jroproc_spectra_acf.py
@@ -28,11 +28,11 @@ class SpectraAFCProc(ProcessingUnit):
self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
- self.dataOut.ippSeconds = self.dataIn.getDeltaH()*(10**-6)/0.15
+ self.dataOut.ippSeconds = self.dataIn.getDeltaH() * (10 ** -6) / 0.15
self.dataOut.channelList = self.dataIn.channelList
self.dataOut.heightList = self.dataIn.heightList
- self.dataOut.dtype = numpy.dtype([('real','= self.dataOut.nHeights):
- maxIndex = self.dataOut.nHeights-1
+ maxIndex = self.dataOut.nHeights - 1
- #Spectra
- data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
+ # Spectra
+ data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
data_cspc = None
if self.dataOut.data_cspc is not None:
- data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
+ data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
data_dc = None
if self.dataOut.data_dc is not None:
- data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
+ data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
self.dataOut.data_spc = data_spc
self.dataOut.data_cspc = data_cspc
self.dataOut.data_dc = data_dc
- self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
+ self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
return 1
- def removeDC(self, mode = 2):
+ def removeDC(self, mode=2):
jspectra = self.dataOut.data_spc
jcspectra = self.dataOut.data_cspc
@@ -431,43 +431,43 @@ class SpectraAFCProc(ProcessingUnit):
num_pairs = jcspectra.shape[0]
else: jcspectraExist = False
- freq_dc = jspectra.shape[1]/2
- ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
+ freq_dc = jspectra.shape[1] / 2
+ ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
- if ind_vel[0]<0:
- ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
+ if ind_vel[0] < 0:
+ ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
if mode == 1:
- jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION
+ jspectra[:, freq_dc, :] = (jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
if jcspectraExist:
- jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
+ jcspectra[:, freq_dc, :] = (jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
if mode == 2:
- vel = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ vel = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for fil in range(4):
- xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
+ xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx_aux = xx_inv[0,:]
+ xx_aux = xx_inv[0, :]
for ich in range(num_chan):
- yy = jspectra[ich,ind_vel,:]
- jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
+ yy = jspectra[ich, ind_vel, :]
+ jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
- junkid = jspectra[ich,freq_dc,:]<=0
+ junkid = jspectra[ich, freq_dc, :] <= 0
cjunkid = sum(junkid)
if cjunkid.any():
- jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
+ jspectra[ich, freq_dc, junkid.nonzero()] = (jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
if jcspectraExist:
for ip in range(num_pairs):
- yy = jcspectra[ip,ind_vel,:]
- jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
+ yy = jcspectra[ip, ind_vel, :]
+ jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
self.dataOut.data_spc = jspectra
@@ -475,23 +475,23 @@ class SpectraAFCProc(ProcessingUnit):
return 1
- def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
+ def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
jspectra = self.dataOut.data_spc
jcspectra = self.dataOut.data_cspc
jnoise = self.dataOut.getNoise()
num_incoh = self.dataOut.nIncohInt
- num_channel = jspectra.shape[0]
- num_prof = jspectra.shape[1]
- num_hei = jspectra.shape[2]
+ num_channel = jspectra.shape[0]
+ num_prof = jspectra.shape[1]
+ num_hei = jspectra.shape[2]
- #hei_interf
+ # hei_interf
if hei_interf is None:
- count_hei = num_hei/2 #Como es entero no importa
+ count_hei = num_hei / 2 # Como es entero no importa
hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
hei_interf = numpy.asarray(hei_interf)[0]
- #nhei_interf
+ # nhei_interf
if (nhei_interf == None):
nhei_interf = 5
if (nhei_interf < 1):
@@ -506,46 +506,46 @@ class SpectraAFCProc(ProcessingUnit):
# mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
mask_prof = numpy.asarray(list(range(num_prof)))
num_mask_prof = mask_prof.size
- comp_mask_prof = [0, num_prof/2]
+ comp_mask_prof = [0, num_prof / 2]
- #noise_exist: Determina si la variable jnoise ha sido definida y contiene la informacion del ruido de cada canal
+ # noise_exist: Determina si la variable jnoise ha sido definida y contiene la informacion del ruido de cada canal
if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
jnoise = numpy.nan
noise_exist = jnoise[0] < numpy.Inf
- #Subrutina de Remocion de la Interferencia
+ # Subrutina de Remocion de la Interferencia
for ich in range(num_channel):
- #Se ordena los espectros segun su potencia (menor a mayor)
- power = jspectra[ich,mask_prof,:]
- power = power[:,hei_interf]
- power = power.sum(axis = 0)
+ # Se ordena los espectros segun su potencia (menor a mayor)
+ power = jspectra[ich, mask_prof, :]
+ power = power[:, hei_interf]
+ power = power.sum(axis=0)
psort = power.ravel().argsort()
- #Se estima la interferencia promedio en los Espectros de Potencia empleando
- junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
+ # Se estima la interferencia promedio en los Espectros de Potencia empleando
+ junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
if noise_exist:
# tmp_noise = jnoise[ich] / num_prof
tmp_noise = jnoise[ich]
junkspc_interf = junkspc_interf - tmp_noise
- #junkspc_interf[:,comp_mask_prof] = 0
+ # junkspc_interf[:,comp_mask_prof] = 0
- jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
+ jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
jspc_interf = jspc_interf.transpose()
- #Calculando el espectro de interferencia promedio
- noiseid = numpy.where(jspc_interf <= tmp_noise/ numpy.sqrt(num_incoh))
+ # Calculando el espectro de interferencia promedio
+ noiseid = numpy.where(jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
noiseid = noiseid[0]
cnoiseid = noiseid.size
- interfid = numpy.where(jspc_interf > tmp_noise/ numpy.sqrt(num_incoh))
+ interfid = numpy.where(jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
interfid = interfid[0]
cinterfid = interfid.size
if (cnoiseid > 0): jspc_interf[noiseid] = 0
- #Expandiendo los perfiles a limpiar
+ # Expandiendo los perfiles a limpiar
if (cinterfid > 0):
- new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
+ new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
new_interfid = numpy.asarray(new_interfid)
new_interfid = {x for x in new_interfid}
new_interfid = numpy.array(list(new_interfid))
@@ -553,89 +553,89 @@ class SpectraAFCProc(ProcessingUnit):
else: new_cinterfid = 0
for ip in range(new_cinterfid):
- ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
- jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
+ ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
+ jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf / 2], new_interfid[ip]]
- jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #Corregir indices
+ jspectra[ich, :, ind_hei] = jspectra[ich, :, ind_hei] - jspc_interf # Corregir indices
- #Removiendo la interferencia del punto de mayor interferencia
+ # Removiendo la interferencia del punto de mayor interferencia
ListAux = jspc_interf[mask_prof].tolist()
maxid = ListAux.index(max(ListAux))
if cinterfid > 0:
- for ip in range(cinterfid*(interf == 2) - 1):
- ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/numpy.sqrt(num_incoh))).nonzero()
+ for ip in range(cinterfid * (interf == 2) - 1):
+ ind = (jspectra[ich, interfid[ip], :] < tmp_noise * (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
cind = len(ind)
if (cind > 0):
- jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/numpy.sqrt(num_incoh))
+ jspectra[ich, interfid[ip], ind] = tmp_noise * (1 + (numpy.random.uniform(cind) - 0.5) / numpy.sqrt(num_incoh))
- ind = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ ind = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx = xx_inv[:,0]
- ind = (ind + maxid + num_mask_prof)%num_mask_prof
- yy = jspectra[ich,mask_prof[ind],:]
- jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
+ xx = xx_inv[:, 0]
+ ind = (ind + maxid + num_mask_prof) % num_mask_prof
+ yy = jspectra[ich, mask_prof[ind], :]
+ jspectra[ich, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
- indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/numpy.sqrt(num_incoh))).nonzero()
- jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/numpy.sqrt(num_incoh))
+ indAux = (jspectra[ich, :, :] < tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
+ jspectra[ich, indAux[0], indAux[1]] = tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))
- #Remocion de Interferencia en el Cross Spectra
+ # Remocion de Interferencia en el Cross Spectra
if jcspectra is None: return jspectra, jcspectra
- num_pairs = jcspectra.size/(num_prof*num_hei)
+ num_pairs = jcspectra.size / (num_prof * num_hei)
jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
for ip in range(num_pairs):
#-------------------------------------------
- cspower = numpy.abs(jcspectra[ip,mask_prof,:])
- cspower = cspower[:,hei_interf]
- cspower = cspower.sum(axis = 0)
+ cspower = numpy.abs(jcspectra[ip, mask_prof, :])
+ cspower = cspower[:, hei_interf]
+ cspower = cspower.sum(axis=0)
cspsort = cspower.ravel().argsort()
- junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
+ junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
junkcspc_interf = junkcspc_interf.transpose()
- jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
+ jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
- median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
- median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
- junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
+ median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
+ median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
+ junkcspc_interf[comp_mask_prof, :] = numpy.complex(median_real, median_imag)
for iprof in range(num_prof):
- ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
- jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
+ ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
+ jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf / 2]]
- #Removiendo la Interferencia
- jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
+ # Removiendo la Interferencia
+ jcspectra[ip, :, ind_hei] = jcspectra[ip, :, ind_hei] - jcspc_interf
ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
maxid = ListAux.index(max(ListAux))
- ind = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ ind = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx = xx_inv[:,0]
+ xx = xx_inv[:, 0]
- ind = (ind + maxid + num_mask_prof)%num_mask_prof
- yy = jcspectra[ip,mask_prof[ind],:]
- jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
+ ind = (ind + maxid + num_mask_prof) % num_mask_prof
+ yy = jcspectra[ip, mask_prof[ind], :]
+ jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
- #Guardar Resultados
+ # Guardar Resultados
self.dataOut.data_spc = jspectra
self.dataOut.data_cspc = jcspectra
@@ -649,7 +649,7 @@ class SpectraAFCProc(ProcessingUnit):
return 1
def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
- #validacion de rango
+ # validacion de rango
if minHei == None:
minHei = self.dataOut.heightList[0]
@@ -657,13 +657,13 @@ class SpectraAFCProc(ProcessingUnit):
maxHei = self.dataOut.heightList[-1]
if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
- print('minHei: %.2f is out of the heights range'%(minHei))
- print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
+ print('minHei: %.2f is out of the heights range' % (minHei))
+ print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
minHei = self.dataOut.heightList[0]
if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
- print('maxHei: %.2f is out of the heights range'%(maxHei))
- print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
+ print('maxHei: %.2f is out of the heights range' % (maxHei))
+ print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
maxHei = self.dataOut.heightList[-1]
# validacion de velocidades
@@ -676,13 +676,13 @@ class SpectraAFCProc(ProcessingUnit):
maxVel = velrange[-1]
if (minVel < velrange[0]) or (minVel > maxVel):
- print('minVel: %.2f is out of the velocity range'%(minVel))
- print('minVel is setting to %.2f'%(velrange[0]))
+ print('minVel: %.2f is out of the velocity range' % (minVel))
+ print('minVel is setting to %.2f' % (velrange[0]))
minVel = velrange[0]
if (maxVel > velrange[-1]) or (maxVel < minVel):
- print('maxVel: %.2f is out of the velocity range'%(maxVel))
- print('maxVel is setting to %.2f'%(velrange[-1]))
+ print('maxVel: %.2f is out of the velocity range' % (maxVel))
+ print('maxVel is setting to %.2f' % (velrange[-1]))
maxVel = velrange[-1]
# seleccion de indices para rango
@@ -707,7 +707,7 @@ class SpectraAFCProc(ProcessingUnit):
raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
if (maxIndex >= self.dataOut.nHeights):
- maxIndex = self.dataOut.nHeights-1
+ maxIndex = self.dataOut.nHeights - 1
# seleccion de indices para velocidades
indminvel = numpy.where(velrange >= minVel)
@@ -722,15 +722,15 @@ class SpectraAFCProc(ProcessingUnit):
except:
maxIndexVel = len(velrange)
- #seleccion del espectro
- data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
- #estimacion de ruido
+ # seleccion del espectro
+ data_spc = self.dataOut.data_spc[:, minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
+ # estimacion de ruido
noise = numpy.zeros(self.dataOut.nChannels)
for channel in range(self.dataOut.nChannels):
- daux = data_spc[channel,:,:]
+ daux = data_spc[channel, :, :]
noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
self.dataOut.noise_estimation = noise.copy()
- return 1
\ No newline at end of file
+ return 1
diff --git a/schainpy/model/proc/jroproc_spectra_lags.py b/schainpy/model/proc/jroproc_spectra_lags.py
index 90b88e5..59e6c05 100644
--- a/schainpy/model/proc/jroproc_spectra_lags.py
+++ b/schainpy/model/proc/jroproc_spectra_lags.py
@@ -30,11 +30,11 @@ class SpectraLagsProc(ProcessingUnit):
self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
- self.dataOut.ippSeconds = self.dataIn.getDeltaH()*(10**-6)/0.15
+ self.dataOut.ippSeconds = self.dataIn.getDeltaH() * (10 ** -6) / 0.15
self.dataOut.channelList = self.dataIn.channelList
self.dataOut.heightList = self.dataIn.heightList
- self.dataOut.dtype = numpy.dtype([('real','= self.dataOut.nHeights):
- maxIndex = self.dataOut.nHeights-1
+ maxIndex = self.dataOut.nHeights - 1
- #Spectra
- data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
+ # Spectra
+ data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
data_cspc = None
if self.dataOut.data_cspc is not None:
- data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
+ data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
data_dc = None
if self.dataOut.data_dc is not None:
- data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
+ data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
self.dataOut.data_spc = data_spc
self.dataOut.data_cspc = data_cspc
self.dataOut.data_dc = data_dc
- self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
+ self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
return 1
- def removeDC(self, mode = 2):
+ def removeDC(self, mode=2):
jspectra = self.dataOut.data_spc
jcspectra = self.dataOut.data_cspc
@@ -434,43 +434,43 @@ class SpectraLagsProc(ProcessingUnit):
num_pairs = jcspectra.shape[0]
else: jcspectraExist = False
- freq_dc = jspectra.shape[1]/2
- ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
+ freq_dc = jspectra.shape[1] / 2
+ ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
- if ind_vel[0]<0:
- ind_vel[list(range(0,1))] = ind_vel[list(range(0,1))] + self.num_prof
+ if ind_vel[0] < 0:
+ ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
if mode == 1:
- jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION
+ jspectra[:, freq_dc, :] = (jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
if jcspectraExist:
- jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
+ jcspectra[:, freq_dc, :] = (jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
if mode == 2:
- vel = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ vel = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for fil in range(4):
- xx[fil,:] = vel[fil]**numpy.asarray(list(range(4)))
+ xx[fil, :] = vel[fil] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx_aux = xx_inv[0,:]
+ xx_aux = xx_inv[0, :]
for ich in range(num_chan):
- yy = jspectra[ich,ind_vel,:]
- jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
+ yy = jspectra[ich, ind_vel, :]
+ jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
- junkid = jspectra[ich,freq_dc,:]<=0
+ junkid = jspectra[ich, freq_dc, :] <= 0
cjunkid = sum(junkid)
if cjunkid.any():
- jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
+ jspectra[ich, freq_dc, junkid.nonzero()] = (jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
if jcspectraExist:
for ip in range(num_pairs):
- yy = jcspectra[ip,ind_vel,:]
- jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
+ yy = jcspectra[ip, ind_vel, :]
+ jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
self.dataOut.data_spc = jspectra
@@ -478,23 +478,23 @@ class SpectraLagsProc(ProcessingUnit):
return 1
- def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
+ def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
jspectra = self.dataOut.data_spc
jcspectra = self.dataOut.data_cspc
jnoise = self.dataOut.getNoise()
num_incoh = self.dataOut.nIncohInt
- num_channel = jspectra.shape[0]
- num_prof = jspectra.shape[1]
- num_hei = jspectra.shape[2]
+ num_channel = jspectra.shape[0]
+ num_prof = jspectra.shape[1]
+ num_hei = jspectra.shape[2]
- #hei_interf
+ # hei_interf
if hei_interf is None:
- count_hei = num_hei/2 #Como es entero no importa
+ count_hei = num_hei / 2 # Como es entero no importa
hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
hei_interf = numpy.asarray(hei_interf)[0]
- #nhei_interf
+ # nhei_interf
if (nhei_interf == None):
nhei_interf = 5
if (nhei_interf < 1):
@@ -509,46 +509,46 @@ class SpectraLagsProc(ProcessingUnit):
# mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
mask_prof = numpy.asarray(list(range(num_prof)))
num_mask_prof = mask_prof.size
- comp_mask_prof = [0, num_prof/2]
+ comp_mask_prof = [0, num_prof / 2]
- #noise_exist: Determina si la variable jnoise ha sido definida y contiene la informacion del ruido de cada canal
+ # noise_exist: Determina si la variable jnoise ha sido definida y contiene la informacion del ruido de cada canal
if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
jnoise = numpy.nan
noise_exist = jnoise[0] < numpy.Inf
- #Subrutina de Remocion de la Interferencia
+ # Subrutina de Remocion de la Interferencia
for ich in range(num_channel):
- #Se ordena los espectros segun su potencia (menor a mayor)
- power = jspectra[ich,mask_prof,:]
- power = power[:,hei_interf]
- power = power.sum(axis = 0)
+ # Se ordena los espectros segun su potencia (menor a mayor)
+ power = jspectra[ich, mask_prof, :]
+ power = power[:, hei_interf]
+ power = power.sum(axis=0)
psort = power.ravel().argsort()
- #Se estima la interferencia promedio en los Espectros de Potencia empleando
- junkspc_interf = jspectra[ich,:,hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
+ # Se estima la interferencia promedio en los Espectros de Potencia empleando
+ junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
if noise_exist:
# tmp_noise = jnoise[ich] / num_prof
tmp_noise = jnoise[ich]
junkspc_interf = junkspc_interf - tmp_noise
- #junkspc_interf[:,comp_mask_prof] = 0
+ # junkspc_interf[:,comp_mask_prof] = 0
- jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
+ jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
jspc_interf = jspc_interf.transpose()
- #Calculando el espectro de interferencia promedio
- noiseid = numpy.where(jspc_interf <= tmp_noise/ numpy.sqrt(num_incoh))
+ # Calculando el espectro de interferencia promedio
+ noiseid = numpy.where(jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
noiseid = noiseid[0]
cnoiseid = noiseid.size
- interfid = numpy.where(jspc_interf > tmp_noise/ numpy.sqrt(num_incoh))
+ interfid = numpy.where(jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
interfid = interfid[0]
cinterfid = interfid.size
if (cnoiseid > 0): jspc_interf[noiseid] = 0
- #Expandiendo los perfiles a limpiar
+ # Expandiendo los perfiles a limpiar
if (cinterfid > 0):
- new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
+ new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
new_interfid = numpy.asarray(new_interfid)
new_interfid = {x for x in new_interfid}
new_interfid = numpy.array(list(new_interfid))
@@ -556,89 +556,89 @@ class SpectraLagsProc(ProcessingUnit):
else: new_cinterfid = 0
for ip in range(new_cinterfid):
- ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
- jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
+ ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
+ jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf / 2], new_interfid[ip]]
- jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #Corregir indices
+ jspectra[ich, :, ind_hei] = jspectra[ich, :, ind_hei] - jspc_interf # Corregir indices
- #Removiendo la interferencia del punto de mayor interferencia
+ # Removiendo la interferencia del punto de mayor interferencia
ListAux = jspc_interf[mask_prof].tolist()
maxid = ListAux.index(max(ListAux))
if cinterfid > 0:
- for ip in range(cinterfid*(interf == 2) - 1):
- ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/numpy.sqrt(num_incoh))).nonzero()
+ for ip in range(cinterfid * (interf == 2) - 1):
+ ind = (jspectra[ich, interfid[ip], :] < tmp_noise * (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
cind = len(ind)
if (cind > 0):
- jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/numpy.sqrt(num_incoh))
+ jspectra[ich, interfid[ip], ind] = tmp_noise * (1 + (numpy.random.uniform(cind) - 0.5) / numpy.sqrt(num_incoh))
- ind = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ ind = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx = xx_inv[:,0]
- ind = (ind + maxid + num_mask_prof)%num_mask_prof
- yy = jspectra[ich,mask_prof[ind],:]
- jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
+ xx = xx_inv[:, 0]
+ ind = (ind + maxid + num_mask_prof) % num_mask_prof
+ yy = jspectra[ich, mask_prof[ind], :]
+ jspectra[ich, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
- indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/numpy.sqrt(num_incoh))).nonzero()
- jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/numpy.sqrt(num_incoh))
+ indAux = (jspectra[ich, :, :] < tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
+ jspectra[ich, indAux[0], indAux[1]] = tmp_noise * (1 - 1 / numpy.sqrt(num_incoh))
- #Remocion de Interferencia en el Cross Spectra
+ # Remocion de Interferencia en el Cross Spectra
if jcspectra is None: return jspectra, jcspectra
- num_pairs = jcspectra.size/(num_prof*num_hei)
+ num_pairs = jcspectra.size / (num_prof * num_hei)
jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
for ip in range(num_pairs):
#-------------------------------------------
- cspower = numpy.abs(jcspectra[ip,mask_prof,:])
- cspower = cspower[:,hei_interf]
- cspower = cspower.sum(axis = 0)
+ cspower = numpy.abs(jcspectra[ip, mask_prof, :])
+ cspower = cspower[:, hei_interf]
+ cspower = cspower.sum(axis=0)
cspsort = cspower.ravel().argsort()
- junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
+ junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(offhei_interf, nhei_interf + offhei_interf))]]]
junkcspc_interf = junkcspc_interf.transpose()
- jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
+ jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
- median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
- median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3*num_prof/4))]],:]))
- junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
+ median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
+ median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[list(range(3 * num_prof / 4))]], :]))
+ junkcspc_interf[comp_mask_prof, :] = numpy.complex(median_real, median_imag)
for iprof in range(num_prof):
- ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
- jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
+ ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
+ jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf / 2]]
- #Removiendo la Interferencia
- jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
+ # Removiendo la Interferencia
+ jcspectra[ip, :, ind_hei] = jcspectra[ip, :, ind_hei] - jcspc_interf
ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
maxid = ListAux.index(max(ListAux))
- ind = numpy.array([-2,-1,1,2])
- xx = numpy.zeros([4,4])
+ ind = numpy.array([-2, -1, 1, 2])
+ xx = numpy.zeros([4, 4])
for id1 in range(4):
- xx[:,id1] = ind[id1]**numpy.asarray(list(range(4)))
+ xx[:, id1] = ind[id1] ** numpy.asarray(list(range(4)))
xx_inv = numpy.linalg.inv(xx)
- xx = xx_inv[:,0]
+ xx = xx_inv[:, 0]
- ind = (ind + maxid + num_mask_prof)%num_mask_prof
- yy = jcspectra[ip,mask_prof[ind],:]
- jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
+ ind = (ind + maxid + num_mask_prof) % num_mask_prof
+ yy = jcspectra[ip, mask_prof[ind], :]
+ jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
- #Guardar Resultados
+ # Guardar Resultados
self.dataOut.data_spc = jspectra
self.dataOut.data_cspc = jcspectra
@@ -652,7 +652,7 @@ class SpectraLagsProc(ProcessingUnit):
return 1
def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
- #validacion de rango
+ # validacion de rango
if minHei == None:
minHei = self.dataOut.heightList[0]
@@ -660,13 +660,13 @@ class SpectraLagsProc(ProcessingUnit):
maxHei = self.dataOut.heightList[-1]
if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
- print('minHei: %.2f is out of the heights range'%(minHei))
- print('minHei is setting to %.2f'%(self.dataOut.heightList[0]))
+ print('minHei: %.2f is out of the heights range' % (minHei))
+ print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
minHei = self.dataOut.heightList[0]
if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
- print('maxHei: %.2f is out of the heights range'%(maxHei))
- print('maxHei is setting to %.2f'%(self.dataOut.heightList[-1]))
+ print('maxHei: %.2f is out of the heights range' % (maxHei))
+ print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
maxHei = self.dataOut.heightList[-1]
# validacion de velocidades
@@ -679,13 +679,13 @@ class SpectraLagsProc(ProcessingUnit):
maxVel = velrange[-1]
if (minVel < velrange[0]) or (minVel > maxVel):
- print('minVel: %.2f is out of the velocity range'%(minVel))
- print('minVel is setting to %.2f'%(velrange[0]))
+ print('minVel: %.2f is out of the velocity range' % (minVel))
+ print('minVel is setting to %.2f' % (velrange[0]))
minVel = velrange[0]
if (maxVel > velrange[-1]) or (maxVel < minVel):
- print('maxVel: %.2f is out of the velocity range'%(maxVel))
- print('maxVel is setting to %.2f'%(velrange[-1]))
+ print('maxVel: %.2f is out of the velocity range' % (maxVel))
+ print('maxVel is setting to %.2f' % (velrange[-1]))
maxVel = velrange[-1]
# seleccion de indices para rango
@@ -710,7 +710,7 @@ class SpectraLagsProc(ProcessingUnit):
raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))
if (maxIndex >= self.dataOut.nHeights):
- maxIndex = self.dataOut.nHeights-1
+ maxIndex = self.dataOut.nHeights - 1
# seleccion de indices para velocidades
indminvel = numpy.where(velrange >= minVel)
@@ -725,15 +725,15 @@ class SpectraLagsProc(ProcessingUnit):
except:
maxIndexVel = len(velrange)
- #seleccion del espectro
- data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
- #estimacion de ruido
+ # seleccion del espectro
+ data_spc = self.dataOut.data_spc[:, minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
+ # estimacion de ruido
noise = numpy.zeros(self.dataOut.nChannels)
for channel in range(self.dataOut.nChannels):
- daux = data_spc[channel,:,:]
+ daux = data_spc[channel, :, :]
noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
self.dataOut.noise_estimation = noise.copy()
- return 1
\ No newline at end of file
+ return 1
diff --git a/schainpy/model/proc/jroproc_voltage.py b/schainpy/model/proc/jroproc_voltage.py
index 9c71e4d..a716b94 100644
--- a/schainpy/model/proc/jroproc_voltage.py
+++ b/schainpy/model/proc/jroproc_voltage.py
@@ -1,8 +1,8 @@
import sys
-import numpy,math
+import numpy, math
from scipy import interpolate
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
-from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
+from schainpy.model.data.jrodata import Voltage, hildebrand_sekhon
from schainpy.utils import log
from time import time
@@ -37,7 +37,7 @@ class VoltageProc(ProcessingUnit):
self.dataOut.data = self.dataIn.data
self.dataOut.utctime = self.dataIn.utctime
self.dataOut.channelList = self.dataIn.channelList
- #self.dataOut.timeInterval = self.dataIn.timeInterval
+ # self.dataOut.timeInterval = self.dataIn.timeInterval
self.dataOut.heightList = self.dataIn.heightList
self.dataOut.nProfiles = self.dataIn.nProfiles
@@ -61,7 +61,7 @@ class selectChannels(Operation):
self.dataOut = dataOut
for channel in channelList:
if channel not in self.dataOut.channelList:
- raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
+ raise ValueError("Channel %d is not in %s" % (channel, str(self.dataOut.channelList)))
index = self.dataOut.channelList.index(channel)
channelIndexList.append(index)
@@ -89,16 +89,16 @@ class selectChannels(Operation):
for channelIndex in channelIndexList:
if channelIndex not in self.dataOut.channelIndexList:
- raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
+ raise ValueError("The value %d in channelIndexList is not valid" % channelIndex)
if self.dataOut.type == 'Voltage':
if self.dataOut.flagDataAsBlock:
"""
Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
"""
- data = self.dataOut.data[channelIndexList,:,:]
+ data = self.dataOut.data[channelIndexList, :, :]
else:
- data = self.dataOut.data[channelIndexList,:]
+ data = self.dataOut.data[channelIndexList, :]
self.dataOut.data = data
# self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
@@ -163,8 +163,14 @@ class selectHeights(Operation):
"""
self.dataOut = dataOut
-
- if minHei and maxHei:
+
+ #if minHei and maxHei:
+ if 1:
+ if minHei == None:
+ minHei = self.dataOut.heightList[0]
+
+ if maxHei == None:
+ maxHei = self.dataOut.heightList[-1]
if (minHei < self.dataOut.heightList[0]):
minHei = self.dataOut.heightList[0]
@@ -188,7 +194,7 @@ class selectHeights(Operation):
maxIndex = indb[0][-1]
except:
maxIndex = len(heights)
-
+
self.selectHeightsByIndex(minIndex, maxIndex)
return self.dataOut
@@ -217,12 +223,12 @@ class selectHeights(Operation):
if (maxIndex >= self.dataOut.nHeights):
maxIndex = self.dataOut.nHeights
- #voltage
+ # voltage
if self.dataOut.flagDataAsBlock:
"""
Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
"""
- data = self.dataOut.data[:,:, minIndex:maxIndex]
+ data = self.dataOut.data[:, :, minIndex:maxIndex]
else:
data = self.dataOut.data[:, minIndex:maxIndex]
@@ -232,7 +238,7 @@ class selectHeights(Operation):
self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
if self.dataOut.nHeights <= 1:
- raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
+ raise ValueError("selectHeights: Too few heights. Current number of heights is %d" % (self.dataOut.nHeights))
elif self.dataOut.type == 'Spectra':
if (minIndex < 0) or (minIndex > maxIndex):
raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
@@ -268,30 +274,30 @@ class filterByHeights(Operation):
deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
if window == None:
- window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
+ window = (dataOut.radarControllerHeaderObj.txA / dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
newdelta = deltaHeight * window
r = dataOut.nHeights % window
- newheights = (dataOut.nHeights-r)/window
+ newheights = (dataOut.nHeights - r) / window
if newheights <= 1:
- raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
+ raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" % (dataOut.nHeights, window))
if dataOut.flagDataAsBlock:
"""
Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
"""
- buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
- buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
- buffer = numpy.sum(buffer,3)
+ buffer = dataOut.data[:, :, 0:int(dataOut.nHeights - r)]
+ buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights / window), window)
+ buffer = numpy.sum(buffer, 3)
else:
- buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
- buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
- buffer = numpy.sum(buffer,2)
+ buffer = dataOut.data[:, 0:int(dataOut.nHeights - r)]
+ buffer = buffer.reshape(dataOut.nChannels, int(dataOut.nHeights / window), int(window))
+ buffer = numpy.sum(buffer, 2)
dataOut.data = buffer
- dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
+ dataOut.heightList = dataOut.heightList[0] + numpy.arange(newheights) * newdelta
dataOut.windowOfFilter = window
return dataOut
@@ -299,14 +305,14 @@ class filterByHeights(Operation):
class setH0(Operation):
- def run(self, dataOut, h0, deltaHeight = None):
+ def run(self, dataOut, h0, deltaHeight=None):
if not deltaHeight:
deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
nHeights = dataOut.nHeights
- newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
+ newHeiRange = h0 + numpy.arange(nHeights) * deltaHeight
dataOut.heightList = newHeiRange
@@ -315,7 +321,7 @@ class setH0(Operation):
class deFlip(Operation):
- def run(self, dataOut, channelList = []):
+ def run(self, dataOut, channelList=[]):
data = dataOut.data.copy()
@@ -325,7 +331,7 @@ class deFlip(Operation):
if not channelList:
for thisProfile in profileList:
- data[:,thisProfile,:] = data[:,thisProfile,:]*flip
+ data[:, thisProfile, :] = data[:, thisProfile, :] * flip
flip *= -1.0
else:
for thisChannel in channelList:
@@ -333,20 +339,20 @@ class deFlip(Operation):
continue
for thisProfile in profileList:
- data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
+ data[thisChannel, thisProfile, :] = data[thisChannel, thisProfile, :] * flip
flip *= -1.0
self.flip = flip
else:
if not channelList:
- data[:,:] = data[:,:]*self.flip
+ data[:, :] = data[:, :] * self.flip
else:
for thisChannel in channelList:
if thisChannel not in dataOut.channelList:
continue
- data[thisChannel,:] = data[thisChannel,:]*self.flip
+ data[thisChannel, :] = data[thisChannel, :] * self.flip
self.flip *= -1.
@@ -395,21 +401,21 @@ class printAttribute(Operation):
class interpolateHeights(Operation):
def run(self, dataOut, topLim, botLim):
- #69 al 72 para julia
- #82-84 para meteoros
- if len(numpy.shape(dataOut.data))==2:
- sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
- sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
- #dataOut.data[:,botLim:limSup+1] = sampInterp
- dataOut.data[:,botLim:topLim+1] = sampInterp
+ # 69 al 72 para julia
+ # 82-84 para meteoros
+ if len(numpy.shape(dataOut.data)) == 2:
+ sampInterp = (dataOut.data[:, botLim - 1] + dataOut.data[:, topLim + 1]) / 2
+ sampInterp = numpy.transpose(numpy.tile(sampInterp, (topLim - botLim + 1, 1)))
+ # dataOut.data[:,botLim:limSup+1] = sampInterp
+ dataOut.data[:, botLim:topLim + 1] = sampInterp
else:
nHeights = dataOut.data.shape[2]
- x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
- y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
- f = interpolate.interp1d(x, y, axis = 2)
- xnew = numpy.arange(botLim,topLim+1)
+ x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
+ y = dataOut.data[:, :, list(range(botLim)) + list(range(topLim + 1, nHeights))]
+ f = interpolate.interp1d(x, y, axis=2)
+ xnew = numpy.arange(botLim, topLim + 1)
ynew = f(xnew)
- dataOut.data[:,:,botLim:topLim+1] = ynew
+ dataOut.data[:, :, botLim:topLim + 1] = ynew
return dataOut
@@ -458,7 +464,7 @@ class CohInt(Operation):
self.n = n
self.__byTime = False
else:
- self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
+ self.__integrationtime = timeInterval # * 60. #if (type(timeInterval)!=integer) -> change this line
self.n = 9999
self.__byTime = True
@@ -483,25 +489,25 @@ class CohInt(Operation):
self.__profIndex += 1
return
- #Overlapping data
+ # Overlapping data
nChannels, nHeis = data.shape
data = numpy.reshape(data, (1, nChannels, nHeis))
- #If the buffer is empty then it takes the data value
+ # If the buffer is empty then it takes the data value
if self.__buffer is None:
self.__buffer = data
self.__profIndex += 1
return
- #If the buffer length is lower than n then stakcing the data value
+ # If the buffer length is lower than n then stakcing the data value
if self.__profIndex < self.n:
self.__buffer = numpy.vstack((self.__buffer, data))
self.__profIndex += 1
return
- #If the buffer length is equal to n then replacing the last buffer value with the data value
+ # If the buffer length is equal to n then replacing the last buffer value with the data value
self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
- self.__buffer[self.n-1] = data
+ self.__buffer[self.n - 1] = data
self.__profIndex = self.n
return
@@ -525,7 +531,7 @@ class CohInt(Operation):
return data, n
- #Integration with Overlapping
+ # Integration with Overlapping
data = numpy.sum(self.__buffer, axis=0)
# print data
# raise
@@ -568,7 +574,7 @@ class CohInt(Operation):
if self.__profIndex == 0:
self.__buffer = [[data.copy(), datatime]]
else:
- self.__buffer.append([data.copy(),datatime])
+ self.__buffer.append([data.copy(), datatime])
self.__profIndex += 1
self.__dataReady = False
@@ -625,19 +631,19 @@ class CohInt(Operation):
def integrateByBlock(self, dataOut):
- times = int(dataOut.data.shape[1]/self.n)
+ times = int(dataOut.data.shape[1] / self.n)
avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
id_min = 0
id_max = self.n
for i in range(times):
- junk = dataOut.data[:,id_min:id_max,:]
- avgdata[:,i,:] = junk.sum(axis=1)
+ junk = dataOut.data[:, id_min:id_max, :]
+ avgdata[:, i, :] = junk.sum(axis=1)
id_min += self.n
id_max += self.n
- timeInterval = dataOut.ippSeconds*self.n
+ timeInterval = dataOut.ippSeconds * self.n
avgdatatime = (times - 1) * timeInterval + dataOut.utctime
self.__dataReady = True
return avgdata, avgdatatime
@@ -704,47 +710,47 @@ class Decoder(Operation):
self.nCode = len(code)
self.nBaud = len(code[0])
- if (osamp != None) and (osamp >1):
+ if (osamp != None) and (osamp > 1):
self.osamp = osamp
self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
- self.nBaud = self.nBaud*self.osamp
+ self.nBaud = self.nBaud * self.osamp
self.__nChannels = dataOut.nChannels
self.__nProfiles = dataOut.nProfiles
self.__nHeis = dataOut.nHeights
if self.__nHeis < self.nBaud:
- raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
+ raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' % (self.__nHeis, self.nBaud))
- #Frequency
+ # Frequency
__codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
- __codeBuffer[:,0:self.nBaud] = self.code
+ __codeBuffer[:, 0:self.nBaud] = self.code
self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
if dataOut.flagDataAsBlock:
- self.ndatadec = self.__nHeis #- self.nBaud + 1
+ self.ndatadec = self.__nHeis # - self.nBaud + 1
self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
else:
- #Time
- self.ndatadec = self.__nHeis #- self.nBaud + 1
+ # Time
+ self.ndatadec = self.__nHeis # - self.nBaud + 1
self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
def __convolutionInFreq(self, data):
- fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
+ fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
fft_data = numpy.fft.fft(data, axis=1)
- conv = fft_data*fft_code
+ conv = fft_data * fft_code
- data = numpy.fft.ifft(conv,axis=1)
+ data = numpy.fft.ifft(conv, axis=1)
return data
@@ -756,7 +762,7 @@ class Decoder(Operation):
code = self.code[self.__profIndex]
for i in range(self.__nChannels):
- self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
+ self.datadecTime[i, :] = numpy.correlate(data[i, :], code, mode='full')[self.nBaud - 1:]
return self.datadecTime
@@ -765,12 +771,12 @@ class Decoder(Operation):
repetitions = int(self.__nProfiles / self.nCode)
junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
junk = junk.flatten()
- code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
+ code_block = numpy.reshape(junk, (self.nCode * repetitions, self.nBaud))
profilesList = range(self.__nProfiles)
for i in range(self.__nChannels):
for j in profilesList:
- self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
+ self.datadecTime[i, j, :] = numpy.correlate(data[i, j, :], code_block[j, :], mode='full')[self.nBaud - 1:]
return self.datadecTime
def __convolutionByBlockInFreq(self, data):
@@ -778,18 +784,18 @@ class Decoder(Operation):
raise NotImplementedError("Decoder by frequency fro Blocks not implemented")
- fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
+ fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
fft_data = numpy.fft.fft(data, axis=2)
- conv = fft_data*fft_code
+ conv = fft_data * fft_code
- data = numpy.fft.ifft(conv,axis=2)
+ data = numpy.fft.ifft(conv, axis=2)
return data
- def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
+ def run(self, dataOut, code=None, nCode=None, nBaud=None, mode=0, osamp=None, times=None):
if dataOut.flagDecodeData:
print("This data is already decoded, recoding again ...")
@@ -798,17 +804,17 @@ class Decoder(Operation):
if code is None:
if dataOut.code is None:
- raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
+ raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" % dataOut.type)
code = dataOut.code
else:
- code = numpy.array(code).reshape(nCode,nBaud)
+ code = numpy.array(code).reshape(nCode, nBaud)
self.setup(code, osamp, dataOut)
self.isConfig = True
if mode == 3:
- sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
+ sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" % mode)
if times != None:
sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
@@ -846,7 +852,7 @@ class Decoder(Operation):
datadec = self.__convolutionInFreqOpt(dataOut.data)
if datadec is None:
- raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
+ raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" % mode)
dataOut.code = self.code
dataOut.nCode = self.nCode
@@ -856,9 +862,9 @@ class Decoder(Operation):
dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
- dataOut.flagDecodeData = True #asumo q la data esta decodificada
+ dataOut.flagDecodeData = True # asumo q la data esta decodificada
- if self.__profIndex == self.nCode-1:
+ if self.__profIndex == self.nCode - 1:
self.__profIndex = 0
return dataOut
@@ -884,14 +890,14 @@ class ProfileConcat(Operation):
self.times = 1
def setup(self, data, m, n=1):
- self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
- self.nHeights = data.shape[1]#.nHeights
+ self.buffer = numpy.zeros((data.shape[0], data.shape[1] * m), dtype=type(data[0, 0]))
+ self.nHeights = data.shape[1] # .nHeights
self.start_index = 0
self.times = 1
def concat(self, data):
- self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
+ self.buffer[:, self.start_index:self.nHeights * self.times] = data.copy()
self.start_index = self.start_index + self.nHeights
def run(self, dataOut, m):
@@ -953,7 +959,7 @@ class ProfileSelector(Operation):
return True
- def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
+ def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList=None, nProfiles=None):
"""
ProfileSelector:
@@ -978,14 +984,14 @@ class ProfileSelector(Operation):
data dimension = [nChannels, nProfiles, nHeis]
"""
if profileList != None:
- dataOut.data = dataOut.data[:,profileList,:]
+ dataOut.data = dataOut.data[:, profileList, :]
if profileRangeList != None:
minIndex = profileRangeList[0]
maxIndex = profileRangeList[1]
- profileList = list(range(minIndex, maxIndex+1))
+ profileList = list(range(minIndex, maxIndex + 1))
- dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
+ dataOut.data = dataOut.data[:, minIndex:maxIndex + 1, :]
if rangeList != None:
@@ -995,9 +1001,9 @@ class ProfileSelector(Operation):
minIndex = thisRange[0]
maxIndex = thisRange[1]
- profileList.extend(list(range(minIndex, maxIndex+1)))
+ profileList.extend(list(range(minIndex, maxIndex + 1)))
- dataOut.data = dataOut.data[:,profileList,:]
+ dataOut.data = dataOut.data[:, profileList, :]
dataOut.nProfiles = len(profileList)
dataOut.profileIndex = dataOut.nProfiles - 1
@@ -1065,7 +1071,7 @@ class ProfileSelector(Operation):
return dataOut
- if beam != None: #beam is only for AMISR data
+ if beam != None: # beam is only for AMISR data
if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
dataOut.flagNoData = False
dataOut.profileIndex = self.profileIndex
@@ -1089,8 +1095,8 @@ class Reshaper(Operation):
def __appendProfile(self, dataOut, nTxs):
if self.__buffer is None:
- shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
- self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
+ shape = (dataOut.nChannels, int(dataOut.nHeights / nTxs))
+ self.__buffer = numpy.empty(shape, dtype=dataOut.data.dtype)
ini = dataOut.nHeights * self.__nitems
end = ini + dataOut.nHeights
@@ -1099,11 +1105,11 @@ class Reshaper(Operation):
self.__nitems += 1
- return int(self.__nitems*nTxs)
+ return int(self.__nitems * nTxs)
def __getBuffer(self):
- if self.__nitems == int(1./self.__nTxs):
+ if self.__nitems == int(1. / self.__nTxs):
self.__nitems = 0
@@ -1120,15 +1126,15 @@ class Reshaper(Operation):
if nTxs < 0:
raise ValueError("nTxs should be greater than 0")
- if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
- raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
+ if nTxs < 1 and dataOut.nProfiles % (1. / nTxs) != 0:
+ raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" % (dataOut.nProfiles, (1. / nTxs)))
- shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
+ shape = [dataOut.nChannels, dataOut.nProfiles * nTxs, dataOut.nHeights / nTxs]
return shape, nTxs
- if len(shape) != 2 and len(shape) != 3:
- raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
+ if len(shape) != 2 and len(shape) != 3:
+ raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" % (dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
if len(shape) == 2:
shape_tuple = [dataOut.nChannels]
@@ -1136,7 +1142,7 @@ class Reshaper(Operation):
else:
shape_tuple = list(shape)
- nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
+ nTxs = 1.0 * shape_tuple[1] / dataOut.nProfiles
return shape_tuple, nTxs
@@ -1152,7 +1158,7 @@ class Reshaper(Operation):
dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
dataOut.flagNoData = False
- profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
+ profileIndex = int(dataOut.nProfiles * self.__nTxs) - 1
else:
@@ -1165,16 +1171,16 @@ class Reshaper(Operation):
dataOut.data = new_data
dataOut.flagNoData = False
- profileIndex = dataOut.profileIndex*nTxs
+ profileIndex = dataOut.profileIndex * nTxs
else:
raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
- dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
+ dataOut.heightList = numpy.arange(dataOut.nHeights / self.__nTxs) * deltaHeight + dataOut.heightList[0]
- dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
+ dataOut.nProfiles = int(dataOut.nProfiles * self.__nTxs)
dataOut.profileIndex = profileIndex
@@ -1195,18 +1201,18 @@ class SplitProfiles(Operation):
if dataOut.flagDataAsBlock:
- #nchannels, nprofiles, nsamples
+ # nchannels, nprofiles, nsamples
shape = dataOut.data.shape
if shape[2] % n != 0:
- raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
+ raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[2]))
- new_shape = shape[0], shape[1]*n, int(shape[2]/n)
+ new_shape = shape[0], shape[1] * n, int(shape[2] / n)
dataOut.data = numpy.reshape(dataOut.data, new_shape)
dataOut.flagNoData = False
- profileIndex = int(dataOut.nProfiles/n) - 1
+ profileIndex = int(dataOut.nProfiles / n) - 1
else:
@@ -1214,9 +1220,9 @@ class SplitProfiles(Operation):
deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
- dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
+ dataOut.heightList = numpy.arange(dataOut.nHeights / n) * deltaHeight + dataOut.heightList[0]
- dataOut.nProfiles = int(dataOut.nProfiles*n)
+ dataOut.nProfiles = int(dataOut.nProfiles * n)
dataOut.profileIndex = profileIndex
@@ -1239,21 +1245,21 @@ class CombineProfiles(Operation):
if dataOut.flagDataAsBlock:
- #nchannels, nprofiles, nsamples
+ # nchannels, nprofiles, nsamples
shape = dataOut.data.shape
- new_shape = shape[0], shape[1]/n, shape[2]*n
+ new_shape = shape[0], shape[1] / n, shape[2] * n
if shape[1] % n != 0:
- raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))
+ raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[1]))
dataOut.data = numpy.reshape(dataOut.data, new_shape)
dataOut.flagNoData = False
- profileIndex = int(dataOut.nProfiles*n) - 1
+ profileIndex = int(dataOut.nProfiles * n) - 1
else:
- #nchannels, nsamples
+ # nchannels, nsamples
if self.__remData is None:
newData = dataOut.data
else:
@@ -1263,7 +1269,7 @@ class CombineProfiles(Operation):
if self.__profileIndex < n:
self.__remData = newData
- #continue
+ # continue
return
self.__profileIndex = 0
@@ -1272,14 +1278,14 @@ class CombineProfiles(Operation):
dataOut.data = newData
dataOut.flagNoData = False
- profileIndex = dataOut.profileIndex/n
+ profileIndex = dataOut.profileIndex / n
deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
- dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
+ dataOut.heightList = numpy.arange(dataOut.nHeights * n) * deltaHeight + dataOut.heightList[0]
- dataOut.nProfiles = int(dataOut.nProfiles/n)
+ dataOut.nProfiles = int(dataOut.nProfiles / n)
dataOut.profileIndex = profileIndex
@@ -1303,50 +1309,50 @@ class PulsePairVoltage(Operation):
Affected:
self.dataOut.spc
'''
- isConfig = False
- __profIndex = 0
- __initime = None
+ isConfig = False
+ __profIndex = 0
+ __initime = None
__lastdatatime = None
- __buffer = None
- noise = None
- __dataReady = False
- n = None
- __nch = 0
- __nHeis = 0
- removeDC = False
- ipp = None
- lambda_ = 0
-
- def __init__(self,**kwargs):
- Operation.__init__(self,**kwargs)
-
- def setup(self, dataOut, n = None, removeDC=False):
+ __buffer = None
+ noise = None
+ __dataReady = False
+ n = None
+ __nch = 0
+ __nHeis = 0
+ removeDC = False
+ ipp = None
+ lambda_ = 0
+
+ def __init__(self, **kwargs):
+ Operation.__init__(self, **kwargs)
+
+ def setup(self, dataOut, n=None, removeDC=False):
'''
n= Numero de PRF's de entrada
'''
- self.__initime = None
- self.__lastdatatime = 0
- self.__dataReady = False
- self.__buffer = 0
- self.__profIndex = 0
- self.noise = None
- self.__nch = dataOut.nChannels
- self.__nHeis = dataOut.nHeights
- self.removeDC = removeDC
- self.lambda_ = 3.0e8/(9345.0e6)
- self.ippSec = dataOut.ippSeconds
- self.nCohInt = dataOut.nCohInt
- print("IPPseconds",dataOut.ippSeconds)
+ self.__initime = None
+ self.__lastdatatime = 0
+ self.__dataReady = False
+ self.__buffer = 0
+ self.__profIndex = 0
+ self.noise = None
+ self.__nch = dataOut.nChannels
+ self.__nHeis = dataOut.nHeights
+ self.removeDC = removeDC
+ self.lambda_ = 3.0e8 / (9345.0e6)
+ self.ippSec = dataOut.ippSeconds
+ self.nCohInt = dataOut.nCohInt
+ print("IPPseconds", dataOut.ippSeconds)
print("ELVALOR DE n es:", n)
if n == None:
raise ValueError("n should be specified.")
if n != None:
- if n<2:
+ if n < 2:
raise ValueError("n should be greater than 2")
- self.n = n
+ self.n = n
self.__nProf = n
self.__buffer = numpy.zeros((dataOut.nChannels,
@@ -1354,136 +1360,136 @@ class PulsePairVoltage(Operation):
dataOut.nHeights),
dtype='complex')
- def putData(self,data):
+ def putData(self, data):
'''
Add a profile to he __buffer and increase in one the __profiel Index
'''
- self.__buffer[:,self.__profIndex,:]= data
- self.__profIndex += 1
+ self.__buffer[:, self.__profIndex, :] = data
+ self.__profIndex += 1
return
- def pushData(self,dataOut):
+ def pushData(self, dataOut):
'''
Return the PULSEPAIR and the profiles used in the operation
Affected : self.__profileIndex
'''
#----------------- Remove DC-----------------------------------
- if self.removeDC==True:
- mean = numpy.mean(self.__buffer,1)
- tmp = mean.reshape(self.__nch,1,self.__nHeis)
- dc= numpy.tile(tmp,[1,self.__nProf,1])
- self.__buffer = self.__buffer - dc
+ if self.removeDC == True:
+ mean = numpy.mean(self.__buffer, 1)
+ tmp = mean.reshape(self.__nch, 1, self.__nHeis)
+ dc = numpy.tile(tmp, [1, self.__nProf, 1])
+ self.__buffer = self.__buffer - dc
#------------------Calculo de Potencia ------------------------
- pair0 = self.__buffer*numpy.conj(self.__buffer)
- pair0 = pair0.real
- lag_0 = numpy.sum(pair0,1)
+ pair0 = self.__buffer * numpy.conj(self.__buffer)
+ pair0 = pair0.real
+ lag_0 = numpy.sum(pair0, 1)
#------------------Calculo de Ruido x canal--------------------
- self.noise = numpy.zeros(self.__nch)
+ self.noise = numpy.zeros(self.__nch)
for i in range(self.__nch):
- daux = numpy.sort(pair0[i,:,:],axis= None)
- self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
+ daux = numpy.sort(pair0[i, :, :], axis=None)
+ self.noise[i] = hildebrand_sekhon(daux , self.nCohInt)
- self.noise = self.noise.reshape(self.__nch,1)
- self.noise = numpy.tile(self.noise,[1,self.__nHeis])
- noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
- noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
+ self.noise = self.noise.reshape(self.__nch, 1)
+ self.noise = numpy.tile(self.noise, [1, self.__nHeis])
+ noise_buffer = self.noise.reshape(self.__nch, 1, self.__nHeis)
+ noise_buffer = numpy.tile(noise_buffer, [1, self.__nProf, 1])
#------------------ Potencia recibida= P , Potencia senal = S , Ruido= N--
#------------------ P= S+N ,P=lag_0/N ---------------------------------
#-------------------- Power --------------------------------------------------
- data_power = lag_0/(self.n*self.nCohInt)
+ data_power = lag_0 / (self.n * self.nCohInt)
#------------------ Senal ---------------------------------------------------
- data_intensity = pair0 - noise_buffer
- data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
- #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
+ data_intensity = pair0 - noise_buffer
+ data_intensity = numpy.sum(data_intensity, axis=1) * (self.n * self.nCohInt) # *self.nCohInt)
+ # data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
for i in range(self.__nch):
for j in range(self.__nHeis):
- if data_intensity[i][j] < 0:
+ if data_intensity[i][j] < 0:
data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
#----------------- Calculo de Frecuencia y Velocidad doppler--------
- pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
- lag_1 = numpy.sum(pair1,1)
- data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
- data_velocity = (self.lambda_/2.0)*data_freq
+ pair1 = self.__buffer[:, :-1, :] * numpy.conjugate(self.__buffer[:, 1:, :])
+ lag_1 = numpy.sum(pair1, 1)
+ data_freq = (-1 / (2.0 * math.pi * self.ippSec * self.nCohInt)) * numpy.angle(lag_1)
+ data_velocity = (self.lambda_ / 2.0) * data_freq
#---------------- Potencia promedio estimada de la Senal-----------
- lag_0 = lag_0/self.n
- S = lag_0-self.noise
+ lag_0 = lag_0 / self.n
+ S = lag_0 - self.noise
#---------------- Frecuencia Doppler promedio ---------------------
- lag_1 = lag_1/(self.n-1)
- R1 = numpy.abs(lag_1)
+ lag_1 = lag_1 / (self.n - 1)
+ R1 = numpy.abs(lag_1)
#---------------- Calculo del SNR----------------------------------
- data_snrPP = S/self.noise
+ data_snrPP = S / self.noise
for i in range(self.__nch):
for j in range(self.__nHeis):
- if data_snrPP[i][j] < 1.e-20:
+ if data_snrPP[i][j] < 1.e-20:
data_snrPP[i][j] = 1.e-20
#----------------- Calculo del ancho espectral ----------------------
- L = S/R1
- L = numpy.where(L<0,1,L)
- L = numpy.log(L)
- tmp = numpy.sqrt(numpy.absolute(L))
- data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
- n = self.__profIndex
-
- self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
+ L = S / R1
+ L = numpy.where(L < 0, 1, L)
+ L = numpy.log(L)
+ tmp = numpy.sqrt(numpy.absolute(L))
+ data_specwidth = (self.lambda_ / (2 * math.sqrt(2) * math.pi * self.ippSec * self.nCohInt)) * tmp * numpy.sign(L)
+ n = self.__profIndex
+
+ self.__buffer = numpy.zeros((self.__nch, self.__nProf, self.__nHeis), dtype='complex')
self.__profIndex = 0
- return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
+ return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, n
- def pulsePairbyProfiles(self,dataOut):
+ def pulsePairbyProfiles(self, dataOut):
- self.__dataReady = False
- data_power = None
- data_intensity = None
- data_velocity = None
- data_specwidth = None
- data_snrPP = None
+ self.__dataReady = False
+ data_power = None
+ data_intensity = None
+ data_velocity = None
+ data_specwidth = None
+ data_snrPP = None
self.putData(data=dataOut.data)
- if self.__profIndex == self.n:
- data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
- self.__dataReady = True
+ if self.__profIndex == self.n:
+ data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, n = self.pushData(dataOut=dataOut)
+ self.__dataReady = True
return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
- def pulsePairOp(self, dataOut, datatime= None):
+ def pulsePairOp(self, dataOut, datatime=None):
if self.__initime == None:
self.__initime = datatime
data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
- self.__lastdatatime = datatime
+ self.__lastdatatime = datatime
if data_power is None:
- return None, None, None,None,None,None
+ return None, None, None, None, None, None
- avgdatatime = self.__initime
- deltatime = datatime - self.__lastdatatime
+ avgdatatime = self.__initime
+ deltatime = datatime - self.__lastdatatime
self.__initime = datatime
return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
- def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
+ def run(self, dataOut, n=None, removeDC=False, overlapping=False, **kwargs):
if not self.isConfig:
- self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
- self.isConfig = True
- data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
- dataOut.flagNoData = True
+ self.setup(dataOut=dataOut, n=n , removeDC=removeDC , **kwargs)
+ self.isConfig = True
+ data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
+ dataOut.flagNoData = True
if self.__dataReady:
- dataOut.nCohInt *= self.n
- dataOut.dataPP_POW = data_intensity # S
- dataOut.dataPP_POWER = data_power # P
- dataOut.dataPP_DOP = data_velocity
- dataOut.dataPP_SNR = data_snrPP
- dataOut.dataPP_WIDTH = data_specwidth
- dataOut.PRFbyAngle = self.n #numero de PRF*cada angulo rotado que equivale a un tiempo.
- dataOut.utctime = avgdatatime
- dataOut.flagNoData = False
+ dataOut.nCohInt *= self.n
+ dataOut.dataPP_POW = data_intensity # S
+ dataOut.dataPP_POWER = data_power # P
+ dataOut.dataPP_DOP = data_velocity
+ dataOut.dataPP_SNR = data_snrPP
+ dataOut.dataPP_WIDTH = data_specwidth
+ dataOut.PRFbyAngle = self.n # numero de PRF*cada angulo rotado que equivale a un tiempo.
+ dataOut.utctime = avgdatatime
+ dataOut.flagNoData = False
return dataOut
diff --git a/schainpy/model/proc/pxproc_parameters.py b/schainpy/model/proc/pxproc_parameters.py
index 9ba0611..0bb50ff 100644
--- a/schainpy/model/proc/pxproc_parameters.py
+++ b/schainpy/model/proc/pxproc_parameters.py
@@ -49,7 +49,7 @@ class PXParametersProc(ProcessingUnit):
self.dataOut.data_param = numpy.array([self.dataOut.data[var] for var in self.dataOut.parameters])
self.dataOut.data_param[self.dataOut.data_param == self.dataOut.missing] = numpy.nan
- if mode.upper()=='E':
+ if mode.upper() == 'E':
self.dataOut.heightList = self.dataOut.data['Azimuth']
else:
self.dataOut.heightList = self.dataOut.data['Elevation']
@@ -61,4 +61,4 @@ class PXParametersProc(ProcessingUnit):
meta[attr] = getattr(self.dataOut, attr)
meta['mode'] = mode
- self.dataOut.meta = meta
\ No newline at end of file
+ self.dataOut.meta = meta
diff --git a/schainpy/model/utils/jroutils_ftp.py b/schainpy/model/utils/jroutils_ftp.py
index 8be1671..f2e8df6 100644
--- a/schainpy/model/utils/jroutils_ftp.py
+++ b/schainpy/model/utils/jroutils_ftp.py
@@ -64,9 +64,9 @@ class Remote(Thread):
self.stopFlag = False
- print("[Remote Server] Opening server: %s" %self.__server)
+ print("[Remote Server] Opening server: %s" % self.__server)
if self.open(self.__server, self.__username, self.__password, self.__remotefolder):
- print("[Remote Server] %s server was opened successfully" %self.__server)
+ print("[Remote Server] %s server was opened successfully" % self.__server)
self.close()
@@ -132,7 +132,7 @@ class Remote(Thread):
Returns:
0 in error case else 1
"""
- print("[Remote Server] Uploading %s to %s:%s" %(fullfilename, self.server, self.remotefolder))
+ print("[Remote Server] Uploading %s to %s:%s" % (fullfilename, self.server, self.remotefolder))
if not self.status:
return 0
@@ -144,7 +144,7 @@ class Remote(Thread):
return 0
if not self.sendFile(fullfilename):
- print("[Remote Server] Error uploading file %s" %fullfilename)
+ print("[Remote Server] Error uploading file %s" % fullfilename)
return 0
print("[Remote Server] upload finished successfully")
@@ -184,7 +184,7 @@ class Remote(Thread):
return
if not self.cd(self.remotefolder):
- raise ValueError("Could not access to the new remote directory: %s" %self.remotefolder)
+ raise ValueError("Could not access to the new remote directory: %s" % self.remotefolder)
while True:
@@ -199,7 +199,7 @@ class Remote(Thread):
# self.bussy = True
self.mutex.acquire()
- print("[Remote Server] Opening %s" %self.__server)
+ print("[Remote Server] Opening %s" % self.__server)
if not self.open(self.__server, self.__username, self.__password, self.__remotefolder):
self.mutex.release()
continue
@@ -207,7 +207,7 @@ class Remote(Thread):
for thisFile in self.fileList:
self.upload(thisFile, self.remotefolder)
- print("[Remote Server] Closing %s" %self.__server)
+ print("[Remote Server] Closing %s" % self.__server)
self.close()
self.mutex.release()
@@ -261,7 +261,7 @@ class FTPClient(Remote):
try:
ftpClientObj = ftplib.FTP(server)
except ftplib.all_errors as e:
- print("[FTP Server]: FTP server connection fail: %s" %server)
+ print("[FTP Server]: FTP server connection fail: %s" % server)
print("[FTP Server]:", e)
self.status = 0
return 0
@@ -279,7 +279,7 @@ class FTPClient(Remote):
try:
ftpClientObj.cwd(remotefolder)
except ftplib.all_errors:
- print("[FTP Server]: FTP remote folder is invalid: %s" %remotefolder)
+ print("[FTP Server]: FTP remote folder is invalid: %s" % remotefolder)
remotefolder = ftpClientObj.pwd()
self.server = server
@@ -316,7 +316,7 @@ class FTPClient(Remote):
try:
self.__ftpClientObj.mkd(dirname)
except ftplib.all_errors:
- print("[FTP Server]: Error creating remote folder: %s" %remotefolder)
+ print("[FTP Server]: Error creating remote folder: %s" % remotefolder)
return 0
return 1
@@ -343,7 +343,7 @@ class FTPClient(Remote):
try:
self.__ftpClientObj.cwd(remotefolder)
except ftplib.all_errors:
- print('[FTP Server]: Error changing to %s' %remotefolder)
+ print('[FTP Server]: Error changing to %s' % remotefolder)
print('[FTP Server]: Trying to create remote folder')
if not self.mkdir(remotefolder):
@@ -368,7 +368,7 @@ class FTPClient(Remote):
filename = os.path.basename(fullfilename)
- command = "STOR %s" %filename
+ command = "STOR %s" % filename
try:
self.__ftpClientObj.storbinary(command, fp)
@@ -463,7 +463,7 @@ class SSHClient(Remote):
self.status = 1
if not self.cd(remotefolder):
- raise ValueError("[SSH Server]: Could not access to remote folder: %s" %remotefolder)
+ raise ValueError("[SSH Server]: Could not access to remote folder: %s" % remotefolder)
return 0
self.remotefolder = remotefolder
@@ -516,7 +516,7 @@ class SSHClient(Remote):
0 in error case else 1
"""
- command = 'mkdir %s' %remotefolder
+ command = 'mkdir %s' % remotefolder
return self.__execute(command)
@@ -545,8 +545,8 @@ class SSHClient(Remote):
if remotefolder == self.remotefolder:
return 1
- chk_command = "cd %s; pwd" %remotefolder
- mkdir_command = "mkdir %s" %remotefolder
+ chk_command = "cd %s; pwd" % remotefolder
+ mkdir_command = "mkdir %s" % remotefolder
if not self.__execute(chk_command):
if not self.__execute(mkdir_command):
@@ -569,7 +569,7 @@ class SSHClient(Remote):
return 0
remotefile = os.path.join(self.remotefolder, os.path.split(fullfilename)[-1])
- command = 'chmod 775 %s' %remotefile
+ command = 'chmod 775 %s' % remotefile
return self.__execute(command)
@@ -596,7 +596,7 @@ class SendToServer(ProcessingUnit):
self.clientObj = SSHClient(server, username, password, remotefolder, period)
if not self.clientObj:
- raise ValueError("%s has been chosen as remote access protocol but it is not valid" %protocol)
+ raise ValueError("%s has been chosen as remote access protocol but it is not valid" % protocol)
self.clientObj.start()
@@ -607,16 +607,16 @@ class SendToServer(ProcessingUnit):
else:
folderList = self.localfolder
- #Remove duplicate items
+ # Remove duplicate items
folderList = list(set(folderList))
fullfilenameList = []
for thisFolder in folderList:
- print("[Remote Server]: Searching files on %s" %thisFolder)
+ print("[Remote Server]: Searching files on %s" % thisFolder)
- filenameList = glob.glob1(thisFolder, '*%s' %self.ext)
+ filenameList = glob.glob1(thisFolder, '*%s' % self.ext)
if len(filenameList) < 1:
@@ -628,8 +628,8 @@ class SendToServer(ProcessingUnit):
if fullfilename in fullfilenameList:
continue
- #Only files modified in the last 30 minutes are considered
- if os.path.getmtime(fullfilename) < time.time() - 30*60:
+ # Only files modified in the last 30 minutes are considered
+ if os.path.getmtime(fullfilename) < time.time() - 30 * 60:
continue
fullfilenameList.append(fullfilename)
@@ -667,7 +667,7 @@ class FTP(object):
Written by "Daniel Suarez":mailto:daniel.suarez@jro.igp.gob.pe Oct. 26, 2010
"""
- def __init__(self,server = None, username=None, password=None, remotefolder=None):
+ def __init__(self, server=None, username=None, password=None, remotefolder=None):
"""
This method is used to setting parameters for FTP and establishing connection to remote server
@@ -692,7 +692,7 @@ class FTP(object):
"""
- if ((server == None) and (username==None) and (password==None) and (remotefolder==None)):
+ if ((server == None) and (username == None) and (password == None) and (remotefolder == None)):
server, username, password, remotefolder = self.parmsByDefault()
self.server = server
@@ -705,7 +705,7 @@ class FTP(object):
try:
self.ftp = ftplib.FTP(self.server)
- self.ftp.login(self.username,self.password)
+ self.ftp.login(self.username, self.password)
self.ftp.cwd(self.remotefolder)
# print 'Connect to FTP Server: Successfully'
@@ -734,7 +734,7 @@ class FTP(object):
self.fileList = []
self.folderList = []
- #only for test
+ # only for test
for f in self.dirList:
name, ext = os.path.splitext(f)
if ext != '':
@@ -750,7 +750,7 @@ class FTP(object):
return server, username, password, remotefolder
- def mkd(self,dirname):
+ def mkd(self, dirname):
"""
mkd is used to make directory in remote server
@@ -763,13 +763,13 @@ class FTP(object):
try:
self.ftp.mkd(dirname)
except:
- print('Error creating remote folder:%s'%dirname)
+ print('Error creating remote folder:%s' % dirname)
return 1
return 0
- def delete(self,filename):
+ def delete(self, filename):
"""
delete is used to delete file in current working directory of remote server
@@ -783,12 +783,12 @@ class FTP(object):
try:
self.ftp.delete(filename)
except:
- print('Error deleting remote file:%s'%filename)
+ print('Error deleting remote file:%s' % filename)
return 1
return 0
- def download(self,filename,localfolder):
+ def download(self, filename, localfolder):
"""
download is used to downloading file from remote folder into local folder
@@ -805,11 +805,11 @@ class FTP(object):
if not(filename in self.fileList):
- print('filename:%s not exists'%filename)
+ print('filename:%s not exists' % filename)
self.status = 1
return self.status
- newfilename = os.path.join(localfolder,filename)
+ newfilename = os.path.join(localfolder, filename)
self.file = open(newfilename, 'wb')
@@ -827,14 +827,14 @@ class FTP(object):
return self.status
- def __handleDownload(self,block):
+ def __handleDownload(self, block):
"""
__handleDownload is used to handle writing file
"""
self.file.write(block)
- def upload(self,filename,remotefolder=None):
+ def upload(self, filename, remotefolder=None):
"""
upload is used to uploading local file to remote directory
@@ -872,13 +872,13 @@ class FTP(object):
self.file.close()
- #back to initial directory in __init__()
+ # back to initial directory in __init__()
self.ftp.cwd(self.remotefolder)
return self.status
- def dir(self,remotefolder):
+ def dir(self, remotefolder):
"""
dir is used to change working directory of remote server and get folder and file list
@@ -902,7 +902,7 @@ class FTP(object):
print('Error Change to ' + self.remotefolder)
infoList = None
self.folderList = None
- return infoList,self.folderList
+ return infoList, self.folderList
self.dirList = []
@@ -914,27 +914,27 @@ class FTP(object):
print("no files in this directory")
infoList = None
self.folderList = None
- return infoList,self.folderList
+ return infoList, self.folderList
except ftplib.all_errors:
print('Error Displaying Dir-Files')
infoList = None
self.folderList = None
- return infoList,self.folderList
+ return infoList, self.folderList
infoList = []
self.fileList = []
self.folderList = []
for f in self.dirList:
- name,ext = os.path.splitext(f)
+ name, ext = os.path.splitext(f)
if ext != '':
self.fileList.append(f)
- value = (f,self.ftp.size(f))
+ value = (f, self.ftp.size(f))
infoList.append(value)
if ext == '':
self.folderList.append(f)
- return infoList,self.folderList
+ return infoList, self.folderList
def close(self):
@@ -989,8 +989,8 @@ class SendByFTP(Operation):
return
def filterByExt(self, ext, localfolder):
- fnameList = glob.glob1(localfolder,ext)
- self.filenameList = [os.path.join(localfolder,x) for x in fnameList]
+ fnameList = glob.glob1(localfolder, ext)
+ self.filenameList = [os.path.join(localfolder, x) for x in fnameList]
if len(self.filenameList) == 0:
self.status = 0
@@ -1005,4 +1005,4 @@ class SendByFTP(Operation):
self.counter = 0
- self.status = 1
\ No newline at end of file
+ self.status = 1
diff --git a/schainpy/model/utils/jroutils_publish.py b/schainpy/model/utils/jroutils_publish.py
index 8b229fb..5d0c13f 100644
--- a/schainpy/model/utils/jroutils_publish.py
+++ b/schainpy/model/utils/jroutils_publish.py
@@ -23,26 +23,26 @@ from schainpy.utils import log
PLOT_CODES = {
- 'rti': 0, # Range time intensity (RTI).
- 'spc': 1, # Spectra (and Cross-spectra) information.
- 'cspc': 2, # Cross-Correlation information.
- 'coh': 3, # Coherence map.
- 'base': 4, # Base lines graphic.
- 'row': 5, # Row Spectra.
- 'total': 6, # Total Power.
- 'drift': 7, # Drifts graphics.
- 'height': 8, # Height profile.
- 'phase': 9, # Signal Phase.
+ 'rti': 0, # Range time intensity (RTI).
+ 'spc': 1, # Spectra (and Cross-spectra) information.
+ 'cspc': 2, # Cross-Correlation information.
+ 'coh': 3, # Coherence map.
+ 'base': 4, # Base lines graphic.
+ 'row': 5, # Row Spectra.
+ 'total': 6, # Total Power.
+ 'drift': 7, # Drifts graphics.
+ 'height': 8, # Height profile.
+ 'phase': 9, # Signal Phase.
'power': 16,
'noise': 17,
'beacon': 18,
'wind': 22,
'skymap': 23,
'Unknown': 24,
- 'V-E': 25, # PIP Velocity.
- 'Z-E': 26, # PIP Reflectivity.
- 'V-A': 27, # RHI Velocity.
- 'Z-A': 28, # RHI Reflectivity.
+ 'V-E': 25, # PIP Velocity.
+ 'Z-E': 26, # PIP Reflectivity.
+ 'V-A': 27, # RHI Velocity.
+ 'Z-A': 28, # RHI Reflectivity.
}
def get_plot_code(s):
@@ -247,7 +247,7 @@ class SendToFTP(Operation):
try:
if not self.ready:
- if time.time()-self.current_time < self.timeout:
+ if time.time() - self.current_time < self.timeout:
return
else:
self.current_time = time.time()
@@ -281,8 +281,8 @@ class SendToFTP(Operation):
def upload(self, src, dst):
log.log('Uploading {} -> {} '.format(
- src.split('/')[-1], dst.split('/')[-1]),
- self.name,
+ src.split('/')[-1], dst.split('/')[-1]),
+ self.name,
nl=False
)
@@ -338,10 +338,10 @@ class SendToFTP(Operation):
if not self.isConfig:
self.setup(
- server=server,
- username=username,
- password=password,
- timeout=timeout,
+ server=server,
+ username=username,
+ password=password,
+ timeout=timeout,
**kwargs
)
self.isConfig = True
diff --git a/schainpy/model/utils/scp.py b/schainpy/model/utils/scp.py
index 734acb7..ad9b60a 100644
--- a/schainpy/model/utils/scp.py
+++ b/schainpy/model/utils/scp.py
@@ -136,7 +136,7 @@ class SCPClient(object):
self._pushed = 0
self.channel.settimeout(self.socket_timeout)
scp_command = (b'scp -t ', b'scp -r -t ')[recursive]
- self.channel.exec_command(scp_command +
+ self.channel.exec_command(scp_command +
self.sanitize(asbytes(remote_path)))
self._recv_confirm()
@@ -174,20 +174,20 @@ class SCPClient(object):
not os.path.isdir(os.path.abspath(local_path)))
if len(remote_path) > 1:
if not os.path.exists(self._recv_dir):
- raise SCPException("Local path '%s' does not exist" %
+ raise SCPException("Local path '%s' does not exist" %
asunicode(self._recv_dir))
elif not os.path.isdir(self._recv_dir):
- raise SCPException("Local path '%s' is not a directory" %
+ raise SCPException("Local path '%s' is not a directory" %
asunicode(self._recv_dir))
rcsv = (b'', b' -r')[recursive]
prsv = (b'', b' -p')[preserve_times]
self.channel = self._open()
self._pushed = 0
self.channel.settimeout(self.socket_timeout)
- self.channel.exec_command(b"scp" +
- rcsv +
- prsv +
- b" -f " +
+ self.channel.exec_command(b"scp" +
+ rcsv +
+ prsv +
+ b" -f " +
b' '.join(remote_path))
self._recv_all()
self.close()
@@ -227,7 +227,7 @@ class SCPClient(object):
# The protocol can't handle \n in the filename.
# Quote them as the control sequence \^J for now,
# which is how openssh handles it.
- self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
+ self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
basename.replace(b'\n', b'\\^J') + b"\n")
self._recv_confirm()
file_pos = 0
@@ -287,7 +287,7 @@ class SCPClient(object):
basename = asbytes(os.path.basename(directory))
if self.preserve_times:
self._send_time(mtime, atime)
- self.channel.sendall(('D%s 0 ' % mode).encode('ascii') +
+ self.channel.sendall(('D%s 0 ' % mode).encode('ascii') +
basename.replace(b'\n', b'\\^J') + b'\n')
self._recv_confirm()
self._pushed += 1
@@ -461,4 +461,4 @@ class SCPClient(object):
class SCPException(Exception):
"""SCP exception class"""
- pass
\ No newline at end of file
+ pass
diff --git a/schainpy/scripts/JASMET30_MetDet.py b/schainpy/scripts/JASMET30_MetDet.py
index 106a7a8..ab8e719 100644
--- a/schainpy/scripts/JASMET30_MetDet.py
+++ b/schainpy/scripts/JASMET30_MetDet.py
@@ -4,27 +4,27 @@ import os, sys
from schainpy.controller import Project
controllerObj = Project()
-controllerObj.setup(id = '002', name='script02', description="JASMET Meteor Detection")
+controllerObj.setup(id='002', name='script02', description="JASMET Meteor Detection")
#-------------------------------------- Setup -----------------------------------------
-#Verificar estas variables
+# Verificar estas variables
-#Path para los archivos
+# Path para los archivos
# path = '/mnt/jars/2016_08/NOCHE'
# path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/DIA'
# path = '/media/joscanoa/DATA_JASMET/JASMET/2016_08/NOCHE'
path = '/home/nanosat/data/jasmet'
-#Path para los graficos
-pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/graphics')
+# Path para los graficos
+pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30/201608/graphics')
-#Path para los archivos HDF5 de meteoros
-pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30/201608/meteor')
+# Path para los archivos HDF5 de meteoros
+pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30/201608/meteor')
-#Fechas para busqueda de archivos
+# Fechas para busqueda de archivos
startDate = '2010/08/29'
endDate = '2017/09/11'
-#Horas para busqueda de archivos
+# Horas para busqueda de archivos
startTime = '00:00:00'
endTime = '23:59:59'
@@ -84,9 +84,9 @@ opObj10.addParameter(name='hmax', value='120', format='float')
opObj12 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
opObj12.addParameter(name='path', value=pathfile)
opObj12.addParameter(name='blocksPerFile', value='1000', format='int')
-opObj12.addParameter(name='metadataList',value='type,heightList,paramInterval,timeZone',format='list')
-opObj12.addParameter(name='dataList',value='data_param,utctime',format='list')
-opObj12.addParameter(name='mode',value='2',format='int')
+opObj12.addParameter(name='metadataList', value='type,heightList,paramInterval,timeZone', format='list')
+opObj12.addParameter(name='dataList', value='data_param,utctime', format='list')
+opObj12.addParameter(name='mode', value='2', format='int')
#--------------------------------------------------------------------------------------------------
diff --git a/schainpy/scripts/JASMET30_PhaseCal.py b/schainpy/scripts/JASMET30_PhaseCal.py
index 1b0da1f..4668397 100644
--- a/schainpy/scripts/JASMET30_PhaseCal.py
+++ b/schainpy/scripts/JASMET30_PhaseCal.py
@@ -8,24 +8,24 @@ sys.path.insert(0, path)
from schainpy.controller import Project
controllerObj = Project()
-controllerObj.setup(id = '004', name='script04', description="JASMET Phase Calibration")
+controllerObj.setup(id='004', name='script04', description="JASMET Phase Calibration")
#-------------------------------------- Setup -----------------------------------------
-#Verificar estas variables
+# Verificar estas variables
-#Path donde estan los archivos HDF5 de meteoros
-path = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/meteor')
+# Path donde estan los archivos HDF5 de meteoros
+path = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/meteor')
-#Path para los graficos
-pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/graphics')
+# Path para los graficos
+pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/graphics')
-#Path donde se almacenaran las fases calculadas
-pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/phase')
+# Path donde se almacenaran las fases calculadas
+pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/phase')
-#Fechas para busqueda de archivos
+# Fechas para busqueda de archivos
startDate = '2016/08/29'
endDate = '2016/09/11'
-#Horas para busqueda de archivos
+# Horas para busqueda de archivos
startTime = '00:00:00'
endTime = '23:59:59'
@@ -61,8 +61,8 @@ opObj32.addParameter(name='figpath', value=pathfig, format='str')
opObj33 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
opObj33.addParameter(name='path', value=pathfile)
opObj33.addParameter(name='blocksPerFile', value='1000', format='int')
-opObj33.addParameter(name='metadataList',value='type,outputInterval,timeZone',format='list')
-opObj33.addParameter(name='dataList',value='data_output,utctime',format='list')
+opObj33.addParameter(name='metadataList', value='type,outputInterval,timeZone', format='list')
+opObj33.addParameter(name='dataList', value='data_output,utctime', format='list')
# # opObj25.addParameter(name='mode',value='1,0,0',format='intlist')
-controllerObj.start()
\ No newline at end of file
+controllerObj.start()
diff --git a/schainpy/scripts/JASMET30_Winds.py b/schainpy/scripts/JASMET30_Winds.py
index f489be5..a6d1745 100644
--- a/schainpy/scripts/JASMET30_Winds.py
+++ b/schainpy/scripts/JASMET30_Winds.py
@@ -8,28 +8,28 @@ sys.path.insert(0, path)
from schainpy.controller import Project
controllerObj = Project()
-controllerObj.setup(id = '005', name='script05', description="JASMET Wind Estimation")
+controllerObj.setup(id='005', name='script05', description="JASMET Wind Estimation")
#-------------------------------------- Setup -----------------------------------------
-#Verificar estas variables
+# Verificar estas variables
-#Path donde estan los archivos HDF5 de meteoros
-path = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/meteor')
+# Path donde estan los archivos HDF5 de meteoros
+path = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/meteor')
-#Path para los graficos
-pathfig = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/graphics')
+# Path para los graficos
+pathfig = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/graphics')
-#Path donde se almacenaran las estimaciones de vientos
-pathfile = os.path.join(os.environ['HOME'],'Pictures/JASMET30_mp/201608/phase')
+# Path donde se almacenaran las estimaciones de vientos
+pathfile = os.path.join(os.environ['HOME'], 'Pictures/JASMET30_mp/201608/phase')
-#Fechas para busqueda de archivos
+# Fechas para busqueda de archivos
startDate = '2016/08/29'
endDate = '2016/09/11'
-#Horas para busqueda de archivos
+# Horas para busqueda de archivos
startTime = '00:00:00'
endTime = '23:59:59'
-#Offsets optimos obtenidos con OptimumOffset.py
+# Offsets optimos obtenidos con OptimumOffset.py
phaseOffsets = '-2.84, -1.77, 11.94, 9.71'
phaseOffsets = '-5.86, -0.93, -7.29, 23.35'
#------------------------------------------------------------------------------------------------
@@ -43,7 +43,7 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='ParamReader',
#--------------------------------------------------------------------------------------------------
procUnitConfObj1 = controllerObj.addProcUnit(datatype='ParametersProc', inputId=readUnitConfObj.getId())
-opObj10 = procUnitConfObj1.addOperation(name='CorrectSMPhases',optype='other')
+opObj10 = procUnitConfObj1.addOperation(name='CorrectSMPhases', optype='other')
opObj10.addParameter(name='phaseOffsets', value=phaseOffsets, format='floatlist')
opObj13 = procUnitConfObj1.addOperation(name='SkyMapPlot', optype='other')
@@ -67,7 +67,7 @@ opObj23 = procUnitConfObj1.addOperation(name='WindProfilerPlot', optype='other')
opObj23.addParameter(name='id', value='2', format='int')
opObj23.addParameter(name='wintitle', value='Wind Profiler', format='str')
opObj23.addParameter(name='save', value='1', format='bool')
-opObj23.addParameter(name='figpath', value = pathfig, format='str')
+opObj23.addParameter(name='figpath', value=pathfig, format='str')
opObj23.addParameter(name='zmin', value='-140', format='int')
opObj23.addParameter(name='zmax', value='140', format='int')
opObj23.addParameter(name='xmin', value='0', format='float')
@@ -78,8 +78,8 @@ opObj23.addParameter(name='ymax', value='110', format='float')
opObj33 = procUnitConfObj1.addOperation(name='ParamWriter', optype='other')
opObj33.addParameter(name='path', value=pathfile)
opObj33.addParameter(name='blocksPerFile', value='1000', format='int')
-opObj33.addParameter(name='metadataList',value='type,outputInterval,timeZone',format='list')
-opObj33.addParameter(name='dataList',value='data_output,utctime',format='list')
+opObj33.addParameter(name='metadataList', value='type,outputInterval,timeZone', format='list')
+opObj33.addParameter(name='dataList', value='data_output,utctime', format='list')
#--------------------------------------------------------------------------------------------------
-controllerObj.start()
\ No newline at end of file
+controllerObj.start()
diff --git a/schainpy/scripts/PPD.py b/schainpy/scripts/PPD.py
index 0c66244..6209580 100644
--- a/schainpy/scripts/PPD.py
+++ b/schainpy/scripts/PPD.py
@@ -17,13 +17,13 @@ def fiber(cursor, skip, q, dt):
startTime="00:00:00",
endTime="23:59:59",
online=0,
- #set=1426485881,
+ # set=1426485881,
walk=1,
queue=q,
cursor=cursor,
skip=skip,
verbose=1
- #timezone=-5*3600
+ # timezone=-5*3600
)
# #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
diff --git a/schainpy/scripts/USRP_PLOT_THOR.py b/schainpy/scripts/USRP_PLOT_THOR.py
index d2e2eb0..5f90241 100644
--- a/schainpy/scripts/USRP_PLOT_THOR.py
+++ b/schainpy/scripts/USRP_PLOT_THOR.py
@@ -6,36 +6,36 @@ import os, sys
import datetime
import time
-#path = os.path.dirname(os.getcwd())
-#path = os.path.dirname(path)
-#sys.path.insert(0, path)
+# path = os.path.dirname(os.getcwd())
+# path = os.path.dirname(path)
+# sys.path.insert(0, path)
from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
#######################################################################
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
#######################################################################
-#path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
+# path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
-path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
+path = '/home/soporte/data_hdf5'  # with clock 35.16 db noise
figpath = '/home/soporte/data_hdf5_imag'
-#remotefolder = "/home/wmaster/graficos"
+# remotefolder = "/home/wmaster/graficos"
#######################################################################
################# RANGO DE PLOTEO######################################
#######################################################################
dBmin = '30'
dBmax = '60'
xmin = '0'
-xmax ='24'
+xmax = '24'
ymin = '0'
ymax = '600'
#######################################################################
@@ -50,15 +50,15 @@ yesterday = str2.strftime("%Y/%m/%d")
#######################################################################
readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
path=path,
- startDate="2019/01/01",#today,
- endDate="2109/12/30",#today,
+ startDate="2019/01/01", # today,
+ endDate="2109/12/30", # today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1,
- ippKm = 1000)
+ ippKm=1000)
opObj11 = readUnitConfObj.addOperation(name='printInfo')
opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
@@ -71,31 +71,31 @@ procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
# codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
# '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
-#opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
-#opObj11.addParameter(name='frequency', value='30e6', format='float')
+# opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
+# opObj11.addParameter(name='frequency', value='30e6', format='float')
-#opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
-#opObj10.addParameter(name='id', value='10', format='int')
-##opObj10.addParameter(name='xmin', value='0', format='int')
-##opObj10.addParameter(name='xmax', value='50', format='int')
-#opObj10.addParameter(name='type', value='iq')
-#opObj10.addParameter(name='ymin', value='-5000', format='int')
-##opObj10.addParameter(name='ymax', value='8500', format='int')
+# opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
+# opObj10.addParameter(name='id', value='10', format='int')
+# #opObj10.addParameter(name='xmin', value='0', format='int')
+# #opObj10.addParameter(name='xmax', value='50', format='int')
+# opObj10.addParameter(name='type', value='iq')
+# opObj10.addParameter(name='ymin', value='-5000', format='int')
+# #opObj10.addParameter(name='ymax', value='8500', format='int')
-#opObj10 = procUnitConfObjA.addOperation(name='setH0')
-#opObj10.addParameter(name='h0', value='-5000', format='float')
+# opObj10 = procUnitConfObjA.addOperation(name='setH0')
+# opObj10.addParameter(name='h0', value='-5000', format='float')
-#opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
-#opObj11.addParameter(name='window', value='1', format='int')
+# opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
+# opObj11.addParameter(name='window', value='1', format='int')
-#codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
-#opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
-#opObj11.addParameter(name='code', value=codigo, format='floatlist')
-#opObj11.addParameter(name='nCode', value='1', format='int')
-#opObj11.addParameter(name='nBaud', value='28', format='int')
+# codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
+# opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
+# opObj11.addParameter(name='code', value=codigo, format='floatlist')
+# opObj11.addParameter(name='nCode', value='1', format='int')
+# opObj11.addParameter(name='nBaud', value='28', format='int')
-#opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
-#opObj11.addParameter(name='n', value='100', format='int')
+# opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
+# opObj11.addParameter(name='n', value='100', format='int')
#######################################################################
########## OPERACIONES DOMINIO DE LA FRECUENCIA########################
@@ -103,41 +103,41 @@ procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
procUnitConfObjSousySpectra = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
procUnitConfObjSousySpectra.addParameter(name='nFFTPoints', value='100', format='int')
procUnitConfObjSousySpectra.addParameter(name='nProfiles', value='100', format='int')
-#procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
+# procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
-#opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
-#opObj13.addParameter(name='mode', value='2', format='int')
+# opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
+# opObj13.addParameter(name='mode', value='2', format='int')
-#opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
-#opObj11.addParameter(name='n', value='60', format='float')
+# opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
+# opObj11.addParameter(name='n', value='60', format='float')
#######################################################################
########## PLOTEO DOMINIO DE LA FRECUENCIA#############################
#######################################################################
-#SpectraPlot
+# SpectraPlot
opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraPlot', optype='external')
opObj11.addParameter(name='id', value='1', format='int')
opObj11.addParameter(name='wintitle', value='Spectra', format='str')
-#opObj11.addParameter(name='xmin', value=-0.01, format='float')
-#opObj11.addParameter(name='xmax', value=0.01, format='float')
-#opObj11.addParameter(name='zmin', value=dBmin, format='int')
-#opObj11.addParameter(name='zmax', value=dBmax, format='int')
-#opObj11.addParameter(name='ymin', value=ymin, format='int')
-#opObj11.addParameter(name='ymax', value=ymax, format='int')
+# opObj11.addParameter(name='xmin', value=-0.01, format='float')
+# opObj11.addParameter(name='xmax', value=0.01, format='float')
+# opObj11.addParameter(name='zmin', value=dBmin, format='int')
+# opObj11.addParameter(name='zmax', value=dBmax, format='int')
+# opObj11.addParameter(name='ymin', value=ymin, format='int')
+# opObj11.addParameter(name='ymax', value=ymax, format='int')
opObj11.addParameter(name='showprofile', value='1', format='int')
opObj11.addParameter(name='save', value=figpath, format='str')
opObj11.addParameter(name='save_period', value=10, format='int')
-#RTIPLOT
+# RTIPLOT
opObj11 = procUnitConfObjSousySpectra.addOperation(name='RTIPlot', optype='external')
opObj11.addParameter(name='id', value='2', format='int')
opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
-#opObj11.addParameter(name='zmin', value=dBmin, format='int')
-#opObj11.addParameter(name='zmax', value=dBmax, format='int')
-#opObj11.addParameter(name='ymin', value=ymin, format='int')
-#opObj11.addParameter(name='ymax', value=ymax, format='int')
+# opObj11.addParameter(name='zmin', value=dBmin, format='int')
+# opObj11.addParameter(name='zmax', value=dBmax, format='int')
+# opObj11.addParameter(name='ymin', value=ymin, format='int')
+# opObj11.addParameter(name='ymax', value=ymax, format='int')
opObj11.addParameter(name='xmin', value=0, format='int')
opObj11.addParameter(name='xmax', value=23, format='int')
@@ -171,9 +171,9 @@ opObj11.addParameter(name='save_period', value=10, format='int')
#######################################################################
############### UNIDAD DE ESCRITURA ###################################
#######################################################################
-#opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
-#opObj11.addParameter(name='path', value=wr_path)
-#opObj11.addParameter(name='blocksPerFile', value='50', format='int')
+# opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
+# opObj11.addParameter(name='path', value=wr_path)
+# opObj11.addParameter(name='blocksPerFile', value='50', format='int')
print ("Escribiendo el archivo XML")
print ("Leyendo el archivo XML")
diff --git a/schainpy/scripts/julia_mp.py b/schainpy/scripts/julia_mp.py
index 2a8eaf1..bafb67d 100644
--- a/schainpy/scripts/julia_mp.py
+++ b/schainpy/scripts/julia_mp.py
@@ -17,13 +17,13 @@ def fiber(cursor, skip, q, dt):
startTime="00:00:00",
endTime="23:59:59",
online=0,
- #set=1426485881,
+ # set=1426485881,
delay=10,
walk=1,
queue=q,
cursor=cursor,
skip=skip,
- #timezone=-5*3600
+ # timezone=-5*3600
)
# #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
diff --git a/schainpy/scripts/optimun_offset.py b/schainpy/scripts/optimun_offset.py
index fa6e660..25a4526 100644
--- a/schainpy/scripts/optimun_offset.py
+++ b/schainpy/scripts/optimun_offset.py
@@ -12,34 +12,34 @@ def findFiles(path):
fileList = []
for thisPath in os.listdir(path):
- dirList.append(os.path.join(path,thisPath))
+ dirList.append(os.path.join(path, thisPath))
dirList.sort()
for thisDirectory in dirList:
files = glob.glob1(thisDirectory, "*.hdf5")
files.sort()
for thisFile in files:
- fileList.append(os.path.join(thisDirectory,thisFile))
+ fileList.append(os.path.join(thisDirectory, thisFile))
return fileList
def readFiles(fileList):
- meteors_array = numpy.zeros((1,4))
+ meteors_array = numpy.zeros((1, 4))
for thisFile in fileList:
- #Leer
- f1 = h5py.File(thisFile,'r')
+ # Leer
+ f1 = h5py.File(thisFile, 'r')
grp1 = f1['Data']
grp2 = grp1['data_output']
meteors1 = grp2['table0'][:]
- meteors_array = numpy.vstack((meteors_array,meteors1))
- #cerrar
+ meteors_array = numpy.vstack((meteors_array, meteors1))
+ # cerrar
f1.close()
meteors_array = numpy.delete(meteors_array, 0, axis=0)
- meteors_list = [meteors_array[:,0],meteors_array[:,1],meteors_array[:,2],meteors_array[:,3]]
+ meteors_list = [meteors_array[:, 0], meteors_array[:, 1], meteors_array[:, 2], meteors_array[:, 3]]
return meteors_list
def estimateMean(offset_list):
@@ -49,16 +49,16 @@ def estimateMean(offset_list):
axisX_off = []
for thisOffset in offset_list:
- mean_aux = numpy.mean(thisOffset, axis = 0)
+ mean_aux = numpy.mean(thisOffset, axis=0)
mean_off.append(mean_aux)
- axisX_off.append(numpy.array([0,numpy.size(thisOffset)]))
- axisY_off.append(numpy.array([mean_aux,mean_aux]))
+ axisX_off.append(numpy.array([0, numpy.size(thisOffset)]))
+ axisY_off.append(numpy.array([mean_aux, mean_aux]))
return mean_off, axisY_off, axisX_off
def plotPhases(offset0, axisY0, axisX0, title):
f, axarr = plt.subplots(4, sharey=True)
- color = ['b','g','r','c']
+ color = ['b', 'g', 'r', 'c']
# plt.grid()
for i in range(len(offset0)):
thisMeteor = offset0[i]
@@ -67,11 +67,11 @@ def plotPhases(offset0, axisY0, axisX0, title):
thisColor = color[i]
opt = thisColor + 'o'
- axarr[i].plot(thisMeteor,opt)
+ axarr[i].plot(thisMeteor, opt)
axarr[i].plot(thisX, thisY, thisColor)
axarr[i].set_ylabel('Offset ' + str(i))
- plt.ylim((-180,180))
+ plt.ylim((-180, 180))
axarr[0].set_title(title + ' Offsets')
axarr[3].set_xlabel('Number of estimations')
@@ -81,11 +81,11 @@ def filterOffsets(offsets0, stdvLimit):
offsets1 = []
for thisOffset in offsets0:
- pstd = numpy.std(thisOffset)*stdvLimit
+ pstd = numpy.std(thisOffset) * stdvLimit
pmean = numpy.mean(thisOffset)
outlier1 = thisOffset > pmean - pstd
outlier2 = thisOffset < pmean + pstd
- not_outlier = numpy.logical_and(outlier1,outlier2)
+ not_outlier = numpy.logical_and(outlier1, outlier2)
thisOffset1 = thisOffset[not_outlier]
offsets1.append(thisOffset1)
@@ -95,7 +95,7 @@ def filterOffsets(offsets0, stdvLimit):
<<<<<<< HEAD
path = '/home/nanosat/Pictures/JASMET30_mp/201608/phase'
-=======
+=======
path = '/home/jespinoza/Pictures/JASMET30/201608/phase'
>>>>>>> master
stdvLimit = 0.5
@@ -111,7 +111,7 @@ offsets1 = filterOffsets(offsets0, stdvLimit)
mean1, axisY1, axisX1 = estimateMean(offsets1)
plotPhases(offsets1, axisY1, axisX1, 'Filtered')
-print "Original Offsets: %.2f, %.2f, %.2f, %.2f" % (mean0[0],mean0[1],mean0[2],mean0[3])
-print "Filtered Offsets: %.2f, %.2f, %.2f, %.2f" % (mean1[0],mean1[1],mean1[2],mean1[3])
+print "Original Offsets: %.2f, %.2f, %.2f, %.2f" % (mean0[0], mean0[1], mean0[2], mean0[3])
+print "Filtered Offsets: %.2f, %.2f, %.2f, %.2f" % (mean1[0], mean1[1], mean1[2], mean1[3])
plt.show()
diff --git a/schainpy/scripts/pedestal_client.py b/schainpy/scripts/pedestal_client.py
index ec88d9e..ad2937b 100644
--- a/schainpy/scripts/pedestal_client.py
+++ b/schainpy/scripts/pedestal_client.py
@@ -5,83 +5,83 @@ import time
import h5py
import os
-path="/home/alex/Downloads/pedestal"
-ext=".hdf5"
+path = "/home/alex/Downloads/pedestal"
+ext = ".hdf5"
-port ="5556"
-if len(sys.argv)>1:
+port = "5556"
+if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
-if len(sys.argv)>2:
+if len(sys.argv) > 2:
port1 = sys.argv[2]
int(port1)
-#Socket to talk to server
+# Socket to talk to server
context = zmq.Context()
-socket = context.socket(zmq.SUB)
+socket = context.socket(zmq.SUB)
print("Collecting updates from weather server...")
-socket.connect("tcp://localhost:%s"%port)
+socket.connect("tcp://localhost:%s" % port)
-if len(sys.argv)>2:
- socket.connect("tcp://localhost:%s"%port1)
+if len(sys.argv) > 2:
+ socket.connect("tcp://localhost:%s" % port1)
-#Subscribe to zipcode, default is NYC,10001
+# Subscribe to zipcode, default is NYC,10001
topicfilter = "10001"
-socket.setsockopt_string(zmq.SUBSCRIBE,topicfilter)
-#Process 5 updates
-total_value=0
-count= -1
-azi= []
-elev=[]
-time0=[]
-#for update_nbr in range(250):
+socket.setsockopt_string(zmq.SUBSCRIBE, topicfilter)
+# Process 5 updates
+total_value = 0
+count = -1
+azi = []
+elev = []
+time0 = []
+# for update_nbr in range(250):
while(True):
- string= socket.recv()
- topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec= string.split()
- ang_azi =float(ang_azi)+1e-3*float(ang_azi_dec)
- ang_elev =float(ang_elev)+1e-3*float(ang_elev_dec)
- seconds =float(seconds) +1e-6*float(seconds_dec)
+ string = socket.recv()
+ topic, ang_elev, ang_elev_dec, ang_azi, ang_azi_dec, seconds, seconds_dec = string.split()
+ ang_azi = float(ang_azi) + 1e-3 * float(ang_azi_dec)
+ ang_elev = float(ang_elev) + 1e-3 * float(ang_elev_dec)
+ seconds = float(seconds) + 1e-6 * float(seconds_dec)
azi.append(ang_azi)
elev.append(ang_elev)
time0.append(seconds)
- count +=1
+ count += 1
if count == 100:
- timetuple=time.localtime()
+ timetuple = time.localtime()
epoc = time.mktime(timetuple)
- #print(epoc)
- fullpath = path + ("/" if path[-1]!="/" else "")
+ # print(epoc)
+ fullpath = path + ("/" if path[-1] != "/" else "")
if not os.path.exists(fullpath):
os.mkdir(fullpath)
- azi_array = numpy.array(azi)
+ azi_array = numpy.array(azi)
elev_array = numpy.array(elev)
- time0_array= numpy.array(time0)
- pedestal_array=numpy.array([azi,elev,time0])
- count=0
- azi= []
- elev=[]
- time0=[]
- #print(pedestal_array[0])
- #print(pedestal_array[1])
+ time0_array = numpy.array(time0)
+ pedestal_array = numpy.array([azi, elev, time0])
+ count = 0
+ azi = []
+ elev = []
+ time0 = []
+ # print(pedestal_array[0])
+ # print(pedestal_array[1])
- meta='PE'
- filex="%s%4.4d%3.3d%10.4d%s"%(meta,timetuple.tm_year,timetuple.tm_yday,epoc,ext)
- filename = os.path.join(fullpath,filex)
- fp = h5py.File(filename,'w')
- #print("Escribiendo HDF5...",epoc)
- #·················· Data·....······································
+ meta = 'PE'
+ filex = "%s%4.4d%3.3d%10.4d%s" % (meta, timetuple.tm_year, timetuple.tm_yday, epoc, ext)
+ filename = os.path.join(fullpath, filex)
+ fp = h5py.File(filename, 'w')
+ # print("Escribiendo HDF5...",epoc)
+ # ·················· Data·....······································
grp = fp.create_group("Data")
dset = grp.create_dataset("azimuth" , data=pedestal_array[0])
dset = grp.create_dataset("elevacion", data=pedestal_array[1])
dset = grp.create_dataset("utc" , data=pedestal_array[2])
- #·················· Metadata·······································
+ # ·················· Metadata·······································
grp = fp.create_group("Metadata")
dset = grp.create_dataset("utctimeInit", data=pedestal_array[2][0])
- timeInterval = pedestal_array[2][1]-pedestal_array[2][0]
+ timeInterval = pedestal_array[2][1] - pedestal_array[2][0]
dset = grp.create_dataset("timeInterval", data=timeInterval)
fp.close()
-#print ("Average messagedata value for topic '%s' was %dF" % ( topicfilter,total_value / update_nbr))
+# print ("Average messagedata value for topic '%s' was %dF" % ( topicfilter,total_value / update_nbr))
diff --git a/schainpy/scripts/pedestal_server.py b/schainpy/scripts/pedestal_server.py
index 446a76a..db264e9 100644
--- a/schainpy/scripts/pedestal_server.py
+++ b/schainpy/scripts/pedestal_server.py
@@ -9,40 +9,40 @@ import struct
from time import sleep
import zmq
import pickle
-port="5556"
+port = "5556"
context = zmq.Context()
socket = context.socket(zmq.PUB)
-socket.bind("tcp://*:%s"%port)
+socket.bind("tcp://*:%s" % port)
###### PARAMETROS DE ENTRADA################################
print("PEDESTAL RESOLUCION 0.01")
print("MAXIMA VELOCIDAD DEL PEDESTAL")
ang_elev = 4.12
-ang_azi = 30
-velocidad= input ("Ingresa velocidad:")
-velocidad= float(velocidad)
+ang_azi = 30
+velocidad = input ("Ingresa velocidad:")
+velocidad = float(velocidad)
print (velocidad)
############################################################
sleep(3)
print("Start program")
t1 = time.time()
-count=0
+count = 0
while(True):
- tmp_vuelta = int(360/velocidad)
- t1=t1+tmp_vuelta*count
- count= count+1
+ tmp_vuelta = int(360 / velocidad)
+ t1 = t1 + tmp_vuelta * count
+ count = count + 1
muestras_seg = 100
t2 = time.time()
for i in range(tmp_vuelta):
for j in range(muestras_seg):
- tmp_variable = (i+j/100.0)
- ang_azi = (tmp_variable)*float(velocidad)
- seconds = t1+ tmp_variable
- topic=10001
- print ("Azim°: ","%.4f"%ang_azi,"Time:" ,"%.5f"%seconds)
- seconds_dec=(seconds-int(seconds))*1e6
- ang_azi_dec= (ang_azi-int(ang_azi))*1e3
- ang_elev_dec=(ang_elev-int(ang_elev))*1e3
+ tmp_variable = (i + j / 100.0)
+ ang_azi = (tmp_variable) * float(velocidad)
+ seconds = t1 + tmp_variable
+ topic = 10001
+    print ("Azim°: ", "%.4f" % ang_azi, "Time:", "%.5f" % seconds)
+ seconds_dec = (seconds - int(seconds)) * 1e6
+ ang_azi_dec = (ang_azi - int(ang_azi)) * 1e3
+ ang_elev_dec = (ang_elev - int(ang_elev)) * 1e3
sleep(0.0088)
- socket.send_string("%d %d %d %d %d %d %d"%(topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec))
+ socket.send_string("%d %d %d %d %d %d %d" % (topic, ang_elev, ang_elev_dec, ang_azi, ang_azi_dec, seconds, seconds_dec))
t3 = time.time()
- print ("Total time for 1 vuelta in Seconds",t3-t2)
+ print ("Total time for 1 vuelta in Seconds", t3 - t2)
diff --git a/schainpy/scripts/test_001.py b/schainpy/scripts/test_001.py
index a77c4dd..c792559 100644
--- a/schainpy/scripts/test_001.py
+++ b/schainpy/scripts/test_001.py
@@ -6,37 +6,37 @@ import os, sys
import datetime
import time
-#path = os.path.dirname(os.getcwd())
-#path = os.path.dirname(path)
-#sys.path.insert(0, path)
+# path = os.path.dirname(os.getcwd())
+# path = os.path.dirname(path)
+# sys.path.insert(0, path)
from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
#######################################################################
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
#######################################################################
-#path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
+# path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
-#path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
-path = '/home/alex/Downloads'
+# path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
+path = '/home/alex/Downloads'
figpath = '/home/alex/Downloads'
-#figpath = '/home/soporte/data_hdf5_imag'
-#remotefolder = "/home/wmaster/graficos"
+# figpath = '/home/soporte/data_hdf5_imag'
+# remotefolder = "/home/wmaster/graficos"
#######################################################################
################# RANGO DE PLOTEO######################################
#######################################################################
dBmin = '30'
dBmax = '60'
xmin = '0'
-xmax ='24'
+xmax = '24'
ymin = '0'
ymax = '600'
#######################################################################
@@ -51,17 +51,17 @@ yesterday = str2.strftime("%Y/%m/%d")
#######################################################################
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
- startDate="2020/01/01", #"2020/01/01",#today,
- endDate= "2020/12/01", #"2020/12/30",#today,
+ startDate="2020/01/01", # "2020/01/01",#today,
+ endDate="2020/12/01", # "2020/12/30",#today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1)
opObj11 = readUnitConfObj.addOperation(name='printInfo')
-#opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
+# opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
#######################################################################
################ OPERACIONES DOMINIO DEL TIEMPO########################
#######################################################################
@@ -71,31 +71,31 @@ procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
# codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
# '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
-#opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
-#opObj11.addParameter(name='frequency', value='30e6', format='float')
+# opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
+# opObj11.addParameter(name='frequency', value='30e6', format='float')
-#opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
-#opObj10.addParameter(name='id', value='10', format='int')
-##opObj10.addParameter(name='xmin', value='0', format='int')
-##opObj10.addParameter(name='xmax', value='50', format='int')
-#opObj10.addParameter(name='type', value='iq')
-#opObj10.addParameter(name='ymin', value='-5000', format='int')
-##opObj10.addParameter(name='ymax', value='8500', format='int')
+# opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
+# opObj10.addParameter(name='id', value='10', format='int')
+# #opObj10.addParameter(name='xmin', value='0', format='int')
+# #opObj10.addParameter(name='xmax', value='50', format='int')
+# opObj10.addParameter(name='type', value='iq')
+# opObj10.addParameter(name='ymin', value='-5000', format='int')
+# #opObj10.addParameter(name='ymax', value='8500', format='int')
-#opObj10 = procUnitConfObjA.addOperation(name='setH0')
-#opObj10.addParameter(name='h0', value='-5000', format='float')
+# opObj10 = procUnitConfObjA.addOperation(name='setH0')
+# opObj10.addParameter(name='h0', value='-5000', format='float')
-#opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
-#opObj11.addParameter(name='window', value='1', format='int')
+# opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
+# opObj11.addParameter(name='window', value='1', format='int')
-#codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
-#opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
-#opObj11.addParameter(name='code', value=codigo, format='floatlist')
-#opObj11.addParameter(name='nCode', value='1', format='int')
-#opObj11.addParameter(name='nBaud', value='28', format='int')
+# codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
+# opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
+# opObj11.addParameter(name='code', value=codigo, format='floatlist')
+# opObj11.addParameter(name='nCode', value='1', format='int')
+# opObj11.addParameter(name='nBaud', value='28', format='int')
-#opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
-#opObj11.addParameter(name='n', value='100', format='int')
+# opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
+# opObj11.addParameter(name='n', value='100', format='int')
#######################################################################
########## OPERACIONES DOMINIO DE LA FRECUENCIA########################
@@ -103,47 +103,47 @@ procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=rea
procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
procUnitConfObjB.addParameter(name='nFFTPoints', value='100', format='int')
procUnitConfObjB.addParameter(name='nProfiles', value='100', format='int')
-#procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
+# procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
-#opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
-#opObj13.addParameter(name='mode', value='2', format='int')
+# opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
+# opObj13.addParameter(name='mode', value='2', format='int')
-#opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
-#opObj11.addParameter(name='n', value='60', format='float')
+# opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
+# opObj11.addParameter(name='n', value='60', format='float')
#######################################################################
########## PLOTEO DOMINIO DE LA FRECUENCIA#############################
#######################################################################
-#SpectraPlot
+# SpectraPlot
-##opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
-##opObj11.addParameter(name='id', value='1', format='int')
-##opObj11.addParameter(name='wintitle', value='Spectra', format='str')
-#opObj11.addParameter(name='xmin', value=-0.01, format='float')
-#opObj11.addParameter(name='xmax', value=0.01, format='float')
-#opObj11.addParameter(name='zmin', value=dBmin, format='int')
-#opObj11.addParameter(name='zmax', value=dBmax, format='int')
-#opObj11.addParameter(name='ymin', value=ymin, format='int')
-#opObj11.addParameter(name='ymax', value=ymax, format='int')
-##opObj11.addParameter(name='showprofile', value='1', format='int')
-##opObj11.addParameter(name='save', value=figpath, format='str')
-##opObj11.addParameter(name='save_period', value=10, format='int')
+# #opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
+# #opObj11.addParameter(name='id', value='1', format='int')
+# #opObj11.addParameter(name='wintitle', value='Spectra', format='str')
+# opObj11.addParameter(name='xmin', value=-0.01, format='float')
+# opObj11.addParameter(name='xmax', value=0.01, format='float')
+# opObj11.addParameter(name='zmin', value=dBmin, format='int')
+# opObj11.addParameter(name='zmax', value=dBmax, format='int')
+# opObj11.addParameter(name='ymin', value=ymin, format='int')
+# opObj11.addParameter(name='ymax', value=ymax, format='int')
+# #opObj11.addParameter(name='showprofile', value='1', format='int')
+# #opObj11.addParameter(name='save', value=figpath, format='str')
+# #opObj11.addParameter(name='save_period', value=10, format='int')
-#RTIPLOT
+# RTIPLOT
-##opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
-##opObj11.addParameter(name='id', value='2', format='int')
-##opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
-#opObj11.addParameter(name='zmin', value=dBmin, format='int')
-#opObj11.addParameter(name='zmax', value=dBmax, format='int')
-#opObj11.addParameter(name='ymin', value=ymin, format='int')
-#opObj11.addParameter(name='ymax', value=ymax, format='int')
-##opObj11.addParameter(name='xmin', value=0, format='int')
-##opObj11.addParameter(name='xmax', value=23, format='int')
+# #opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
+# #opObj11.addParameter(name='id', value='2', format='int')
+# #opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
+# opObj11.addParameter(name='zmin', value=dBmin, format='int')
+# opObj11.addParameter(name='zmax', value=dBmax, format='int')
+# opObj11.addParameter(name='ymin', value=ymin, format='int')
+# opObj11.addParameter(name='ymax', value=ymax, format='int')
+# #opObj11.addParameter(name='xmin', value=0, format='int')
+# #opObj11.addParameter(name='xmax', value=23, format='int')
-##opObj11.addParameter(name='showprofile', value='1', format='int')
-##opObj11.addParameter(name='save', value=figpath, format='str')
-##opObj11.addParameter(name='save_period', value=10, format='int')
+# #opObj11.addParameter(name='showprofile', value='1', format='int')
+# #opObj11.addParameter(name='save', value=figpath, format='str')
+# #opObj11.addParameter(name='save_period', value=10, format='int')
# opObj11 = procUnitConfObjSousySpectra.addOperation(name='CrossSpectraPlot', optype='other')
@@ -171,9 +171,9 @@ procUnitConfObjB.addParameter(name='nProfiles', value='100', format='int')
#######################################################################
############### UNIDAD DE ESCRITURA ###################################
#######################################################################
-#opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
-#opObj11.addParameter(name='path', value=wr_path)
-#opObj11.addParameter(name='blocksPerFile', value='50', format='int')
+# opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
+# opObj11.addParameter(name='path', value=wr_path)
+# opObj11.addParameter(name='blocksPerFile', value='50', format='int')
procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
procUnitConfObjC.addOperation(name='SpectralMoments')
diff --git a/schainpy/scripts/test_002.py b/schainpy/scripts/test_002.py
index f3370d8..6f1defb 100644
--- a/schainpy/scripts/test_002.py
+++ b/schainpy/scripts/test_002.py
@@ -6,39 +6,39 @@ import os, sys
import datetime
import time
-#path = os.path.dirname(os.getcwd())
-#path = os.path.dirname(path)
-#sys.path.insert(0, path)
+# path = os.path.dirname(os.getcwd())
+# path = os.path.dirname(path)
+# sys.path.insert(0, path)
from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
#######################################################################
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
#######################################################################
-#path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
+# path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
-#path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
-path = '/home/alex/Downloads'
+# path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
+path = '/home/alex/Downloads'
figpath = '/home/alex/Downloads'
-pathfile = '/home/alex/Downloads/test_rawdata2'
+pathfile = '/home/alex/Downloads/test_rawdata2'
-#figpath = '/home/soporte/data_hdf5_imag'
-#remotefolder = "/home/wmaster/graficos"
+# figpath = '/home/soporte/data_hdf5_imag'
+# remotefolder = "/home/wmaster/graficos"
#######################################################################
################# RANGO DE PLOTEO######################################
#######################################################################
dBmin = '30'
dBmax = '60'
xmin = '0'
-xmax ='24'
+xmax = '24'
ymin = '0'
ymax = '600'
#######################################################################
@@ -53,17 +53,17 @@ yesterday = str2.strftime("%Y/%m/%d")
#######################################################################
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
- startDate="2020/01/01", #"2020/01/01",#today,
- endDate= "2020/12/01", #"2020/12/30",#today,
+ startDate="2020/01/01", # "2020/01/01",#today,
+ endDate="2020/12/01", # "2020/12/30",#today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1)
opObj11 = readUnitConfObj.addOperation(name='printInfo')
-#opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
+# opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
#######################################################################
################ OPERACIONES DOMINIO DEL TIEMPO########################
#######################################################################
diff --git a/schainpy/scripts/test_sim0001.py b/schainpy/scripts/test_sim0001.py
index fe8e982..79f80cf 100644
--- a/schainpy/scripts/test_sim0001.py
+++ b/schainpy/scripts/test_sim0001.py
@@ -1,19 +1,19 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2'
+path = '/home/alex/Downloads/NEW_WR2'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -26,13 +26,13 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
online=0,
walk=0)
-#opObj11 = readUnitConfObj.addOperation(name='printInfo')
+# opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
-#opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
-#opObj10.addParameter(name='channelList', value=[0,1])
-#opObj10.addParameter(name='channelList', value='0',format='intlist')
+# opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
+# opObj10.addParameter(name='channelList', value=[0,1])
+# opObj10.addParameter(name='channelList', value='0',format='intlist')
opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
opObj11.addParameter(name='n', value='32', format='int')
@@ -45,10 +45,10 @@ opObj10.addParameter(name='wintitle', value=type )
opObj10.addParameter(name='type', value=type)
'''
-type="WeatherPower"
+type = "WeatherPower"
opObj10 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='external')
-#opObj10.addParameter(name='id', value='12')
-opObj10.addParameter(name='wintitle', value=type )
+# opObj10.addParameter(name='id', value='12')
+opObj10.addParameter(name='wintitle', value=type)
'''
type="WeatherVeloity"
diff --git a/schainpy/scripts/test_sim00010.py b/schainpy/scripts/test_sim00010.py
index 8710965..4b2e4d8 100644
--- a/schainpy/scripts/test_sim00010.py
+++ b/schainpy/scripts/test_sim00010.py
@@ -6,11 +6,11 @@ from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
-path = '/home/alex/Downloads/test_rawdata'
+path = '/home/alex/Downloads/test_rawdata'
figpath = '/home/alex/Downloads/hdf5_test'
######################## UNIDAD DE LECTURA#############################
'''
@@ -28,8 +28,8 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
'''
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -37,16 +37,16 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
FixRCP_TXB=0.15,
Fdoppler=600.0,
Hdoppler=36,
- Adoppler=300,#300
+ Adoppler=300, # 300
delay=0,
online=0,
walk=0,
profilesPerBlock=625,
dataBlocksPerFile=100)
- #nTotalReadFiles=2)
+ # nTotalReadFiles=2)
-#opObj11 = readUnitConfObj.addOperation(name='printInfo')
+# opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
@@ -56,12 +56,12 @@ procUnitConfObjB.addParameter(name='nProfiles', value=625, format='int')
opObj11 = procUnitConfObjB.addOperation(name='removeDC')
opObj11.addParameter(name='mode', value=2)
-#opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
-#opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
+# opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
+# opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
-procUnitConfObjC= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
+procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
procUnitConfObjC.addOperation(name='SpectralMoments')
-#opObj11 = procUnitConfObjC.addOperation(name='PowerPlot')
+# opObj11 = procUnitConfObjC.addOperation(name='PowerPlot')
'''
opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
@@ -73,10 +73,10 @@ opObj11.addParameter(name='showprofile', value=1)
'''
opObj10 = procUnitConfObjC.addOperation(name='ParameterWriter')
-opObj10.addParameter(name='path',value=figpath)
-#opObj10.addParameter(name='mode',value=0)
-opObj10.addParameter(name='blocksPerFile',value='100',format='int')
-opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
-opObj10.addParameter(name='dataList',value='data_POW,data_DOP,data_WIDTH,data_SNR')#,format='list'
+opObj10.addParameter(name='path', value=figpath)
+# opObj10.addParameter(name='mode',value=0)
+opObj10.addParameter(name='blocksPerFile', value='100', format='int')
+opObj10.addParameter(name='metadataList', value='utctimeInit,timeInterval', format='list')
+opObj10.addParameter(name='dataList', value='data_POW,data_DOP,data_WIDTH,data_SNR') # ,format='list'
controllerObj.start()
diff --git a/schainpy/scripts/test_sim0002.py b/schainpy/scripts/test_sim0002.py
index 7feeec7..29c1cc0 100644
--- a/schainpy/scripts/test_sim0002.py
+++ b/schainpy/scripts/test_sim0002.py
@@ -1,19 +1,19 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
+path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -27,12 +27,12 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
walk=0,
nTotalReadFiles=4)
-opObj11 = readUnitConfObj.addOperation(name='printInfo')
+opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
-opObj10.addParameter(name='channelList', value=[0,1])
+opObj10.addParameter(name='channelList', value=[0, 1])
procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
procUnitConfObjB.addParameter(name='nFFTPoints', value=200, format='int')
@@ -41,8 +41,8 @@ procUnitConfObjB.addParameter(name='nProfiles', value=200, format='int')
opObj11 = procUnitConfObjB.addOperation(name='removeDC')
opObj11.addParameter(name='mode', value=2)
-#opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
-#opObj11.addParameter(name='n', value='20', format='int')
+# opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
+# opObj11.addParameter(name='n', value='20', format='int')
procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
procUnitConfObjC.addOperation(name='SpectralMoments')
diff --git a/schainpy/scripts/test_sim0003.py b/schainpy/scripts/test_sim0003.py
index 8a43732..1e9a55a 100644
--- a/schainpy/scripts/test_sim0003.py
+++ b/schainpy/scripts/test_sim0003.py
@@ -1,19 +1,19 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
+path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -27,14 +27,14 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
walk=0,
nTotalReadFiles=4)
-opObj11 = readUnitConfObj.addOperation(name='printInfo')
+opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
opObj10.addParameter(name='channelList', value=[0])
opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
-opObj11.addParameter(name='n', value='32', format='int')#10
-#opObj11.addParameter(name='removeDC', value=1, format='int')
+opObj11.addParameter(name='n', value='32', format='int') # 10
+# opObj11.addParameter(name='removeDC', value=1, format='int')
controllerObj.start()
diff --git a/schainpy/scripts/test_sim0004.py b/schainpy/scripts/test_sim0004.py
index 539feb9..6cf65cd 100644
--- a/schainpy/scripts/test_sim0004.py
+++ b/schainpy/scripts/test_sim0004.py
@@ -1,20 +1,20 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2'
-pathfile = '/home/alex/Downloads/test_rawdata'
+path = '/home/alex/Downloads/NEW_WR2'
+pathfile = '/home/alex/Downloads/test_rawdata'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30.0,
+ FixRCP_IPP=60,
+ Tau_0=30.0,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -27,11 +27,11 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
online=0,
walk=0,
nTotalReadFiles=3)
-#opObj11 = readUnitConfObj.addOperation(name='printInfo')
+# opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
-#opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
-#opObj10.addParameter(name='channelList', value=[0,1])
-#opObj10.addParameter(name='channelList', value='0',format='intlist')
+# opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
+# opObj10.addParameter(name='channelList', value=[0,1])
+# opObj10.addParameter(name='channelList', value='0',format='intlist')
opObj12 = procUnitConfObjA.addOperation(name='VoltageWriter', optype='other')
opObj12.addParameter(name='path', value=pathfile)
opObj12.addParameter(name='blocksPerFile', value='120', format='int')
diff --git a/schainpy/scripts/test_sim0005.py b/schainpy/scripts/test_sim0005.py
index fd917af..cd985ea 100644
--- a/schainpy/scripts/test_sim0005.py
+++ b/schainpy/scripts/test_sim0005.py
@@ -6,17 +6,17 @@ from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
-path = '/home/alex/Downloads/test_rawdata'
+path = '/home/alex/Downloads/test_rawdata'
figpath = '/home/alex/Downloads'
################# RANGO DE PLOTEO######################################
dBmin = '30'
dBmax = '60'
xmin = '0'
-xmax ='24'
+xmax = '24'
ymin = '0'
ymax = '600'
########################FECHA##########################################
@@ -27,17 +27,17 @@ yesterday = str2.strftime("%Y/%m/%d")
######################## UNIDAD DE LECTURA#############################
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
- startDate="2020/01/01", #"2020/01/01",#today,
- endDate= "2020/12/01", #"2020/12/30",#today,
+ startDate="2020/01/01", # "2020/01/01",#today,
+ endDate="2020/12/01", # "2020/12/30",#today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1)
opObj11 = readUnitConfObj.addOperation(name='printInfo')
-#opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
+# opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
#######################################################################
################ OPERACIONES DOMINIO DEL TIEMPO########################
#######################################################################
diff --git a/schainpy/scripts/test_sim0006.py b/schainpy/scripts/test_sim0006.py
index 1646f20..597fc0a 100644
--- a/schainpy/scripts/test_sim0006.py
+++ b/schainpy/scripts/test_sim0006.py
@@ -6,21 +6,21 @@ from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
-path = '/home/alex/Downloads/test_rawdata'
+path = '/home/alex/Downloads/test_rawdata'
figpath = '/home/alex/Downloads'
######################## UNIDAD DE LECTURA#############################
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
- startDate="2020/01/01", #"2020/01/01",#today,
- endDate= "2020/12/01", #"2020/12/30",#today,
+ startDate="2020/01/01", # "2020/01/01",#today,
+ endDate="2020/12/01", # "2020/12/30",#today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1)
@@ -28,9 +28,9 @@ opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
-#opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
-#opObj10.addParameter(name='channelList', value=[0,1])
-#opObj10.addParameter(name='channelList', value='0',format='intlist')
+# opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
+# opObj10.addParameter(name='channelList', value=[0,1])
+# opObj10.addParameter(name='channelList', value='0',format='intlist')
opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
opObj11.addParameter(name='n', value='16', format='int')
diff --git a/schainpy/scripts/test_sim0007.py b/schainpy/scripts/test_sim0007.py
index 92ee891..5bc3391 100644
--- a/schainpy/scripts/test_sim0007.py
+++ b/schainpy/scripts/test_sim0007.py
@@ -6,21 +6,21 @@ from schainpy.controller import Project
desc = "USRP_test"
filename = "USRP_processing.xml"
controllerObj = Project()
-controllerObj.setup(id = '191', name='Test_USRP', description=desc)
+controllerObj.setup(id='191', name='Test_USRP', description=desc)
############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
######PATH DE LECTURA, ESCRITURA, GRAFICOS Y ENVIO WEB#################
-path = '/home/alex/Downloads/test_rawdata'
+path = '/home/alex/Downloads/test_rawdata'
figpath = '/home/alex/Downloads'
######################## UNIDAD DE LECTURA#############################
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
- startDate="2020/01/01", #"2020/01/01",#today,
- endDate= "2020/12/01", #"2020/12/30",#today,
+ startDate="2020/01/01", # "2020/01/01",#today,
+ endDate="2020/12/01", # "2020/12/30",#today,
startTime='00:00:00',
endTime='23:59:59',
delay=0,
- #set=0,
+ # set=0,
online=0,
walk=1)
@@ -40,14 +40,14 @@ procUnitConfObjB.addParameter(name='nProfiles', value=64, format='int')
opObj11 = procUnitConfObjB.addOperation(name='removeDC')
opObj11.addParameter(name='mode', value=2)
-#opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
-#opObj11.addParameter(name='n', value='20', format='int')
+# opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
+# opObj11.addParameter(name='n', value='20', format='int')
procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
procUnitConfObjC.addOperation(name='SpectralMoments')
opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
-#opObj11.addParameter(name='xmin', value=14)
+# opObj11.addParameter(name='xmin', value=14)
opObj11.addParameter(name='xmax', value=6)
-#opObj11.addParameter(name='save', value=figpath)
+# opObj11.addParameter(name='save', value=figpath)
opObj11.addParameter(name='showprofile', value=1)
controllerObj.start()
diff --git a/schainpy/scripts/test_sim0008.py b/schainpy/scripts/test_sim0008.py
index 5d53ccd..926c5a1 100644
--- a/schainpy/scripts/test_sim0008.py
+++ b/schainpy/scripts/test_sim0008.py
@@ -1,19 +1,19 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
+path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -27,7 +27,7 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
walk=0,
nTotalReadFiles=3)
-opObj11 = readUnitConfObj.addOperation(name='printInfo')
+opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
@@ -41,27 +41,27 @@ procUnitConfObjB.addParameter(name='nProfiles', value=300, format='int')
opObj11 = procUnitConfObjB.addOperation(name='removeDC')
opObj11.addParameter(name='mode', value=2)
-#opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
-#opObj11.addParameter(name='n', value='10', format='int')
+# opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
+# opObj11.addParameter(name='n', value='10', format='int')
-#opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
-#opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
-#opObj11.addParameter(name='xmin', value=13)
-#opObj11.addParameter(name='xmax', value=.4)
-#opObj11 = procUnitConfObjB.addOperation(name='NoisePlot')
-#opObj11.addParameter(name='xmin', value=13)
-#opObj11.addParameter(name='xmax', value=14)
+# opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
+# opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
+# opObj11.addParameter(name='xmin', value=13)
+# opObj11.addParameter(name='xmax', value=.4)
+# opObj11 = procUnitConfObjB.addOperation(name='NoisePlot')
+# opObj11.addParameter(name='xmin', value=13)
+# opObj11.addParameter(name='xmax', value=14)
procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
procUnitConfObjC.addOperation(name='SpectralMoments')
opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
-#opObj11.addParameter(name='xmin', value=14)
-#opObj11.addParameter(name='xmax', value=15)
-#opObj11.addParameter(name='save', value=figpath)
+# opObj11.addParameter(name='xmin', value=14)
+# opObj11.addParameter(name='xmax', value=15)
+# opObj11.addParameter(name='save', value=figpath)
opObj11.addParameter(name='showprofile', value=1)
-#opObj11.addParameter(name='save_period', value=10)
+# opObj11.addParameter(name='save_period', value=10)
'''
opObj11 = procUnitConfObjC.addOperation(name='SnrPlot')
opObj11.addParameter(name='zmin', value=-10)
@@ -73,8 +73,8 @@ opObj11.addParameter(name='zmax', value=40)
opObj11 = procUnitConfObjC.addOperation(name='SpectralWidthPlot')
opObj11.addParameter(name='xmin', value=5)
opObj11.addParameter(name='xmax', value=6)
-#opObj11.addParameter(name='save', value=figpath)
-#opObj11.addParameter(name='showprofile', value=1)
-#opObj11.addParameter(name='save_period', value=10)
+# opObj11.addParameter(name='save', value=figpath)
+# opObj11.addParameter(name='showprofile', value=1)
+# opObj11.addParameter(name='save_period', value=10)
controllerObj.start()
diff --git a/schainpy/scripts/test_sim0009.py b/schainpy/scripts/test_sim0009.py
index 43d6640..a2e1e3d 100644
--- a/schainpy/scripts/test_sim0009.py
+++ b/schainpy/scripts/test_sim0009.py
@@ -1,19 +1,19 @@
-import os,sys
+import os, sys
import datetime
import time
from schainpy.controller import Project
-path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
+path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
figpath = path
-desc = "Simulator Test"
+desc = "Simulator Test"
-controllerObj = Project()
+controllerObj = Project()
-controllerObj.setup(id='10',name='Test Simulator',description=desc)
+controllerObj.setup(id='10', name='Test Simulator', description=desc)
readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
frequency=9.345e9,
- FixRCP_IPP= 60,
- Tau_0 = 30,
+ FixRCP_IPP=60,
+ Tau_0=30,
AcqH0_0=0,
samples=330,
AcqDH_0=0.15,
@@ -21,12 +21,12 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
FixRCP_TXB=0.15,
Fdoppler=600.0,
Hdoppler=36,
- Adoppler=300,#300
+ Adoppler=300, # 300
delay=0,
online=0,
walk=0,
profilesPerBlock=625,
- dataBlocksPerFile=100)#,#nTotalReadFiles=2)
+ dataBlocksPerFile=100) # ,#nTotalReadFiles=2)
'''
readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
path=path,
@@ -39,35 +39,35 @@ readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
online=0,
walk=1)
'''
-opObj11 = readUnitConfObj.addOperation(name='printInfo')
+opObj11 = readUnitConfObj.addOperation(name='printInfo')
procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
-#opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
-#opObj11.addParameter(name='n', value='10', format='int')
+# opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
+# opObj11.addParameter(name='n', value='10', format='int')
-#opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
-#opObj10.addParameter(name='channelList', value=[0])
+# opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
+# opObj10.addParameter(name='channelList', value=[0])
opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
-opObj11.addParameter(name='n', value='625', format='int')#10
+opObj11.addParameter(name='n', value='625', format='int') # 10
opObj11.addParameter(name='removeDC', value=1, format='int')
-#opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
-#opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
+# opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
+# opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
-#opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
-#opObj11.addParameter(name='xmax', value=8)
+# opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
+# opObj11.addParameter(name='xmax', value=8)
-#opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
+# opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
-procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
+procUnitConfObjB = controllerObj.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjA.getId())
opObj10 = procUnitConfObjB.addOperation(name='ParameterWriter')
-opObj10.addParameter(name='path',value=figpath)
-#opObj10.addParameter(name='mode',value=0)
-opObj10.addParameter(name='blocksPerFile',value='100',format='int')
-opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
-opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH')#,format='list'
+opObj10.addParameter(name='path', value=figpath)
+# opObj10.addParameter(name='mode',value=0)
+opObj10.addParameter(name='blocksPerFile', value='100', format='int')
+opObj10.addParameter(name='metadataList', value='utctimeInit,timeInterval', format='list')
+opObj10.addParameter(name='dataList', value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH') # ,format='list'
controllerObj.start()
diff --git a/schainpy/scripts/wr_integrador.py b/schainpy/scripts/wr_integrador.py
index d719cb3..a720549 100644
--- a/schainpy/scripts/wr_integrador.py
+++ b/schainpy/scripts/wr_integrador.py
@@ -1,4 +1,4 @@
-import os,numpy,h5py
+import os, numpy, h5py
from shutil import copyfile
def isNumber(str):
@@ -8,23 +8,23 @@ def isNumber(str):
except:
return False
-def getfirstFilefromPath(path,meta,ext):
+def getfirstFilefromPath(path, meta, ext):
validFilelist = []
- fileList = os.listdir(path)
- if len(fileList)<1:
+ fileList = os.listdir(path)
+ if len(fileList) < 1:
return None
# meta 1234 567 8-18 BCDE
# H,D,PE YYYY DDD EPOC .ext
for thisFile in fileList:
- if meta =="PE":
+ if meta == "PE":
try:
- number= int(thisFile[len(meta)+7:len(meta)+17])
+ number = int(thisFile[len(meta) + 7:len(meta) + 17])
except:
print("There is a file or folder with different format")
if meta == "D":
try:
- number= int(thisFile[8:11])
+ number = int(thisFile[8:11])
except:
print("There is a file or folder with different format")
@@ -34,129 +34,129 @@ def getfirstFilefromPath(path,meta,ext):
continue
validFilelist.sort()
validFilelist.append(thisFile)
- if len(validFilelist)>0:
- validFilelist = sorted(validFilelist,key=str.lower)
+ if len(validFilelist) > 0:
+ validFilelist = sorted(validFilelist, key=str.lower)
return validFilelist
return None
-def gettimeutcfromDirFilename(path,file):
- dir_file= path+"/"+file
- fp = h5py.File(dir_file,'r')
- epoc = fp['Metadata'].get('utctimeInit')[()]
+def gettimeutcfromDirFilename(path, file):
+ dir_file = path + "/" + file
+ fp = h5py.File(dir_file, 'r')
+ epoc = fp['Metadata'].get('utctimeInit')[()]
fp.close()
return epoc
-def getDatavaluefromDirFilename(path,file,value):
- dir_file= path+"/"+file
- fp = h5py.File(dir_file,'r')
- array = fp['Data'].get(value)[()]
+def getDatavaluefromDirFilename(path, file, value):
+ dir_file = path + "/" + file
+ fp = h5py.File(dir_file, 'r')
+ array = fp['Data'].get(value)[()]
fp.close()
return array
-#·········· Velocidad de Pedestal·················
+# ·········· Velocidad de Pedestal·················
w = input ("Ingresa velocidad de Pedestal: ")
w = 4
w = float(w)
-#·········· Resolucion minimo en grados···········
+# ·········· Resolucion minimo en grados···········
alfa = input ("Ingresa resolucion minima en grados: ")
alfa = 1
alfa = float(alfa)
-#·········· IPP del Experimento ··················
-IPP = input ("Ingresa el IPP del experimento: ")
-IPP = 0.0004
-IPP = float(IPP)
-#·········· MODE ··················
+# ·········· IPP del Experimento ··················
+IPP = input("Ingresa el IPP del experimento: ")
+IPP = 0.0004
+IPP = float(IPP)
+# ·········· MODE ··················
mode = input ("Ingresa el MODO del experimento T or F: ")
mode = "T"
mode = str(mode)
-#·········· Tiempo en generar la resolucion min···
-#············ MCU ·· var_ang = w * (var_tiempo)···
-var_tiempo = alfa/w
-#·········· Tiempo Equivalente en perfiles········
-#·········· var_tiempo = IPP * ( num_perfiles )·
-num_perfiles = int(var_tiempo/IPP)
+# ·········· Tiempo en generar la resolucion min···
+# ············ MCU ·· var_ang = w * (var_tiempo)···
+var_tiempo = alfa / w
+# ·········· Tiempo Equivalente en perfiles········
+# ·········· var_tiempo = IPP * ( num_perfiles )·
+num_perfiles = int(var_tiempo / IPP)
-#··········DATA PEDESTAL··························
+# ··········DATA PEDESTAL··························
dir_pedestal = "/home/alex/Downloads/pedestal"
-#·········· DATA ADQ······························
-if mode=="T":
- dir_adq = "/home/alex/Downloads/hdf5_testPP/d2020194" # Time domain
+# ·········· DATA ADQ······························
+if mode == "T":
+ dir_adq = "/home/alex/Downloads/hdf5_testPP/d2020194" # Time domain
else:
- dir_adq = "/home/alex/Downloads/hdf5_test/d2020194" # Frequency domain
+ dir_adq = "/home/alex/Downloads/hdf5_test/d2020194" # Frequency domain
-print( "Velocidad angular :", w)
-print( "Resolucion minima en grados :", alfa)
-print( "Numero de perfiles equivalente:", num_perfiles)
-print( "Mode :", mode)
+print("Velocidad angular :", w)
+print("Resolucion minima en grados :", alfa)
+print("Numero de perfiles equivalente:", num_perfiles)
+print("Mode :", mode)
-#············ First File·············
-list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
-list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
+# ············ First File·············
+list_pedestal = getfirstFilefromPath(path=dir_pedestal, meta="PE", ext=".hdf5")
+list_adq = getfirstFilefromPath(path=dir_adq, meta="D", ext=".hdf5")
-#············ utc time ··············
-utc_pedestal= gettimeutcfromDirFilename(path=dir_pedestal,file=list_pedestal[0])
-utc_adq = gettimeutcfromDirFilename(path=dir_adq ,file=list_adq[0])
+# ············ utc time ··············
+utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal, file=list_pedestal[0])
+utc_adq = gettimeutcfromDirFilename(path=dir_adq, file=list_adq[0])
-print("utc_pedestal :",utc_pedestal)
-print("utc_adq :",utc_adq)
-#·············Relacion: utc_adq (+/-) var_tiempo*nro_file= utc_pedestal
-time_Interval_p = 0.01
-n_perfiles_p = 100
-if utc_adq>utc_pedestal:
- nro_file = int((int(utc_adq) - int(utc_pedestal))/(time_Interval_p*n_perfiles_p))
- ff_pedestal = list_pedestal[nro_file]
- utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal,file=ff_pedestal)
- nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
- if utc_adq >utc_pedestal:
- ff_pedestal = ff_pedestal
+print("utc_pedestal :", utc_pedestal)
+print("utc_adq :", utc_adq)
+# ·············Relacion: utc_adq (+/-) var_tiempo*nro_file= utc_pedestal
+time_Interval_p = 0.01
+n_perfiles_p = 100
+if utc_adq > utc_pedestal:
+ nro_file = int((int(utc_adq) - int(utc_pedestal)) / (time_Interval_p * n_perfiles_p))
+ ff_pedestal = list_pedestal[nro_file]
+ utc_pedestal = gettimeutcfromDirFilename(path=dir_pedestal, file=ff_pedestal)
+ nro_key_p = int((utc_adq - utc_pedestal) / time_Interval_p)
+ if utc_adq > utc_pedestal:
+ ff_pedestal = ff_pedestal
else:
- nro_file = nro_file-1
- ff_pedestal = list_pedestal[nro_file]
- angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
- nro_key_p = int((utc_adq-utc_pedestal)/time_Interval_p)
- print("nro_file :",nro_file)
- print("name_file :",ff_pedestal)
- print("utc_pedestal_file :",utc_pedestal)
- print("nro_key_p :",nro_key_p)
- print("utc_pedestal_init :",utc_pedestal+nro_key_p*time_Interval_p)
- print("angulo_array :",angulo[nro_key_p])
-#4+25+25+25+21
-#while True:
-list_pedestal = getfirstFilefromPath(path=dir_pedestal,meta="PE",ext=".hdf5")
-list_adq = getfirstFilefromPath(path=dir_adq ,meta="D",ext=".hdf5")
+ nro_file = nro_file - 1
+ ff_pedestal = list_pedestal[nro_file]
+ angulo = getDatavaluefromDirFilename(path=dir_pedestal, file=ff_pedestal, value="azimuth")
+ nro_key_p = int((utc_adq - utc_pedestal) / time_Interval_p)
+ print("nro_file :", nro_file)
+ print("name_file :", ff_pedestal)
+ print("utc_pedestal_file :", utc_pedestal)
+ print("nro_key_p :", nro_key_p)
+ print("utc_pedestal_init :", utc_pedestal + nro_key_p * time_Interval_p)
+ print("angulo_array :", angulo[nro_key_p])
+# 4+25+25+25+21
+# while True:
+list_pedestal = getfirstFilefromPath(path=dir_pedestal, meta="PE", ext=".hdf5")
+list_adq = getfirstFilefromPath(path=dir_adq, meta="D", ext=".hdf5")
-nro_file = nro_file #10
+nro_file = nro_file # 10
nro_key_perfil = nro_key_p
-blocksPerFile = 100
-wr_path = "/home/alex/Downloads/hdf5_wr/"
+blocksPerFile = 100
+wr_path = "/home/alex/Downloads/hdf5_wr/"
# Lectura de archivos de adquisicion para adicion de azimuth
for thisFile in range(len(list_adq)):
- print("thisFileAdq",thisFile)
- angulo_adq = numpy.zeros(blocksPerFile)
- tmp = 0
+ print("thisFileAdq", thisFile)
+ angulo_adq = numpy.zeros(blocksPerFile)
+ tmp = 0
for j in range(blocksPerFile):
- iterador = nro_key_perfil + 25*(j-tmp)
+ iterador = nro_key_perfil + 25 * (j - tmp)
if iterador < n_perfiles_p:
nro_file = nro_file
else:
- nro_file = nro_file+1
- tmp = j
+ nro_file = nro_file + 1
+ tmp = j
iterador = nro_key_perfil
- ff_pedestal = list_pedestal[nro_file]
- angulo = getDatavaluefromDirFilename(path=dir_pedestal,file=ff_pedestal,value="azimuth")
- angulo_adq[j]= angulo[iterador]
- copyfile(dir_adq+"/"+list_adq[thisFile],wr_path+list_adq[thisFile])
- fp = h5py.File(wr_path+list_adq[thisFile],'a')
- grp = fp.create_group("Pedestal")
- dset = grp.create_dataset("azimuth" , data=angulo_adq)
+ ff_pedestal = list_pedestal[nro_file]
+ angulo = getDatavaluefromDirFilename(path=dir_pedestal, file=ff_pedestal, value="azimuth")
+ angulo_adq[j] = angulo[iterador]
+ copyfile(dir_adq + "/" + list_adq[thisFile], wr_path + list_adq[thisFile])
+ fp = h5py.File(wr_path + list_adq[thisFile], 'a')
+ grp = fp.create_group("Pedestal")
+ dset = grp.create_dataset("azimuth", data=angulo_adq)
fp.close()
- print("Angulo",angulo_adq)
- print("Angulo",len(angulo_adq))
- nro_key_perfil=iterador + 25
- if nro_key_perfil< n_perfiles_p:
+ print("Angulo", angulo_adq)
+ print("Angulo", len(angulo_adq))
+ nro_key_perfil = iterador + 25
+ if nro_key_perfil < n_perfiles_p:
nro_file = nro_file
else:
- nro_file = nro_file+1
- nro_key_perfil= nro_key_p
+ nro_file = nro_file + 1
+ nro_key_perfil = nro_key_p
diff --git a/setup.py b/setup.py
index 5fa2a77..aaa4f11 100644
--- a/setup.py
+++ b/setup.py
@@ -25,13 +25,13 @@ class build_ext(_build_ext):
self.include_dirs.append(numpy.get_include())
setup(
- name = "schainpy",
- version = __version__,
- description = DOCLINES[0],
- long_description = "\n".join(DOCLINES[2:]),
- url = "https://github.com/JRO-Peru/schainpy",
- author = "Jicamarca Radio Observatory",
- author_email = "jro-developers@jro.igp.gob.pe",
+ name="schainpy",
+ version=__version__,
+ description=DOCLINES[0],
+ long_description="\n".join(DOCLINES[2:]),
+ url="https://github.com/JRO-Peru/schainpy",
+ author="Jicamarca Radio Observatory",
+ author_email="jro-developers@jro.igp.gob.pe",
license="BSD-3-Clause",
classifiers=[
"Development Status :: 4 - Beta",
@@ -49,7 +49,7 @@ setup(
"Programming Language :: Python :: 3.8",
"Topic :: Scientific/Engineering",
],
- packages = {
+ packages={
'schainpy',
'schainpy.model',
'schainpy.model.data',
@@ -61,22 +61,22 @@ setup(
'schainpy.gui',
'schainpy.cli',
},
- package_data = {'': ['schain.conf.template'],
+ package_data={'': ['schain.conf.template'],
'schainpy.files': ['*.oga']
},
- include_package_data = False,
- scripts = ['schainpy/gui/schainGUI'],
- entry_points = {
+ include_package_data=False,
+ scripts=['schainpy/gui/schainGUI'],
+ entry_points={
'console_scripts': [
'schain = schainpy.cli.cli:main',
],
},
- cmdclass = {'build_ext': build_ext},
+ cmdclass={'build_ext': build_ext},
ext_modules=[
Extension("schainpy.model.data._noise", ["schainc/_noise.c"]),
],
- setup_requires = ["numpy"],
- install_requires = [
+ setup_requires=["numpy"],
+ install_requires=[
"scipy",
"h5py",
"matplotlib",