Changes for AMISR ISR
joabAM -
r1465:65c0d2b45bc1

The requested changes are too big and content was truncated.

@@ -0,0 +1,132
1 import numpy
2
3 def setConstants(dataOut):
4 dictionary = {}
5 dictionary["M"] = dataOut.normFactor
6 dictionary["N"] = dataOut.nFFTPoints
7 dictionary["ippSeconds"] = dataOut.ippSeconds
8 dictionary["K"] = dataOut.nIncohInt
9
10 return dictionary
11
12 def initialValuesFunction(data_spc, constants):
13 #Constants
14 M = constants["M"]
15 N = constants["N"]
16 ippSeconds = constants["ippSeconds"]
17
18 S1 = data_spc[0,:]/(N*M)
19 S2 = data_spc[1,:]/(N*M)
20
21 Nl=min(S1)
22 A=sum(S1-Nl)/N
23 #x = dataOut.getVelRange() #below matches Madrigal data better
24 x=numpy.linspace(-(N/2)/(N*ippSeconds),(N/2-1)/(N*ippSeconds),N)*-(6.0/2)
25 v=sum(x*(S1-Nl))/sum(S1-Nl)
26 al1=numpy.sqrt(sum(x**2*(S1-Nl))/sum(S2-Nl)-v**2)
27 p0=[al1,A,A,v,min(S1),min(S2)] #first guess (width, amplitude1, amplitude2, velocity, noise1, noise2)
28 return p0
29
30 def modelFunction(p, constants):
31 ippSeconds = constants["ippSeconds"]
32 N = constants["N"]
33
34 fm_c = ACFtoSPC(p, constants)
35 fm = numpy.hstack((fm_c[0],fm_c[1]))
36 return fm
37
38 def errorFunction(p, constants, LT):
39
40 J=makeJacobian(p, constants)
41 J =numpy.dot(LT,J)
42 covm =numpy.linalg.inv(numpy.dot(J.T ,J))
43 #calculate error as the square root of the covariance matrix diagonal
44 #multiplying by 1.96 would give 95% confidence interval
45 err =numpy.sqrt(numpy.diag(covm))
46 return err
47
48 #-----------------------------------------------------------------------------------
49
50 def ACFw(alpha,A1,A2,vd,x,N,ippSeconds):
51 #creates weighted autocorrelation function based on the operational model
52 #x is n or N-n
53 k=2*numpy.pi/3.0
54 pdt=x*ippSeconds
55 #both correlated channels' ACFs are created at the same time
56 R1=A1*numpy.exp(-1j*k*vd*pdt)/numpy.sqrt(1+(alpha*k*pdt)**2)
57 R2=A2*numpy.exp(-1j*k*vd*pdt)/numpy.sqrt(1+(alpha*k*pdt)**2)
58 # T is the triangle weighting function
59 T=1-abs(x)/N
60 Rp1=T*R1
61 Rp2=T*R2
62 return [Rp1,Rp2]
63
64 def ACFtoSPC(p, constants):
65 #calls the ACF creation function (ACFw) and transforms the ACFs to spectra
66 N = constants["N"]
67 ippSeconds = constants["ippSeconds"]
68
69 n=numpy.linspace(0,(N-1),N)
70 Nn=N-n
71 R = ACFw(p[0],p[1],p[2],p[3],n,N,ippSeconds)
72 RN = ACFw(p[0],p[1],p[2],p[3],Nn,N,ippSeconds)
73 Rf1=R[0]+numpy.conjugate(RN[0])
74 Rf2=R[1]+numpy.conjugate(RN[1])
75 sw1=numpy.fft.fft(Rf1,n=N)
76 sw2=numpy.fft.fft(Rf2,n=N)
77 #the FFT output is shifted, the noise floor added, and only the real part kept
78 sw0=numpy.real(numpy.fft.fftshift(sw1))+abs(p[4])
79 sw1=numpy.real(numpy.fft.fftshift(sw2))+abs(p[5])
80 return [sw0,sw1]
81
82 def makeJacobian(p, constants):
83 #create Jacobian matrix
84 N = constants["N"]
85 IPPt = constants["ippSeconds"]
86
87 n=numpy.linspace(0,(N-1),N)
88 Nn=N-n
89 k=2*numpy.pi/3.0
90 #create weighted ACFs
91 R=ACFw(p[0],p[1],p[2],p[3],n,N,IPPt)
92 RN=ACFw(p[0],p[1],p[2],p[3],Nn,N,IPPt)
93 #take derivatives with respect to the fit parameters
94 Jalpha1=R[0]*-1*(k*n*IPPt)**2*p[0]/(1+(p[0]*k*n*IPPt)**2)+numpy.conjugate(RN[0]*-1*(k*Nn*IPPt)**2*p[0]/(1+(p[0]*k*Nn*IPPt)**2))
95 Jalpha2=R[1]*-1*(k*n*IPPt)**2*p[0]/(1+(p[0]*k*n*IPPt)**2)+numpy.conjugate(RN[1]*-1*(k*Nn*IPPt)**2*p[0]/(1+(p[0]*k*Nn*IPPt)**2))
96 JA1=R[0]/p[1]+numpy.conjugate(RN[0]/p[1])
97 JA2=R[1]/p[2]+numpy.conjugate(RN[1]/p[2])
98 Jvd1=R[0]*-1j*k*n*IPPt+numpy.conjugate(RN[0]*-1j*k*Nn*IPPt)
99 Jvd2=R[1]*-1j*k*n*IPPt+numpy.conjugate(RN[1]*-1j*k*Nn*IPPt)
100 #fft
101 sJalp1=numpy.fft.fft(Jalpha1,n=N)
102 sJalp2=numpy.fft.fft(Jalpha2,n=N)
103 sJA1=numpy.fft.fft(JA1,n=N)
104 sJA2=numpy.fft.fft(JA2,n=N)
105 sJvd1=numpy.fft.fft(Jvd1,n=N)
106 sJvd2=numpy.fft.fft(Jvd2,n=N)
107 sJalp1=numpy.real(numpy.fft.fftshift(sJalp1))
108 sJalp2=numpy.real(numpy.fft.fftshift(sJalp2))
109 sJA1=numpy.real(numpy.fft.fftshift(sJA1))
110 sJA2=numpy.real(numpy.fft.fftshift(sJA2))
111 sJvd1=numpy.real(numpy.fft.fftshift(sJvd1))
112 sJvd2=numpy.real(numpy.fft.fftshift(sJvd2))
113 sJnoise=numpy.ones(numpy.shape(sJvd1))
114 #combine arrays
115 za=numpy.zeros([N])
116 sJalp=zip(sJalp1,sJalp2)
117 sJA1=zip(sJA1,za)
118 sJA2=zip(za,sJA2)
119 sJvd=zip(sJvd1,sJvd2)
120 sJn1=zip(sJnoise, za)
121 sJn2=zip(za, sJnoise)
122 #reshape from 2D to 1D
123 sJalp=numpy.reshape(list(sJalp), [2*N])
124 sJA1=numpy.reshape(list(sJA1), [2*N])
125 sJA2=numpy.reshape(list(sJA2), [2*N])
126 sJvd=numpy.reshape(list(sJvd), [2*N])
127 sJn1=numpy.reshape(list(sJn1), [2*N])
128 sJn2=numpy.reshape(list(sJn2), [2*N])
129 #combine into matrix and transpose
130 J=numpy.array([sJalp,sJA1,sJA2,sJvd,sJn1,sJn2])
131 J=J.T
132 return J
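
The hunk above only defines the fitting helpers; how they are driven is not part of this changeset. Below is a minimal sketch of one way to wire them together; the module name fitacf_amisr, the synthetic data, and the use of scipy.optimize.leastsq are assumptions for illustration, not something shown in the diff.

import numpy
from scipy.optimize import leastsq
from fitacf_amisr import initialValuesFunction, modelFunction, errorFunction  # assumed module name

N, M, ipp = 64, 10, 0.01
constants = {"M": M, "N": N, "ippSeconds": ipp, "K": 1}   # what setConstants(dataOut) would return

# synthetic two-channel spectra, shaped (2, N) like dataOut.data_spc
p_true = [5.0, 1.0, 1.0, 10.0, 0.01, 0.01]                # alpha, A1, A2, vd, noise1, noise2
data_spc = modelFunction(p_true, constants).reshape(2, N) * (N * M)

# measurements stacked the same way modelFunction stacks its output
ydata = numpy.hstack((data_spc[0], data_spc[1])) / (N * M)

def residuals(p, y, consts):
    return y - modelFunction(p, consts)

p0 = initialValuesFunction(data_spc, constants)           # first guess derived from the spectra
p_fit, ier = leastsq(residuals, p0, args=(ydata, constants))

# 1-sigma parameter errors; LT = identity assumes unit data weighting
err = errorFunction(p_fit, constants, numpy.eye(2 * N))
print(p_fit, err)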
@@ -1,699 +1,701
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute, but
103 103 will sleep until it can be called:
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new operation.
154 154 Child classes must be defined as follows:
155 155
156 156 class ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194 if self.t_units == "h_m":
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196 if self.t_units == "h":
197 197 return '{}'.format(self.getDateTime(x).strftime('%H'))
198 198
199 199 def __setup(self, **kwargs):
200 200 '''
201 201 Initialize variables
202 202 '''
203 203
204 204 self.figures = []
205 205 self.axes = []
206 206 self.cb_axes = []
207 207 self.pf_axes = []
208 208 self.localtime = kwargs.pop('localtime', True)
209 209 self.show = kwargs.get('show', True)
210 210 self.save = kwargs.get('save', False)
211 211 self.save_period = kwargs.get('save_period', 0)
212 212 self.colormap = kwargs.get('colormap', self.colormap)
213 213 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
214 214 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
215 215 self.colormaps = kwargs.get('colormaps', None)
216 216 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
217 217 self.showprofile = kwargs.get('showprofile', False)
218 218 self.title = kwargs.get('wintitle', self.CODE.upper())
219 219 self.cb_label = kwargs.get('cb_label', None)
220 220 self.cb_labels = kwargs.get('cb_labels', None)
221 221 self.labels = kwargs.get('labels', None)
222 222 self.xaxis = kwargs.get('xaxis', 'frequency')
223 223 self.zmin = kwargs.get('zmin', None)
224 224 self.zmax = kwargs.get('zmax', None)
225 225 self.zlimits = kwargs.get('zlimits', None)
226 226 self.xmin = kwargs.get('xmin', None)
227 227 self.xmax = kwargs.get('xmax', None)
228 228 self.xrange = kwargs.get('xrange', 12)
229 229 self.xscale = kwargs.get('xscale', None)
230 230 self.ymin = kwargs.get('ymin', None)
231 231 self.ymax = kwargs.get('ymax', None)
232 232 self.yscale = kwargs.get('yscale', None)
233 233 self.xlabel = kwargs.get('xlabel', None)
234 234 self.attr_time = kwargs.get('attr_time', 'utctime')
235 235 self.attr_data = kwargs.get('attr_data', 'data_param')
236 236 self.decimation = kwargs.get('decimation', None)
237 237 self.oneFigure = kwargs.get('oneFigure', True)
238 238 self.width = kwargs.get('width', None)
239 239 self.height = kwargs.get('height', None)
240 240 self.colorbar = kwargs.get('colorbar', True)
241 241 self.factors = kwargs.get('factors', range(18))
242 242 self.channels = kwargs.get('channels', None)
243 243 self.titles = kwargs.get('titles', [])
244 244 self.polar = False
245 245 self.type = kwargs.get('type', 'iq')
246 246 self.grid = kwargs.get('grid', False)
247 247 self.pause = kwargs.get('pause', False)
248 248 self.save_code = kwargs.get('save_code', self.CODE)
249 249 self.throttle = kwargs.get('throttle', 0)
250 250 self.exp_code = kwargs.get('exp_code', None)
251 251 self.server = kwargs.get('server', False)
252 252 self.sender_period = kwargs.get('sender_period', 60)
253 253 self.tag = kwargs.get('tag', '')
254 254 self.height_index = kwargs.get('height_index', None)
255 255 self.__throttle_plot = apply_throttle(self.throttle)
256 256 code = self.attr_data if self.attr_data else self.CODE
257 257 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
258 258 self.tmin = kwargs.get('tmin', None)
259 259 self.t_units = kwargs.get('t_units', "h_m")
260 self.selectedHeight = kwargs.get('selectedHeight', None)
261
260 262
261 263 if self.server:
262 264 if not self.server.startswith('tcp://'):
263 265 self.server = 'tcp://{}'.format(self.server)
264 266 log.success(
265 267 'Sending to server: {}'.format(self.server),
266 268 self.name
267 269 )
268 270
269 271 if isinstance(self.attr_data, str):
270 272 self.attr_data = [self.attr_data]
271 273
272 274 def __setup_plot(self):
273 275 '''
274 276 Common setup for all figures, here figures and axes are created
275 277 '''
276 278
277 279 self.setup()
278 280
279 281 self.time_label = 'LT' if self.localtime else 'UTC'
280 282
281 283 if self.width is None:
282 284 self.width = 8
283 285
284 286 self.figures = []
285 287 self.axes = []
286 288 self.cb_axes = []
287 289 self.pf_axes = []
288 290 self.cmaps = []
289 291
290 292 size = '15%' if self.ncols == 1 else '30%'
291 293 pad = '4%' if self.ncols == 1 else '8%'
292 294
293 295 if self.oneFigure:
294 296 if self.height is None:
295 297 self.height = 1.4 * self.nrows + 1
296 298 fig = plt.figure(figsize=(self.width, self.height),
297 299 edgecolor='k',
298 300 facecolor='w')
299 301 self.figures.append(fig)
300 302 for n in range(self.nplots):
301 303 ax = fig.add_subplot(self.nrows, self.ncols,
302 304 n + 1, polar=self.polar)
303 305 ax.tick_params(labelsize=8)
304 306 ax.firsttime = True
305 307 ax.index = 0
306 308 ax.press = None
307 309 self.axes.append(ax)
308 310 if self.showprofile:
309 311 cax = self.__add_axes(ax, size=size, pad=pad)
310 312 cax.tick_params(labelsize=8)
311 313 self.pf_axes.append(cax)
312 314 else:
313 315 if self.height is None:
314 316 self.height = 3
315 317 for n in range(self.nplots):
316 318 fig = plt.figure(figsize=(self.width, self.height),
317 319 edgecolor='k',
318 320 facecolor='w')
319 321 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
320 322 ax.tick_params(labelsize=8)
321 323 ax.firsttime = True
322 324 ax.index = 0
323 325 ax.press = None
324 326 self.figures.append(fig)
325 327 self.axes.append(ax)
326 328 if self.showprofile:
327 329 cax = self.__add_axes(ax, size=size, pad=pad)
328 330 cax.tick_params(labelsize=8)
329 331 self.pf_axes.append(cax)
330 332
331 333 for n in range(self.nrows):
332 334 if self.colormaps is not None:
333 335 cmap = plt.get_cmap(self.colormaps[n])
334 336 else:
335 337 cmap = plt.get_cmap(self.colormap)
336 338 cmap.set_bad(self.bgcolor, 1.)
337 339 self.cmaps.append(cmap)
338 340
339 341 def __add_axes(self, ax, size='30%', pad='8%'):
340 342 '''
341 343 Add new axes to the given figure
342 344 '''
343 345 divider = make_axes_locatable(ax)
344 346 nax = divider.new_horizontal(size=size, pad=pad)
345 347 ax.figure.add_axes(nax)
346 348 return nax
347 349
348 350 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
349 351 '''
350 352 Create a masked array for missing data
351 353 '''
352 354 if x_buffer.shape[0] < 2:
353 355 return x_buffer, y_buffer, z_buffer
354 356
355 357 deltas = x_buffer[1:] - x_buffer[0:-1]
356 358 x_median = numpy.median(deltas)
357 359
358 360 index = numpy.where(deltas > 5 * x_median)
359 361
360 362 if len(index[0]) != 0:
361 363 z_buffer[::, index[0], ::] = self.__missing
362 364 z_buffer = numpy.ma.masked_inside(z_buffer,
363 365 0.99 * self.__missing,
364 366 1.01 * self.__missing)
365 367
366 368 return x_buffer, y_buffer, z_buffer
367 369
368 370 def decimate(self):
369 371
370 372 # dx = int(len(self.x)/self.__MAXNUMX) + 1
371 373 dy = int(len(self.y) / self.decimation) + 1
372 374
373 375 # x = self.x[::dx]
374 376 x = self.x
375 377 y = self.y[::dy]
376 378 z = self.z[::, ::, ::dy]
377 379
378 380 return x, y, z
379 381
380 382 def format(self):
381 383 '''
382 384 Set min and max values, labels, ticks and titles
383 385 '''
384 386
385 387 for n, ax in enumerate(self.axes):
386 388 if ax.firsttime:
387 389 if self.xaxis != 'time':
388 390 xmin = self.xmin
389 391 xmax = self.xmax
390 392 else:
391 393 xmin = self.tmin
392 394 xmax = self.tmin + self.xrange*60*60
393 395 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
394 396 if self.t_units == "h_m":
395 397 ax.xaxis.set_major_locator(LinearLocator(9))
396 398 if self.t_units == "h":
397 399 ax.xaxis.set_major_locator(LinearLocator(int((xmax-xmin)/3600)+1))
398 400 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
399 401 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
400 402 ax.set_facecolor(self.bgcolor)
401 403 if self.xscale:
402 404 ax.xaxis.set_major_formatter(FuncFormatter(
403 405 lambda x, pos: '{0:g}'.format(x*self.xscale)))
404 406 if self.yscale:
405 407 ax.yaxis.set_major_formatter(FuncFormatter(
406 408 lambda x, pos: '{0:g}'.format(x*self.yscale)))
407 409 if self.xlabel is not None:
408 410 ax.set_xlabel(self.xlabel)
409 411 if self.ylabel is not None:
410 412 ax.set_ylabel(self.ylabel)
411 413 if self.showprofile:
412 414 self.pf_axes[n].set_ylim(ymin, ymax)
413 415 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
414 416 self.pf_axes[n].set_xlabel('dB')
415 417 self.pf_axes[n].grid(b=True, axis='x')
416 418 [tick.set_visible(False)
417 419 for tick in self.pf_axes[n].get_yticklabels()]
418 420 if self.colorbar:
419 421 ax.cbar = plt.colorbar(
420 422 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
421 423 ax.cbar.ax.tick_params(labelsize=8)
422 424 ax.cbar.ax.press = None
423 425 if self.cb_label:
424 426 ax.cbar.set_label(self.cb_label, size=8)
425 427 elif self.cb_labels:
426 428 ax.cbar.set_label(self.cb_labels[n], size=8)
427 429 else:
428 430 ax.cbar = None
429 431 ax.set_xlim(xmin, xmax)
430 432 ax.set_ylim(ymin, ymax)
431 433 ax.firsttime = False
432 434 if self.grid:
433 435 ax.grid(True)
434 436 if not self.polar:
435 437 ax.set_title('{} {} {}'.format(
436 438 self.titles[n],
437 439 self.getDateTime(self.data.max_time).strftime(
438 440 '%Y-%m-%d %H:%M:%S'),
439 441 self.time_label),
440 442 size=8)
441 443 else:
442 444 ax.set_title('{}'.format(self.titles[n]), size=8)
443 445 ax.set_ylim(0, 90)
444 446 ax.set_yticks(numpy.arange(0, 90, 20))
445 447 ax.yaxis.labelpad = 40
446 448
447 449 if self.firsttime:
448 450 for n, fig in enumerate(self.figures):
449 451 fig.subplots_adjust(**self.plots_adjust)
450 452 self.firsttime = False
451 453
452 454 def clear_figures(self):
453 455 '''
454 456 Reset axes for redraw plots
455 457 '''
456 458
457 459 for ax in self.axes+self.pf_axes+self.cb_axes:
458 460 ax.clear()
459 461 ax.firsttime = True
460 462 if hasattr(ax, 'cbar') and ax.cbar:
461 463 ax.cbar.remove()
462 464
463 465 def __plot(self):
464 466 '''
465 467 Main function to plot, format and save figures
466 468 '''
467 469
468 470 self.plot()
469 471 self.format()
470 472
471 473 for n, fig in enumerate(self.figures):
472 474 if self.nrows == 0 or self.nplots == 0:
473 475 log.warning('No data', self.name)
474 476 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
475 477 fig.canvas.manager.set_window_title(self.CODE)
476 478 continue
477 479
478 480 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
479 481 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
480 482 fig.canvas.draw()
481 483 if self.show:
482 484 fig.show()
483 485 figpause(0.01)
484 486
485 487 if self.save:
486 488 self.save_figure(n)
487 489
488 490 if self.server:
489 491 self.send_to_server()
490 492
491 493 def __update(self, dataOut, timestamp):
492 494 '''
493 495 '''
494 496
495 497 metadata = {
496 498 'yrange': dataOut.heightList,
497 499 'interval': dataOut.timeInterval,
498 500 'channels': dataOut.channelList
499 501 }
500 502 data, meta = self.update(dataOut)
501 503 metadata.update(meta)
502 504 self.data.update(data, timestamp, metadata)
503 505
504 506 def save_figure(self, n):
505 507 '''
506 508 '''
507 509
508 510 if (self.data.max_time - self.save_time) <= self.save_period:
509 511 return
510 512
511 513 self.save_time = self.data.max_time
512 514
513 515 fig = self.figures[n]
514 516
515 517 if self.throttle == 0:
516 518 figname = os.path.join(
517 519 self.save,
518 520 self.save_code,
519 521 '{}_{}.png'.format(
520 522 self.save_code,
521 523 self.getDateTime(self.data.max_time).strftime(
522 524 '%Y%m%d_%H%M%S'
523 525 ),
524 526 )
525 527 )
526 528 log.log('Saving figure: {}'.format(figname), self.name)
527 529 if not os.path.isdir(os.path.dirname(figname)):
528 530 os.makedirs(os.path.dirname(figname))
529 531 fig.savefig(figname)
530 532
531 533 figname = os.path.join(
532 534 self.save,
533 535 '{}_{}.png'.format(
534 536 self.save_code,
535 537 self.getDateTime(self.data.min_time).strftime(
536 538 '%Y%m%d'
537 539 ),
538 540 )
539 541 )
540 542
541 543 log.log('Saving figure: {}'.format(figname), self.name)
542 544 if not os.path.isdir(os.path.dirname(figname)):
543 545 os.makedirs(os.path.dirname(figname))
544 546 fig.savefig(figname)
545 547
546 548 def send_to_server(self):
547 549 '''
548 550 '''
549 551
550 552 if self.exp_code == None:
551 553 log.warning('Missing `exp_code`, skipping sending to server...')
552 554
553 555 last_time = self.data.max_time
554 556 interval = last_time - self.sender_time
555 557 if interval < self.sender_period:
556 558 return
557 559
558 560 self.sender_time = last_time
559 561
560 562 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
561 563 for attr in attrs:
562 564 value = getattr(self, attr)
563 565 if value:
564 566 if isinstance(value, (numpy.float32, numpy.float64)):
565 567 value = round(float(value), 2)
566 568 self.data.meta[attr] = value
567 569 if self.colormap == 'jet':
568 570 self.data.meta['colormap'] = 'Jet'
569 571 elif 'RdBu' in self.colormap:
570 572 self.data.meta['colormap'] = 'RdBu'
571 573 else:
572 574 self.data.meta['colormap'] = 'Viridis'
573 575 self.data.meta['interval'] = int(interval)
574 576
575 577 self.sender_queue.append(last_time)
576 578
577 579 while True:
578 580 try:
579 581 tm = self.sender_queue.popleft()
580 582 except IndexError:
581 583 break
582 584 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
583 585 self.socket.send_string(msg)
584 586 socks = dict(self.poll.poll(2000))
585 587 if socks.get(self.socket) == zmq.POLLIN:
586 588 reply = self.socket.recv_string()
587 589 if reply == 'ok':
588 590 log.log("Response from server ok", self.name)
589 591 time.sleep(0.1)
590 592 continue
591 593 else:
592 594 log.warning(
593 595 "Malformed reply from server: {}".format(reply), self.name)
594 596 else:
595 597 log.warning(
596 598 "No response from server, retrying...", self.name)
597 599 self.sender_queue.appendleft(tm)
598 600 self.socket.setsockopt(zmq.LINGER, 0)
599 601 self.socket.close()
600 602 self.poll.unregister(self.socket)
601 603 self.socket = self.context.socket(zmq.REQ)
602 604 self.socket.connect(self.server)
603 605 self.poll.register(self.socket, zmq.POLLIN)
604 606 break
605 607
606 608 def setup(self):
607 609 '''
608 610 This method should be implemented in the child class; the following
609 611 attributes should be set:
610 612
611 613 self.nrows: number of rows
612 614 self.ncols: number of cols
613 615 self.nplots: number of plots (channels or pairs)
614 616 self.ylabel: label for Y axes
615 617 self.titles: list of axes title
616 618
617 619 '''
618 620 raise NotImplementedError
619 621
620 622 def plot(self):
621 623 '''
622 624 Must be defined in the child class; this is the actual plotting method
623 625 '''
624 626 raise NotImplementedError
625 627
626 628 def update(self, dataOut):
627 629 '''
628 630 Must be defined in the child class; it should update self.data with new data
629 631 '''
630 632
631 633 data = {
632 634 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
633 635 }
634 636 meta = {}
635 637
636 638 return data, meta
637 639
638 640 def run(self, dataOut, **kwargs):
639 641 '''
640 642 Main plotting routine
641 643 '''
642 644 if self.isConfig is False:
643 645 self.__setup(**kwargs)
644 646
645 647 if self.localtime:
646 648 self.getDateTime = datetime.datetime.fromtimestamp
647 649 else:
648 650 self.getDateTime = datetime.datetime.utcfromtimestamp
649 651
650 652 self.data.setup()
651 653 self.isConfig = True
652 654 if self.server:
653 655 self.context = zmq.Context()
654 656 self.socket = self.context.socket(zmq.REQ)
655 657 self.socket.connect(self.server)
656 658 self.poll = zmq.Poller()
657 659 self.poll.register(self.socket, zmq.POLLIN)
658 660
659 661 tm = getattr(dataOut, self.attr_time)
660 662
661 663 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
662 664 self.save_time = tm
663 665 self.__plot()
664 666 self.tmin += self.xrange*60*60
665 667 self.data.setup()
666 668 self.clear_figures()
667 669
668 670 self.__update(dataOut, tm)
669 671
670 672 if self.isPlotConfig is False:
671 673 self.__setup_plot()
672 674 self.isPlotConfig = True
673 675 if self.xaxis == 'time':
674 676 dt = self.getDateTime(tm)
675 677 if self.xmin is None:
676 678 self.tmin = tm
677 679 self.xmin = dt.hour
678 680 minutes = (self.xmin-int(self.xmin)) * 60
679 681 seconds = (minutes - int(minutes)) * 60
680 682 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
681 683 datetime.datetime(1970, 1, 1)).total_seconds()
682 684 if self.localtime:
683 685 self.tmin += time.timezone
684 686
685 687 if self.xmin is not None and self.xmax is not None:
686 688 self.xrange = self.xmax - self.xmin
687 689
688 690 if self.throttle == 0:
689 691 self.__plot()
690 692 else:
691 693 self.__throttle_plot(self.__plot)#, coerce=coerce)
692 694
693 695 def close(self):
694 696
695 697 if self.data and not self.data.flagNoData:
696 698 self.save_time = 0
697 699 self.__plot()
698 700 if self.data and not self.data.flagNoData and self.pause:
699 701 figpause(10)
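
Since this hunk is the base class every plot in the changeset subclasses, here is a minimal sketch of the setup/update/plot pattern its docstrings describe. The class is illustrative only, and the data_param attribute it reads from dataOut is an assumed example, not something defined by this diff.

import numpy
from schainpy.model.graphics.jroplot_base import Plot, plt


class ExampleScopePlot(Plot):

    CODE = 'example'            # key used for self.data[self.CODE]
    plot_type = 'scatter'

    def setup(self):
        # attributes the base class expects, per the Plot.setup() docstring
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.xlabel = 'Range [km]'
        self.ylabel = 'Amplitude'
        self.titles = ['Example']
        self.colorbar = False

    def update(self, dataOut):
        # return (data, meta); the base class buffers it into self.data
        data = {self.CODE: numpy.abs(getattr(dataOut, 'data_param'))}
        return data, {}

    def plot(self):
        snapshot = self.data[-1][self.CODE]   # last buffered block, (channels, heights)
        x = self.data.yrange                  # heightList published as yrange metadata
        ax = self.axes[0]
        if ax.firsttime:
            ax.plt = ax.plot(x, snapshot[0], lw=1)[0]
        else:
            ax.plt.set_data(x, snapshot[0])

The per-axis firsttime flag mirrors what SpectraPlot and RTIPlot do in the later hunks: artists are created once, then only their data is updated on subsequent calls.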
@@ -1,102 +1,107
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plo Specra Heis data
6 6
7 7 """
8 8
9 9 import numpy
10 10
11 11 from schainpy.model.graphics.jroplot_base import Plot, plt
12 12 import matplotlib.pyplot as plt
13 13
14 14 class SpectraHeisPlot(Plot):
15 15
16 16 CODE = 'spc_heis'
17
17 channelList = []
18 18 def setup(self):
19 19
20 20 self.nplots = len(self.data.channels)
21 21 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
22 22 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
23 23 self.height = 2.6 * self.nrows
24 24 self.width = 3.5 * self.ncols
25 25 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.95, 'bottom': 0.08})
26 26 self.ylabel = 'Intensity [dB]'
27 27 self.xlabel = 'Frequency [KHz]'
28 28 self.colorbar = False
29 29
30 30 def update(self, dataOut):
31 if len(self.channelList) == 0:
32 self.channelList = dataOut.channelList
31 33
32 34 data = {}
33 35 meta = {}
34 36 spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
35 37 data['spc_heis'] = spc
36 38
37 39 return data, meta
38 40
39 41 def plot(self):
40 42
41 43 c = 3E8
42 44 deltaHeight = self.data.yrange[1] - self.data.yrange[0] # yrange = heightList
43 45 x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
44 46 self.y = self.data[-1]['spc_heis']
45 47 self.titles = []
46 48
47 49 Maintitle = "Range from %d km to %d km" %(int(self.data.yrange[0]),int(self.data.yrange[-1]))
48 50 for n, ax in enumerate(self.axes):
49 51 ychannel = self.y[n,:]
50 52 if ax.firsttime:
51 53 self.xmin = min(x) if self.xmin is None else self.xmin
52 54 self.xmax = max(x) if self.xmax is None else self.xmax
53 55 ax.plt = ax.plot(x, ychannel, lw=1, color='b')[0]
56 ax.set_ylim(ymin=self.zmin, ymax=self.zmax)
57 ax.set_xlim(xmin=self.xmin, xmax=self.xmax)
54 58 else:
55 59 ax.plt.set_data(x, ychannel)
56
60 ax.set_ylim(ymin=self.zmin, ymax=self.zmax)
61 ax.set_xlim(xmin=self.xmin, xmax=self.xmax)
57 62 self.titles.append("Channel {}: {:4.2f}dB".format(n, numpy.max(ychannel)))
58 63 plt.suptitle(Maintitle)
59 64
60 65 class RTIHeisPlot(Plot):
61 66
62 67 CODE = 'rti_heis'
63 68
64 69 def setup(self):
65 70
66 71 self.xaxis = 'time'
67 72 self.ncols = 1
68 73 self.nrows = 1
69 74 self.nplots = 1
70 75 self.ylabel = 'Intensity [dB]'
71 76 self.xlabel = 'Time'
72 77 self.titles = ['RTI']
73 78 self.colorbar = False
74 79 self.height = 4
75 80 self.plots_adjust.update({'right': 0.85 })
76 81
77 82 def update(self, dataOut):
78 83
79 84 data = {}
80 85 meta = {}
81 86 spc = dataOut.data_spc / dataOut.normFactor
82 87 spc = 10*numpy.log10(numpy.average(spc, axis=1))
83 88 data['rti_heis'] = spc
84 89
85 90 return data, meta
86 91
87 92 def plot(self):
88 93
89 94 x = self.data.times
90 95 Y = self.data['rti_heis']
91 96
92 97 if self.axes[0].firsttime:
93 98 self.ymin = numpy.nanmin(Y) - 5 if self.ymin == None else self.ymin
94 99 self.ymax = numpy.nanmax(Y) + 5 if self.ymax == None else self.ymax
95 100 for ch in self.data.channels:
96 101 y = Y[ch]
97 102 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
98 103 plt.legend(bbox_to_anchor=(1.18, 1.0))
99 104 else:
100 105 for ch in self.data.channels:
101 106 y = Y[ch]
102 107 self.axes[0].lines[ch].set_data(x, y)
@@ -1,355 +1,357
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 50 def update(self, dataOut):
51 self.update_list(dataOut)
52 data = {
53 'snr': 10*numpy.log10(dataOut.data_snr)
54 }
55
56 return data, {}
51 if len(self.channelList) == 0:
52 self.update_list(dataOut)
53 data = {}
54 meta = {}
55 data['snr'] = 10*numpy.log10(dataOut.data_snr)
56
57 return data, meta
57 58
58 59 class DopplerPlot(RTIPlot):
59 60 '''
60 61 Plot for DOPPLER Data (1st moment)
61 62 '''
62 63
63 64 CODE = 'dop'
64 65 colormap = 'jet'
65 66
66 67 def update(self, dataOut):
67 68 self.update_list(dataOut)
68 69 data = {
69 70 'dop': 10*numpy.log10(dataOut.data_dop)
70 71 }
71 72
72 73 return data, {}
73 74
74 75 class PowerPlot(RTIPlot):
75 76 '''
76 77 Plot for Power Data (0 moment)
77 78 '''
78 79
79 80 CODE = 'pow'
80 81 colormap = 'jet'
81 82
82 83 def update(self, dataOut):
83 84 self.update_list(dataOut)
84 85 data = {
85 86 'pow': 10*numpy.log10(dataOut.data_pow)
86 87 }
88 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
87 89 return data, {}
88 90
89 91 class SpectralWidthPlot(RTIPlot):
90 92 '''
91 93 Plot for Spectral Width Data (2nd moment)
92 94 '''
93 95
94 96 CODE = 'width'
95 97 colormap = 'jet'
96 98
97 99 def update(self, dataOut):
98 100 self.update_list(dataOut)
99 101 data = {
100 102 'width': dataOut.data_width
101 103 }
102
104 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
103 105 return data, {}
104 106
105 107 class SkyMapPlot(Plot):
106 108 '''
107 109 Plot for meteors detection data
108 110 '''
109 111
110 112 CODE = 'param'
111 113
112 114 def setup(self):
113 115
114 116 self.ncols = 1
115 117 self.nrows = 1
116 118 self.width = 7.2
117 119 self.height = 7.2
118 120 self.nplots = 1
119 121 self.xlabel = 'Zonal Zenith Angle (deg)'
120 122 self.ylabel = 'Meridional Zenith Angle (deg)'
121 123 self.polar = True
122 124 self.ymin = -180
123 125 self.ymax = 180
124 126 self.colorbar = False
125 127
126 128 def plot(self):
127 129
128 130 arrayParameters = numpy.concatenate(self.data['param'])
129 131 error = arrayParameters[:, -1]
130 132 indValid = numpy.where(error == 0)[0]
131 133 finalMeteor = arrayParameters[indValid, :]
132 134 finalAzimuth = finalMeteor[:, 3]
133 135 finalZenith = finalMeteor[:, 4]
134 136
135 137 x = finalAzimuth * numpy.pi / 180
136 138 y = finalZenith
137 139
138 140 ax = self.axes[0]
139 141
140 142 if ax.firsttime:
141 143 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
142 144 else:
143 145 ax.plot.set_data(x, y)
144 146
145 147 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
146 148 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
147 149 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
148 150 dt2,
149 151 len(x))
150 152 self.titles[0] = title
151 153
152 154
153 155 class GenericRTIPlot(Plot):
154 156 '''
155 157 Plot for data_xxxx object
156 158 '''
157 159
158 160 CODE = 'param'
159 161 colormap = 'viridis'
160 162 plot_type = 'pcolorbuffer'
161 163
162 164 def setup(self):
163 165 self.xaxis = 'time'
164 166 self.ncols = 1
165 167 self.nrows = self.data.shape('param')[0]
166 168 self.nplots = self.nrows
167 169 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
168 170
169 171 if not self.xlabel:
170 172 self.xlabel = 'Time'
171 173
172 174 self.ylabel = 'Height [km]'
173 175 if not self.titles:
174 176 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
175 177
176 178 def update(self, dataOut):
177 179
178 180 data = {
179 181 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
180 182 }
181 183
182 184 meta = {}
183 185
184 186 return data, meta
185 187
186 188 def plot(self):
187 189 # self.data.normalize_heights()
188 190 self.x = self.data.times
189 191 self.y = self.data.yrange
190 192 self.z = self.data['param']
191 193
192 194 self.z = numpy.ma.masked_invalid(self.z)
193 195
194 196 if self.decimation is None:
195 197 x, y, z = self.fill_gaps(self.x, self.y, self.z)
196 198 else:
197 199 x, y, z = self.fill_gaps(*self.decimate())
198 200
199 201 for n, ax in enumerate(self.axes):
200 202
201 203 self.zmax = self.zmax if self.zmax is not None else numpy.max(
202 204 self.z[n])
203 205 self.zmin = self.zmin if self.zmin is not None else numpy.min(
204 206 self.z[n])
205 207
206 208 if ax.firsttime:
207 209 if self.zlimits is not None:
208 210 self.zmin, self.zmax = self.zlimits[n]
209 211
210 212 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
211 213 vmin=self.zmin,
212 214 vmax=self.zmax,
213 215 cmap=self.cmaps[n]
214 216 )
215 217 else:
216 218 if self.zlimits is not None:
217 219 self.zmin, self.zmax = self.zlimits[n]
218 220 ax.collections.remove(ax.collections[0])
219 221 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
220 222 vmin=self.zmin,
221 223 vmax=self.zmax,
222 224 cmap=self.cmaps[n]
223 225 )
224 226
225 227
226 228 class PolarMapPlot(Plot):
227 229 '''
228 230 Plot for weather radar
229 231 '''
230 232
231 233 CODE = 'param'
232 234 colormap = 'seismic'
233 235
234 236 def setup(self):
235 237 self.ncols = 1
236 238 self.nrows = 1
237 239 self.width = 9
238 240 self.height = 8
239 241 self.mode = self.data.meta['mode']
240 242 if self.channels is not None:
241 243 self.nplots = len(self.channels)
242 244 self.nrows = len(self.channels)
243 245 else:
244 246 self.nplots = self.data.shape(self.CODE)[0]
245 247 self.nrows = self.nplots
246 248 self.channels = list(range(self.nplots))
247 249 if self.mode == 'E':
248 250 self.xlabel = 'Longitude'
249 251 self.ylabel = 'Latitude'
250 252 else:
251 253 self.xlabel = 'Range (km)'
252 254 self.ylabel = 'Height (km)'
253 255 self.bgcolor = 'white'
254 256 self.cb_labels = self.data.meta['units']
255 257 self.lat = self.data.meta['latitude']
256 258 self.lon = self.data.meta['longitude']
257 259 self.xmin, self.xmax = float(
258 260 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
259 261 self.ymin, self.ymax = float(
260 262 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
261 263 # self.polar = True
262 264
263 265 def plot(self):
264 266
265 267 for n, ax in enumerate(self.axes):
266 268 data = self.data['param'][self.channels[n]]
267 269
268 270 zeniths = numpy.linspace(
269 271 0, self.data.meta['max_range'], data.shape[1])
270 272 if self.mode == 'E':
271 273 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
272 274 r, theta = numpy.meshgrid(zeniths, azimuths)
273 275 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
274 276 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
275 277 x = km2deg(x) + self.lon
276 278 y = km2deg(y) + self.lat
277 279 else:
278 280 azimuths = numpy.radians(self.data.yrange)
279 281 r, theta = numpy.meshgrid(zeniths, azimuths)
280 282 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
281 283 self.y = zeniths
282 284
283 285 if ax.firsttime:
284 286 if self.zlimits is not None:
285 287 self.zmin, self.zmax = self.zlimits[n]
286 288 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
287 289 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
288 290 vmin=self.zmin,
289 291 vmax=self.zmax,
290 292 cmap=self.cmaps[n])
291 293 else:
292 294 if self.zlimits is not None:
293 295 self.zmin, self.zmax = self.zlimits[n]
294 296 ax.collections.remove(ax.collections[0])
295 297 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
296 298 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
297 299 vmin=self.zmin,
298 300 vmax=self.zmax,
299 301 cmap=self.cmaps[n])
300 302
301 303 if self.mode == 'A':
302 304 continue
303 305
304 306 # plot district names
305 307 f = open('/data/workspace/schain_scripts/distrito.csv')
306 308 for line in f:
307 309 label, lon, lat = [s.strip() for s in line.split(',') if s]
308 310 lat = float(lat)
309 311 lon = float(lon)
310 312 # ax.plot(lon, lat, '.b', ms=2)
311 313 ax.text(lon, lat, label.decode('utf8'), ha='center',
312 314 va='bottom', size='8', color='black')
313 315
314 316 # plot boundaries (limites)
315 317 limites = []
316 318 tmp = []
317 319 for line in open('/data/workspace/schain_scripts/lima.csv'):
318 320 if '#' in line:
319 321 if tmp:
320 322 limites.append(tmp)
321 323 tmp = []
322 324 continue
323 325 values = line.strip().split(',')
324 326 tmp.append((float(values[0]), float(values[1])))
325 327 for points in limites:
326 328 ax.add_patch(
327 329 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
328 330
329 331 # plot river basins (cuencas)
330 332 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
331 333 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
332 334 values = [line.strip().split(',') for line in f]
333 335 points = [(float(s[0]), float(s[1])) for s in values]
334 336 ax.add_patch(Polygon(points, ec='b', fc='none'))
335 337
336 338 # plot grid
337 339 for r in (15, 30, 45, 60):
338 340 ax.add_artist(plt.Circle((self.lon, self.lat),
339 341 km2deg(r), color='0.6', fill=False, lw=0.2))
340 342 ax.text(
341 343 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
342 344 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
343 345 '{}km'.format(r),
344 346 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
345 347
346 348 if self.mode == 'E':
347 349 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
348 350 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
349 351 else:
350 352 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
351 353 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
352 354
353 355 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
354 356 self.titles = ['{} {}'.format(
355 357 self.data.parameters[x], title) for x in self.channels]
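
For reference, a small usage sketch of the ll2xy/km2deg helpers defined at the top of this file; the import path and the site coordinates are assumptions used only for illustration.

# assumed import path; adjust to wherever this module lives in the package
from schainpy.model.graphics.jroplot_parameters import ll2xy, km2deg

site_lat, site_lon = -11.95, -76.87      # approximate radar site, illustrative only

# east/north offsets (km) of a point 0.1 degrees north-east of the site:
# ll2xy computes the great-circle range (12742 = 2 * EARTH_RADIUS) and the
# bearing, then projects them onto a local x (east) / y (north) plane
x, y = ll2xy(site_lat, site_lon, site_lat + 0.1, site_lon + 0.1)
print(x, y)                              # roughly 10.9 km east, 11.1 km north

# km2deg is the small-angle inverse used to place the range rings in PolarMapPlot
print(km2deg(15.0))                      # ~0.135 degrees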
@@ -1,737 +1,961
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13 13 from itertools import combinations
14 14
15 15
16 16 class SpectraPlot(Plot):
17 17 '''
18 18 Plot for Spectra data
19 19 '''
20 20
21 21 CODE = 'spc'
22 22 colormap = 'jet'
23 23 plot_type = 'pcolor'
24 24 buffering = False
25 25 channelList = []
26 26
27 27 def setup(self):
28
28 29 self.nplots = len(self.data.channels)
29 30 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
30 31 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
31 32 self.height = 2.6 * self.nrows
32 33
33 34 self.cb_label = 'dB'
34 35 if self.showprofile:
35 36 self.width = 4 * self.ncols
36 37 else:
37 38 self.width = 3.5 * self.ncols
38 39 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
39 40 self.ylabel = 'Range [km]'
41
42
40 43 def update_list(self,dataOut):
41 44 if len(self.channelList) == 0:
42 45 self.channelList = dataOut.channelList
43 46
44 47 def update(self, dataOut):
48
45 49 self.update_list(dataOut)
46 50 data = {}
47 51 meta = {}
48 52 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
49
50 53 data['spc'] = spc
51 54 data['rti'] = dataOut.getPower()
52 55 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
53 56 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
54 57 if self.CODE == 'spc_moments':
55 58 data['moments'] = dataOut.moments
56 59
57 60 return data, meta
58 61
59 62 def plot(self):
60 63 if self.xaxis == "frequency":
61 64 x = self.data.xrange[0]
62 65 self.xlabel = "Frequency (kHz)"
63 66 elif self.xaxis == "time":
64 67 x = self.data.xrange[1]
65 68 self.xlabel = "Time (ms)"
66 69 else:
67 70 x = self.data.xrange[2]
68 71 self.xlabel = "Velocity (m/s)"
69 72
70 73 if self.CODE == 'spc_moments':
71 74 x = self.data.xrange[2]
72 75 self.xlabel = "Velocity (m/s)"
73 76
74 77 self.titles = []
75
76 78 y = self.data.yrange
77 79 self.y = y
78 80
79 81 data = self.data[-1]
80 82 z = data['spc']
81 83
82 84 for n, ax in enumerate(self.axes):
83 85 noise = data['noise'][n]
84 86 if self.CODE == 'spc_moments':
85 87 mean = data['moments'][n, 1]
86 88 if ax.firsttime:
87 89 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
88 90 self.xmin = self.xmin if self.xmin else -self.xmax
89 91 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
90 92 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
91 93 ax.plt = ax.pcolormesh(x, y, z[n].T,
92 94 vmin=self.zmin,
93 95 vmax=self.zmax,
94 96 cmap=plt.get_cmap(self.colormap)
95 97 )
96 98
97 99 if self.showprofile:
98 100 ax.plt_profile = self.pf_axes[n].plot(
99 101 data['rti'][n], y)[0]
100 102 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
101 103 color="k", linestyle="dashed", lw=1)[0]
102 104 if self.CODE == 'spc_moments':
103 105 ax.plt_mean = ax.plot(mean, y, color='k')[0]
104 106 else:
105 107 ax.plt.set_array(z[n].T.ravel())
106 108 if self.showprofile:
107 109 ax.plt_profile.set_data(data['rti'][n], y)
108 110 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
109 111 if self.CODE == 'spc_moments':
110 112 ax.plt_mean.set_data(mean, y)
111 113 self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
112 114
113 115
114 116 class CrossSpectraPlot(Plot):
115 117
116 118 CODE = 'cspc'
117 119 colormap = 'jet'
118 120 plot_type = 'pcolor'
119 121 zmin_coh = None
120 122 zmax_coh = None
121 123 zmin_phase = None
122 124 zmax_phase = None
123 125 realChannels = None
124 126 crossPairs = None
125 127
126 128 def setup(self):
127 129
128 130 self.ncols = 4
129 131 self.nplots = len(self.data.pairs) * 2
130 132 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
131 133 self.width = 3.1 * self.ncols
132 134 self.height = 2.6 * self.nrows
133 135 self.ylabel = 'Range [km]'
134 136 self.showprofile = False
135 137 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
136 138
137 139 def update(self, dataOut):
138 140
139 141 data = {}
140 142 meta = {}
141 143
142 144 spc = dataOut.data_spc
143 145 cspc = dataOut.data_cspc
144 146 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
145 147 rawPairs = list(combinations(list(range(dataOut.nChannels)), 2))
146 148 meta['pairs'] = rawPairs
147 149
148 150 if self.crossPairs == None:
149 151 self.crossPairs = dataOut.pairsList
150 152
151 153 tmp = []
152 154
153 155 for n, pair in enumerate(meta['pairs']):
154 156
155 157 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
156 158 coh = numpy.abs(out)
157 159 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
158 160 tmp.append(coh)
159 161 tmp.append(phase)
160 162
161 163 data['cspc'] = numpy.array(tmp)
162 164
163 165 return data, meta
164 166
165 167 def plot(self):
166 168
167 169 if self.xaxis == "frequency":
168 170 x = self.data.xrange[0]
169 171 self.xlabel = "Frequency (kHz)"
170 172 elif self.xaxis == "time":
171 173 x = self.data.xrange[1]
172 174 self.xlabel = "Time (ms)"
173 175 else:
174 176 x = self.data.xrange[2]
175 177 self.xlabel = "Velocity (m/s)"
176 178
177 179 self.titles = []
178 180
179 181 y = self.data.yrange
180 182 self.y = y
181 183
182 184 data = self.data[-1]
183 185 cspc = data['cspc']
184 186
185 187 for n in range(len(self.data.pairs)):
186 188
187 189 pair = self.crossPairs[n]
188 190
189 191 coh = cspc[n*2]
190 192 phase = cspc[n*2+1]
191 193 ax = self.axes[2 * n]
192 194
193 195 if ax.firsttime:
194 196 ax.plt = ax.pcolormesh(x, y, coh.T,
195 vmin=0,
196 vmax=1,
197 vmin=self.zmin_coh,
198 vmax=self.zmax_coh,
197 199 cmap=plt.get_cmap(self.colormap_coh)
198 200 )
199 201 else:
200 202 ax.plt.set_array(coh.T.ravel())
201 203 self.titles.append(
202 204 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
203 205
204 206 ax = self.axes[2 * n + 1]
205 207 if ax.firsttime:
206 208 ax.plt = ax.pcolormesh(x, y, phase.T,
207 209 vmin=-180,
208 210 vmax=180,
209 211 cmap=plt.get_cmap(self.colormap_phase)
210 212 )
211 213 else:
212 214 ax.plt.set_array(phase.T.ravel())
215
213 216 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
214 217
215 218
216 219 class RTIPlot(Plot):
217 220 '''
218 221 Plot for RTI data
219 222 '''
220 223
221 224 CODE = 'rti'
222 225 colormap = 'jet'
223 226 plot_type = 'pcolorbuffer'
224 227 titles = None
225 228 channelList = []
226 229
227 230 def setup(self):
228 231 self.xaxis = 'time'
229 232 self.ncols = 1
230 233 #print("dataChannels ",self.data.channels)
231 234 self.nrows = len(self.data.channels)
232 235 self.nplots = len(self.data.channels)
233 236 self.ylabel = 'Range [km]'
234 237 self.xlabel = 'Time'
235 238 self.cb_label = 'dB'
236 239 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
237 240 self.titles = ['{} Channel {}'.format(
238 241 self.CODE.upper(), x) for x in range(self.nplots)]
239 242
240 243 def update_list(self,dataOut):
241 244
242 245 self.channelList = dataOut.channelList
243 246
244 247
245 248 def update(self, dataOut):
246 249 if len(self.channelList) == 0:
247 250 self.update_list(dataOut)
248 251 data = {}
249 252 meta = {}
250 253 data['rti'] = dataOut.getPower()
251 254 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
252 255 return data, meta
253 256
254 257 def plot(self):
255 258
256 259 self.x = self.data.times
257 260 self.y = self.data.yrange
258 261 self.z = self.data[self.CODE]
259 262 self.z = numpy.array(self.z, dtype=float)
260 263 self.z = numpy.ma.masked_invalid(self.z)
264
261 265 try:
262 266 if self.channelList != None:
263 267 self.titles = ['{} Channel {}'.format(
264 268 self.CODE.upper(), x) for x in self.channelList]
265 269 except:
266 270 if self.channelList.any() != None:
267 271 self.titles = ['{} Channel {}'.format(
268 272 self.CODE.upper(), x) for x in self.channelList]
269 273 if self.decimation is None:
270 274 x, y, z = self.fill_gaps(self.x, self.y, self.z)
271 275 else:
272 276 x, y, z = self.fill_gaps(*self.decimate())
273 277 dummy_var = self.axes # Strangely, this refreshes the axes value
274 278 for n, ax in enumerate(self.axes):
275 279 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
276 280 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
277 281 data = self.data[-1]
278 282 if ax.firsttime:
279 283 ax.plt = ax.pcolormesh(x, y, z[n].T,
280 284 vmin=self.zmin,
281 285 vmax=self.zmax,
282 286 cmap=plt.get_cmap(self.colormap)
283 287 )
284 288 if self.showprofile:
285 ax.plot_profile = self.pf_axes[n].plot(
286 data['rti'][n], self.y)[0]
287 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
289 ax.plot_profile = self.pf_axes[n].plot(data[self.CODE][n], self.y)[0]
290
291 if "noise" in self.data:
292 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
288 293 color="k", linestyle="dashed", lw=1)[0]
289 294 else:
290 295 ax.collections.remove(ax.collections[0])
291 296 ax.plt = ax.pcolormesh(x, y, z[n].T,
292 297 vmin=self.zmin,
293 298 vmax=self.zmax,
294 299 cmap=plt.get_cmap(self.colormap)
295 300 )
296 301 if self.showprofile:
297 ax.plot_profile.set_data(data['rti'][n], self.y)
298 ax.plot_noise.set_data(numpy.repeat(
302 ax.plot_profile.set_data(data[self.CODE][n], self.y)
303 if "noise" in self.data:
304 ax.plot_noise.set_data(numpy.repeat(
299 305 data['noise'][n], len(self.y)), self.y)
300 306
301 307
302 308 class CoherencePlot(RTIPlot):
303 309 '''
304 310 Plot for Coherence data
305 311 '''
306 312
307 313 CODE = 'coh'
308 314
309 315 def setup(self):
310 316 self.xaxis = 'time'
311 317 self.ncols = 1
312 318 self.nrows = len(self.data.pairs)
313 319 self.nplots = len(self.data.pairs)
314 320 self.ylabel = 'Range [km]'
315 321 self.xlabel = 'Time'
316 322 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
317 323 if self.CODE == 'coh':
318 324 self.cb_label = ''
319 325 self.titles = [
320 326 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
321 327 else:
322 328 self.cb_label = 'Degrees'
323 329 self.titles = [
324 330 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
325 331
326 332 def update(self, dataOut):
327 333 self.update_list(dataOut)
328 334 data = {}
329 335 meta = {}
330 336 data['coh'] = dataOut.getCoherence()
331 337 meta['pairs'] = dataOut.pairsList
332 338
333 339
334 340 return data, meta
335 341
336 342 class PhasePlot(CoherencePlot):
337 343 '''
338 344 Plot for Phase map data
339 345 '''
340 346
341 347 CODE = 'phase'
342 348 colormap = 'seismic'
343 349
344 350 def update(self, dataOut):
345 351
346 352 data = {}
347 353 meta = {}
348 354 data['phase'] = dataOut.getCoherence(phase=True)
349 355 meta['pairs'] = dataOut.pairsList
350 356
351 357 return data, meta
352 358
353 359 class NoisePlot(Plot):
354 360 '''
355 361 Plot for noise
356 362 '''
357 363
358 364 CODE = 'noise'
359 365 plot_type = 'scatterbuffer'
360 366
361 367 def setup(self):
362 368 self.xaxis = 'time'
363 369 self.ncols = 1
364 370 self.nrows = 1
365 371 self.nplots = 1
366 372 self.ylabel = 'Intensity [dB]'
367 373 self.xlabel = 'Time'
368 374 self.titles = ['Noise']
369 375 self.colorbar = False
370 376 self.plots_adjust.update({'right': 0.85 })
371 377
372 378 def update(self, dataOut):
373 379
374 380 data = {}
375 381 meta = {}
376 382 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
377 383 meta['yrange'] = numpy.array([])
378 384
379 385 return data, meta
380 386
381 387 def plot(self):
382 388
383 389 x = self.data.times
384 390 xmin = self.data.min_time
385 391 xmax = xmin + self.xrange * 60 * 60
386 392 Y = self.data['noise']
387 393
388 394 if self.axes[0].firsttime:
389 395 self.ymin = numpy.nanmin(Y) - 5
390 396 self.ymax = numpy.nanmax(Y) + 5
391 397 for ch in self.data.channels:
392 398 y = Y[ch]
393 399 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
394 400 plt.legend(bbox_to_anchor=(1.18, 1.0))
395 401 else:
396 402 for ch in self.data.channels:
397 403 y = Y[ch]
398 404 self.axes[0].lines[ch].set_data(x, y)
399 405
400 406
401 407 class PowerProfilePlot(Plot):
402 408
403 409 CODE = 'pow_profile'
404 410 plot_type = 'scatter'
405 411
406 412 def setup(self):
407 413
408 414 self.ncols = 1
409 415 self.nrows = 1
410 416 self.nplots = 1
411 417 self.height = 4
412 418 self.width = 3
413 419 self.ylabel = 'Range [km]'
414 420 self.xlabel = 'Intensity [dB]'
415 421 self.titles = ['Power Profile']
416 422 self.colorbar = False
417 423
418 424 def update(self, dataOut):
419 425
420 426 data = {}
421 427 meta = {}
422 428 data[self.CODE] = dataOut.getPower()
423 429
424 430 return data, meta
425 431
426 432 def plot(self):
427 433
428 434 y = self.data.yrange
429 435 self.y = y
430 436
431 437 x = self.data[-1][self.CODE]
432 438
433 439 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
434 440 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
435 441
436 442 if self.axes[0].firsttime:
437 443 for ch in self.data.channels:
438 444 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
439 445 plt.legend()
440 446 else:
441 447 for ch in self.data.channels:
442 448 self.axes[0].lines[ch].set_data(x[ch], y)
443 449
444 450
445 451 class SpectraCutPlot(Plot):
446 452
447 453 CODE = 'spc_cut'
448 454 plot_type = 'scatter'
449 455 buffering = False
456 heights = []
457 channelList = []
458 maintitle = "Spectra Cuts"
450 459
451 460 def setup(self):
452 461
453 462 self.nplots = len(self.data.channels)
454 463 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
455 464 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
456 self.width = 3.4 * self.ncols + 1.5
465 self.width = 3.6 * self.ncols + 1.5
457 466 self.height = 3 * self.nrows
458 467 self.ylabel = 'Power [dB]'
459 468 self.colorbar = False
460 469 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
470 if self.selectedHeight:
471 self.maintitle = "Spectra Cut for %d km " %(int(self.selectedHeight))
461 472
462 473 def update(self, dataOut):
474 if len(self.channelList) == 0:
475 self.channelList = dataOut.channelList
463 476
477 self.heights = dataOut.heightList
478 if self.selectedHeight:
479 index_list = numpy.where(self.heights >= self.selectedHeight)
480 self.height_index = index_list[0]
481 self.height_index = self.height_index[0]
482 #print(self.height_index)
464 483 data = {}
465 484 meta = {}
466 485 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
467 data['spc'] = spc
486 if self.selectedHeight:
487 data['spc'] = spc[:,:,self.height_index]
488 else:
489 data['spc'] = spc
468 490 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
469 491
470 492 return data, meta
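# A minimal sketch, with hypothetical values, of how the selectedHeight lookup in
# update() above resolves to a single range index (heightList assumed increasing):
import numpy
heights = numpy.array([80.0, 85.0, 90.0, 95.0])  # hypothetical heightList in km
selectedHeight = 87.0  # hypothetical requested cut height
height_index = numpy.where(heights >= selectedHeight)[0][0]  # first gate at or above the request
# height_index -> 2 (the 90.0 km gate), so spc[:, :, height_index] is the single cut that gets plotted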
471 493
472 494 def plot(self):
473 495 if self.xaxis == "frequency":
474 496 x = self.data.xrange[0][1:]
475 497 self.xlabel = "Frequency (kHz)"
476 498 elif self.xaxis == "time":
477 499 x = self.data.xrange[1]
478 500 self.xlabel = "Time (ms)"
479 501 else:
480 502 x = self.data.xrange[2]
481 503 self.xlabel = "Velocity (m/s)"
482 504
483 505 self.titles = []
484 506
485 507 y = self.data.yrange
486 508 z = self.data[-1]['spc']
487
509 #print(z.shape)
488 510 if self.height_index:
489 511 index = numpy.array(self.height_index)
490 512 else:
491 513 index = numpy.arange(0, len(y), int((len(y))/9))
492 514
493 515 for n, ax in enumerate(self.axes):
494 516 if ax.firsttime:
495 517 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
496 518 self.xmin = self.xmin if self.xmin else -self.xmax
497 519 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
498 520 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
499 ax.plt = ax.plot(x, z[n, :, index].T)
500 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
501 self.figures[0].legend(ax.plt, labels, loc='center right')
521 if self.selectedHeight:
522 ax.plt = ax.plot(x, z[n,:])
523
524 else:
525 ax.plt = ax.plot(x, z[n, :, index].T)
526 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
527 self.figures[0].legend(ax.plt, labels, loc='center right')
502 528 else:
503 529 for i, line in enumerate(ax.plt):
504 line.set_data(x, z[n, :, index[i]])
505 self.titles.append('CH {}'.format(n))
506
530 if self.selectedHeight:
531 line.set_data(x, z[n, :])
532 else:
533 line.set_data(x, z[n, :, index[i]])
534 self.titles.append('CH {}'.format(self.channelList[n]))
535 plt.suptitle(self.maintitle)
507 536
508 537 class BeaconPhase(Plot):
509 538
510 539 __isConfig = None
511 540 __nsubplots = None
512 541
513 542 PREFIX = 'beacon_phase'
514 543
515 544 def __init__(self):
516 545 Plot.__init__(self)
517 546 self.timerange = 24*60*60
518 547 self.isConfig = False
519 548 self.__nsubplots = 1
520 549 self.counter_imagwr = 0
521 550 self.WIDTH = 800
522 551 self.HEIGHT = 400
523 552 self.WIDTHPROF = 120
524 553 self.HEIGHTPROF = 0
525 554 self.xdata = None
526 555 self.ydata = None
527 556
528 557 self.PLOT_CODE = BEACON_CODE
529 558
530 559 self.FTP_WEI = None
531 560 self.EXP_CODE = None
532 561 self.SUB_EXP_CODE = None
533 562 self.PLOT_POS = None
534 563
535 564 self.filename_phase = None
536 565
537 566 self.figfile = None
538 567
539 568 self.xmin = None
540 569 self.xmax = None
541 570
542 571 def getSubplots(self):
543 572
544 573 ncol = 1
545 574 nrow = 1
546 575
547 576 return nrow, ncol
548 577
549 578 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
550 579
551 580 self.__showprofile = showprofile
552 581 self.nplots = nplots
553 582
554 583 ncolspan = 7
555 584 colspan = 6
556 585 self.__nsubplots = 2
557 586
558 587 self.createFigure(id = id,
559 588 wintitle = wintitle,
560 589 widthplot = self.WIDTH+self.WIDTHPROF,
561 590 heightplot = self.HEIGHT+self.HEIGHTPROF,
562 591 show=show)
563 592
564 593 nrow, ncol = self.getSubplots()
565 594
566 595 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
567 596
568 597 def save_phase(self, filename_phase):
569 598 f = open(filename_phase,'w+')
570 599 f.write('\n\n')
571 600 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
572 601 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
573 602 f.close()
574 603
575 604 def save_data(self, filename_phase, data, data_datetime):
576 605 f=open(filename_phase,'a')
577 606 timetuple_data = data_datetime.timetuple()
578 607 day = str(timetuple_data.tm_mday)
579 608 month = str(timetuple_data.tm_mon)
580 609 year = str(timetuple_data.tm_year)
581 610 hour = str(timetuple_data.tm_hour)
582 611 minute = str(timetuple_data.tm_min)
583 612 second = str(timetuple_data.tm_sec)
584 613 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
585 614 f.close()
586 615
587 616 def plot(self):
588 617 log.warning('TODO: Not yet implemented...')
589 618
590 619 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
591 620 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
592 621 timerange=None,
593 622 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
594 623 server=None, folder=None, username=None, password=None,
595 624 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
596 625
597 626 if dataOut.flagNoData:
598 627 return dataOut
599 628
600 629 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
601 630 return
602 631
603 632 if pairsList == None:
604 633 pairsIndexList = dataOut.pairsIndexList[:10]
605 634 else:
606 635 pairsIndexList = []
607 636 for pair in pairsList:
608 637 if pair not in dataOut.pairsList:
609 638 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
610 639 pairsIndexList.append(dataOut.pairsList.index(pair))
611 640
612 641 if pairsIndexList == []:
613 642 return
614 643
615 644 # if len(pairsIndexList) > 4:
616 645 # pairsIndexList = pairsIndexList[0:4]
617 646
618 647 hmin_index = None
619 648 hmax_index = None
620 649
621 650 if hmin != None and hmax != None:
622 651 indexes = numpy.arange(dataOut.nHeights)
623 652 hmin_list = indexes[dataOut.heightList >= hmin]
624 653 hmax_list = indexes[dataOut.heightList <= hmax]
625 654
626 655 if hmin_list.any():
627 656 hmin_index = hmin_list[0]
628 657
629 658 if hmax_list.any():
630 659 hmax_index = hmax_list[-1]+1
631 660
632 661 x = dataOut.getTimeRange()
633 662
634 663 thisDatetime = dataOut.datatime
635 664
636 665 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
637 666 xlabel = "Local Time"
638 667 ylabel = "Phase (degrees)"
639 668
640 669 update_figfile = False
641 670
642 671 nplots = len(pairsIndexList)
643 672 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
644 673 phase_beacon = numpy.zeros(len(pairsIndexList))
645 674 for i in range(nplots):
646 675 pair = dataOut.pairsList[pairsIndexList[i]]
647 676 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
648 677 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
649 678 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
650 679 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
651 680 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
652 681
653 682 if dataOut.beacon_heiIndexList:
654 683 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
655 684 else:
656 685 phase_beacon[i] = numpy.average(phase)
657 686
658 687 if not self.isConfig:
659 688
660 689 nplots = len(pairsIndexList)
661 690
662 691 self.setup(id=id,
663 692 nplots=nplots,
664 693 wintitle=wintitle,
665 694 showprofile=showprofile,
666 695 show=show)
667 696
668 697 if timerange != None:
669 698 self.timerange = timerange
670 699
671 700 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
672 701
673 702 if ymin == None: ymin = 0
674 703 if ymax == None: ymax = 360
675 704
676 705 self.FTP_WEI = ftp_wei
677 706 self.EXP_CODE = exp_code
678 707 self.SUB_EXP_CODE = sub_exp_code
679 708 self.PLOT_POS = plot_pos
680 709
681 710 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
682 711 self.isConfig = True
683 712 self.figfile = figfile
684 713 self.xdata = numpy.array([])
685 714 self.ydata = numpy.array([])
686 715
687 716 update_figfile = True
688 717
689 718 #open file beacon phase
690 719 path = '%s%03d' %(self.PREFIX, self.id)
691 720 beacon_file = os.path.join(path,'%s.txt'%self.name)
692 721 self.filename_phase = os.path.join(figpath,beacon_file)
693 722 #self.save_phase(self.filename_phase)
694 723
695 724
696 725 #store data beacon phase
697 726 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
698 727
699 728 self.setWinTitle(title)
700 729
701 730
702 731 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
703 732
704 733 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
705 734
706 735 axes = self.axesList[0]
707 736
708 737 self.xdata = numpy.hstack((self.xdata, x[0:1]))
709 738
710 739 if len(self.ydata)==0:
711 740 self.ydata = phase_beacon.reshape(-1,1)
712 741 else:
713 742 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
714 743
715 744
716 745 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
717 746 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
718 747 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
719 748 XAxisAsTime=True, grid='both'
720 749 )
721 750
722 751 self.draw()
723 752
724 753 if dataOut.ltctime >= self.xmax:
725 754 self.counter_imagwr = wr_period
726 755 self.isConfig = False
727 756 update_figfile = True
728 757
729 758 self.save(figpath=figpath,
730 759 figfile=figfile,
731 760 save=save,
732 761 ftp=ftp,
733 762 wr_period=wr_period,
734 763 thisDatetime=thisDatetime,
735 764 update_figfile=update_figfile)
736 765
737 766 return dataOut
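# A minimal sketch, with hypothetical 1-D arrays, of the per-pair phase computed in
# BeaconPhase.run() above: the argument of the averaged complex coherence, in degrees.
import numpy
ccf = numpy.array([1 + 1j, 2 + 2j])  # hypothetical averaged cross-spectrum samples
powa = numpy.array([2.0, 4.0])       # hypothetical power of channel pair[0]
powb = numpy.array([2.0, 4.0])       # hypothetical power of channel pair[1]
coh = ccf / numpy.sqrt(powa * powb)  # complex coherence
phase = numpy.arctan2(coh.imag, coh.real) * 180 / numpy.pi
# phase -> [45., 45.] degrees; run() then averages this over the beacon height indexes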
767
768 class NoiselessSpectraPlot(Plot):
769 '''
770 Plot for Spectra data, subtracting
771 the noise in all channels, using for
772 amisr-14 data
773 '''
774
775 CODE = 'noiseless_spc'
776 colormap = 'nipy_spectral'
777 plot_type = 'pcolor'
778 buffering = False
779 channelList = []
780
781 def setup(self):
782
783 self.nplots = len(self.data.channels)
784 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
785 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
786 self.height = 2.6 * self.nrows
787
788 self.cb_label = 'dB'
789 if self.showprofile:
790 self.width = 4 * self.ncols
791 else:
792 self.width = 3.5 * self.ncols
793 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
794 self.ylabel = 'Range [km]'
795
796
797 def update_list(self,dataOut):
798 if len(self.channelList) == 0:
799 self.channelList = dataOut.channelList
800
801 def update(self, dataOut):
802
803 self.update_list(dataOut)
804 data = {}
805 meta = {}
806 n0 = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
807 (nch, nff, nh) = dataOut.data_spc.shape
808 n1 = numpy.repeat(n0,nh, axis=0).reshape((nch,nh))
809 noise = numpy.repeat(n1,nff, axis=1).reshape((nch,nff,nh))
810 #print(noise.shape, "noise", noise)
811
812 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor) - noise
813
814 data['spc'] = spc
815 data['rti'] = dataOut.getPower() - n1
816
817 data['noise'] = n0
818 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
819
820 return data, meta
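# A minimal sketch, with hypothetical shapes, showing that the repeat/reshape calls in
# update() above only tile the per-channel noise over (nFFTPoints, nHeights); the same
# subtraction can also be written with plain numpy broadcasting:
import numpy
nch, nff, nh = 2, 4, 3                   # hypothetical dimensions
spc_db = numpy.zeros((nch, nff, nh))     # hypothetical spectra already in dB
n0 = numpy.array([10.0, 12.0])           # hypothetical per-channel noise in dB
noiseless = spc_db - n0[:, None, None]   # broadcast over FFT points and heights
# equivalent to the noise array built with numpy.repeat(...).reshape(...) in the class above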
821
822 def plot(self):
823 if self.xaxis == "frequency":
824 x = self.data.xrange[0]
825 self.xlabel = "Frequency (kHz)"
826 elif self.xaxis == "time":
827 x = self.data.xrange[1]
828 self.xlabel = "Time (ms)"
829 else:
830 x = self.data.xrange[2]
831 self.xlabel = "Velocity (m/s)"
832
833 self.titles = []
834 y = self.data.yrange
835 self.y = y
836
837 data = self.data[-1]
838 z = data['spc']
839
840 for n, ax in enumerate(self.axes):
841 noise = data['noise'][n]
842
843 if ax.firsttime:
844 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
845 self.xmin = self.xmin if self.xmin else -self.xmax
846 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
847 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
848 ax.plt = ax.pcolormesh(x, y, z[n].T,
849 vmin=self.zmin,
850 vmax=self.zmax,
851 cmap=plt.get_cmap(self.colormap)
852 )
853
854 if self.showprofile:
855 ax.plt_profile = self.pf_axes[n].plot(
856 data['rti'][n], y)[0]
857 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
858 color="k", linestyle="dashed", lw=1)[0]
859
860 else:
861 ax.plt.set_array(z[n].T.ravel())
862 if self.showprofile:
863 ax.plt_profile.set_data(data['rti'][n], y)
864 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
865
866 self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
867
868
869 class NoiselessRTIPlot(Plot):
870 '''
871 Plot for RTI data
872 '''
873
874 CODE = 'noiseless_rti'
875 colormap = 'jet'
876 plot_type = 'pcolorbuffer'
877 titles = None
878 channelList = []
879
880 def setup(self):
881 self.xaxis = 'time'
882 self.ncols = 1
883 #print("dataChannels ",self.data.channels)
884 self.nrows = len(self.data.channels)
885 self.nplots = len(self.data.channels)
886 self.ylabel = 'Range [km]'
887 self.xlabel = 'Time'
888 self.cb_label = 'dB'
889 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
890 self.titles = ['{} Channel {}'.format(
891 self.CODE.upper(), x) for x in range(self.nplots)]
892
893 def update_list(self,dataOut):
894
895 self.channelList = dataOut.channelList
896
897
898 def update(self, dataOut):
899 if len(self.channelList) == 0:
900 self.update_list(dataOut)
901 data = {}
902 meta = {}
903
904 n0 = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
905 (nch, nff, nh) = dataOut.data_spc.shape
906 noise = numpy.repeat(n0,nh, axis=0).reshape((nch,nh))
907
908
909 data['noiseless_rti'] = dataOut.getPower() - noise
910 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
911 return data, meta
912
913 def plot(self):
914
915 self.x = self.data.times
916 self.y = self.data.yrange
917 self.z = self.data['noiseless_rti']
918 self.z = numpy.array(self.z, dtype=float)
919 self.z = numpy.ma.masked_invalid(self.z)
920
921 try:
922 if self.channelList != None:
923 self.titles = ['{} Channel {}'.format(
924 self.CODE.upper(), x) for x in self.channelList]
925 except:
926 if self.channelList.any() != None:
927 self.titles = ['{} Channel {}'.format(
928 self.CODE.upper(), x) for x in self.channelList]
929 if self.decimation is None:
930 x, y, z = self.fill_gaps(self.x, self.y, self.z)
931 else:
932 x, y, z = self.fill_gaps(*self.decimate())
933 dummy_var = self.axes # strangely, reading self.axes here refreshes the axes value
934 for n, ax in enumerate(self.axes):
935 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
936 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
937 data = self.data[-1]
938 if ax.firsttime:
939 ax.plt = ax.pcolormesh(x, y, z[n].T,
940 vmin=self.zmin,
941 vmax=self.zmax,
942 cmap=plt.get_cmap(self.colormap)
943 )
944 if self.showprofile:
945 ax.plot_profile = self.pf_axes[n].plot(data['noiseless_rti'][n], self.y)[0]
946
947 if "noise" in self.data:
948 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
949 color="k", linestyle="dashed", lw=1)[0]
950 else:
951 ax.collections.remove(ax.collections[0])
952 ax.plt = ax.pcolormesh(x, y, z[n].T,
953 vmin=self.zmin,
954 vmax=self.zmax,
955 cmap=plt.get_cmap(self.colormap)
956 )
957 if self.showprofile:
958 ax.plot_profile.set_data(data['noiseless_rti'][n], self.y)
959 if "noise" in self.data:
960 ax.plot_noise.set_data(numpy.repeat(
961 data['noise'][n], len(self.y)), self.y)
@@ -1,1577 +1,1580
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Checks whether the characters of a string can be converted to a number.
40 40
41 41 Exceptions:
42 42 Raised internally (and caught) if the string cannot be converted to a number
43 43 Input:
44 44 str, string analyzed to determine whether it is convertible to a number
45 45
46 46 Return:
47 47 True : if the string is numeric
48 48 False : if it is not a numeric string
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59 Determines whether a data file falls within the specified date range.
60 60
61 61 Inputs:
62 62 filename : full name of the data file in Jicamarca format (.r)
63 63
64 64 startUTSeconds : start date of the selected range, given in
65 65 seconds counted from 01/01/1970.
66 66 endUTSeconds : end date of the selected range, given in
67 67 seconds counted from 01/01/1970.
68 68
69 69 Return:
70 70 Boolean : Returns True if the data file contains data within the specified
71 71 date range, otherwise returns False.
72 72
73 73 Exceptions:
74 74 If the file does not exist or cannot be opened
75 75 If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90 print("Skipping the file %s because it has not a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
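# A minimal sketch, with hypothetical times, of how isTimeInRange() above treats
# endTime < startTime as a range that wraps past midnight:
import datetime
night_start = datetime.time(22, 0, 0)  # hypothetical start of the range
night_end = datetime.time(2, 0, 0)     # hypothetical end, on the next day
# isTimeInRange(datetime.time(23, 30, 0), night_start, night_end) -> 1 (inside the wrapped range)
# isTimeInRange(datetime.time(12, 0, 0), night_start, night_end) -> 0 (outside the wrapped range)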
108 108
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112 Returns 1 if the data file falls within the specified time range.
113 113
114 114 Inputs:
115 115 filename : full name of the data file in Jicamarca format (.r)
116 116
117 117 startDate : start date of the selected range as a datetime.date
118 118
119 119 endDate : end date of the selected range as a datetime.date
120 120
121 121 startTime : start time of the selected range as a datetime.time
122 122
123 123 endTime : end time of the selected range as a datetime.time
124 124
125 125 Return:
126 126 Boolean : Returns True if the data file contains data within the specified
127 127 date range, otherwise returns False.
128 128
129 129 Exceptions:
130 130 If the file does not exist or cannot be opened
131 131 If the header cannot be read.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151 print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168 print("[Reading] %s: This file has not enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215 Returns 1 if the data folder falls within the specified date range.
216 216
217 217 Inputs:
218 218 folder : full name of the directory.
219 219 Its format should be "/path_root/?YYYYDDD"
220 220
221 221 where:
222 222 YYYY : year (e.g. 2015)
223 223 DDD : day of the year (e.g. 305)
224 224
225 225 startDate : start date of the selected range as a datetime.date
226 226
227 227 endDate : end date of the selected range as a datetime.date
228 228
229 229 Return:
230 230 Boolean : Returns True if the folder contains data within the specified
231 231 date range, otherwise returns False.
232 232 Exceptions:
233 233 If the directory does not have the expected format
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239 print("The folder %s has not the rigth format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256 Returns 1 if the data file falls within the specified date range.
257 257
258 258 Inputs:
259 259 filename : full name of the data file in Jicamarca format (.r)
260 260
261 261 Its format should be "?YYYYDDDsss"
262 262
263 263 where:
264 264 YYYY : year (e.g. 2015)
265 265 DDD : day of the year (e.g. 305)
266 266 sss : set
267 267
268 268 startDate : start date of the selected range as a datetime.date
269 269
270 270 endDate : end date of the selected range as a datetime.date
271 271
272 272 Return:
273 273 Boolean : Returns True if the data file contains data within the specified
274 274 date range, otherwise returns False.
275 275 Exceptions:
276 276 If the file does not have the expected format
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282 print("The filename %s has not the rigth format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
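# A minimal sketch, with hypothetical values, of the wildcard pattern built by
# getFileFromSet() above to match one file of a given year/doy/set:
year, doy, current_set = 2021, 305, 7  # hypothetical year, day of year and set
pattern = '*%4.4d%3.3d%3.3d*' % (year, doy, current_set)
# pattern -> '*2021305007*', which fnmatch.filter() uses to pick e.g. 'D2021305007.r'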
331 331
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335 Filters the fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
336 336 and returns the last file of the remaining list.
337 337
338 338 Input:
339 339 fileList : list containing all the files (without path) of a given folder
340 340 ext : extension of the files contained in the folder
341 341
342 342 Return:
343 343 The last file of the given folder, without the path.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
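# A minimal sketch, with a hypothetical folder name, of how parse_format() above fills
# each strptime directive in fmt with the same-width slice of s, so the result can be
# parsed back with the original format string:
import datetime
folder = 'd2021305'                      # hypothetical doy folder: year 2021, day 305
folderfmt = '*%Y%j'                      # format used by JRODataReader below
fixed = parse_format(folder, folderfmt)  # -> '*2021305'
date = datetime.datetime.strptime(fixed, folderfmt).date()  # -> datetime.date(2021, 11, 1)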
426 426
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # number of tries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499 499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 509 for folder in folders:
510 510 try:
511 511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515 log.log('Skipping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517 log.log('Skipping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520 520
521 521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
525 525 files = glob.glob1(path, '*{}'.format(ext))
526 526 files.sort()
527 527 if last:
528 528 if files:
529 529 fo = files[-1]
530 530 try:
531 531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 532 yield os.path.join(path, expLabel, fo)
533 533 except Exception as e:
534 534 pass
535 535 return
536 536 else:
537 537 return
538 538
539 539 for fo in files:
540 540 try:
541 541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 542 if dt >= startDate and dt <= endDate:
543 543 yield os.path.join(path, expLabel, fo)
544 544 else:
545 545 log.log('Skipping file {}'.format(fo), self.name)
546 546 except Exception as e:
547 547 log.log('Skipping file {}'.format(fo), self.name)
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
555 555 Return:
556 556 Generator of files
557 557 """
558 558
559 559 if walk:
560 560 folders = self.find_folders(
561 561 path, startDate, endDate, folderfmt)
562 562 else:
563 563 folders = path.split(',')
564 564
565 565 return self.find_files(
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
573 573 Arguments:
574 574 path : folder that contains the data files
575 575 expLabel : name of the sub-experiment (subfolder)
576 576 ext : extension of the files
577 577 walk : if enabled, search for data inside the doy subdirectories (doypath)
578 578
579 579 Return:
580 580 generator with the full path of the last filename
581 581 """
582 582
583 583 if walk:
584 584 folders = self.find_folders(
585 585 path, startDate, endDate, folderfmt, last=True)
586 586 else:
587 587 folders = path.split(',')
588 588
589 589 return self.find_files(
590 590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 591
592 592 def setNextFile(self):
593 593 """Set the next file to be readed open it and parse de file header"""
594 594
595 595 while True:
596 596 if self.fp != None:
597 597 self.fp.close()
598 598
599 599 if self.online:
600 600 newFile = self.setNextFileOnline()
601 601 else:
602 602 newFile = self.setNextFileOffline()
603 603
604 604 if not(newFile):
605 605 if self.online:
606 606 raise schainpy.admin.SchainError('Time to wait for new files reached')
607 607 else:
608 608 if self.fileIndex == -1:
609 609 raise schainpy.admin.SchainWarning('No files found in the given path')
610 610 else:
611 611 raise schainpy.admin.SchainWarning('No more files to read')
612 612
613 613 if self.verifyFile(self.filename):
614 614 break
615 615
616 616 log.log('Opening file: %s' % self.filename, self.name)
617 617
618 618 self.readFirstHeader()
619 619 self.nReadBlocks = 0
620 620
621 621 def setNextFileOnline(self):
622 622 """Check for the next file to be readed in online mode.
623 623
624 624 Set:
625 625 self.filename
626 626 self.fp
627 627 self.filesize
628 628
629 629 Return:
630 630 boolean
631 631
632 632 """
633 633 nextFile = True
634 634 nextDay = False
635 635
636 636 for nFiles in range(self.nFiles+1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
640 640 break
641 641 log.warning(
642 642 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 643 self.name)
644 644 time.sleep(self.delay)
645 645 nextFile = False
646 646 continue
647 647
648 648 if fullfilename is not None:
649 649 break
650 650
651 651 self.nTries = 1
652 652 nextFile = True
653 653
654 654 if nFiles == (self.nFiles - 1):
655 655 log.log('Trying with next day...', self.name)
656 656 nextDay = True
657 657 self.nTries = 3
658 658
659 659 if fullfilename:
660 660 self.fileSize = os.path.getsize(fullfilename)
661 661 self.filename = fullfilename
662 662 self.flagIsNewFile = 1
663 663 if self.fp != None:
664 664 self.fp.close()
665 665 self.fp = self.open_file(fullfilename, self.open_mode)
666 666 self.flagNoMoreFiles = 0
667 667 self.fileIndex += 1
668 668 return 1
669 669 else:
670 670 return 0
671 671
672 672 def setNextFileOffline(self):
673 673 """Open the next file to be readed in offline mode"""
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 677 self.fileIndex +=1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
681 681
682 682 self.filename = filename
683 683 self.fileSize = os.path.getsize(filename)
684 self.fp = self.open_file(filename, self.open_mode)
684 try:
685 self.fp = self.open_file(filename, self.open_mode)
686 except Exception as e:
687 raise schainpy.admin.SchainError("[Reading] Error in {} file, unable to open".format(filename))
685 688 self.flagIsNewFile = 1
686 689
687 690 return 1
688 691
689 692 @staticmethod
690 693 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 694 """Check if the given datetime is in range"""
692 695 startDateTime= datetime.datetime.combine(startDate,startTime)
693 696 endDateTime = datetime.datetime.combine(endDate,endTime)
694
697
695 698 if startDateTime <= dt <= endDateTime:
696 699 return True
697 700 return False
698 701
699 702 def verifyFile(self, filename):
700 703 """Check for a valid file
701 704
702 705 Arguments:
703 706 filename -- full path filename
704 707
705 708 Return:
706 709 boolean
707 710 """
708 711
709 712 return True
710 713
711 714 def checkForRealPath(self, nextFile, nextDay):
712 715 """Check if the next file to be readed exists"""
713 716
714 717 raise NotImplementedError
715 718
716 719 def readFirstHeader(self):
717 720 """Parse the file header"""
718 721
719 722 pass
720 723
721 724 def waitDataBlock(self, pointer_location, blocksize=None):
722 725 """
723 726 """
724 727
725 728 currentPointer = pointer_location
726 729 if blocksize is None:
727 730 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
728 731 else:
729 732 neededSize = blocksize
730 733
731 734 for nTries in range(self.nTries):
732 735 self.fp.close()
733 736 self.fp = open(self.filename, 'rb')
734 737 self.fp.seek(currentPointer)
735 738
736 739 self.fileSize = os.path.getsize(self.filename)
737 740 currentSize = self.fileSize - currentPointer
738 741
739 742 if (currentSize >= neededSize):
740 743 return 1
741 744
742 745 log.warning(
743 746 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
744 747 self.name
745 748 )
746 749 time.sleep(self.delay)
747 750
748 751 return 0
749 752
750 753 class JRODataReader(Reader):
751 754
752 755 utc = 0
753 756 nReadBlocks = 0
754 757 foldercounter = 0
755 758 firstHeaderSize = 0
756 759 basicHeaderSize = 24
757 760 __isFirstTimeOnline = 1
758 761 filefmt = "*%Y%j***"
759 762 folderfmt = "*%Y%j"
760 763 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
761 764
762 765 def getDtypeWidth(self):
763 766
764 767 dtype_index = get_dtype_index(self.dtype)
765 768 dtype_width = get_dtype_width(dtype_index)
766 769
767 770 return dtype_width
768 771
769 772 def checkForRealPath(self, nextFile, nextDay):
770 773 """Check if the next file to be readed exists.
771 774
772 775 Example :
773 776 the correct file name would be .../.../D2009307/P2009307367.ext
774 777
775 778 The function then tries the following combinations
776 779 .../.../y2009307367.ext
777 780 .../.../Y2009307367.ext
778 781 .../.../x2009307/y2009307367.ext
779 782 .../.../x2009307/Y2009307367.ext
780 783 .../.../X2009307/y2009307367.ext
781 784 .../.../X2009307/Y2009307367.ext
782 785 where, in this case, the last letter combination is identical to the file being searched
783 786
784 787 Return:
785 788 str -- fullpath of the file
786 789 """
787 790
788 791
789 792 if nextFile:
790 793 self.set += 1
791 794 if nextDay:
792 795 self.set = 0
793 796 self.doy += 1
794 797 foldercounter = 0
795 798 prefixDirList = [None, 'd', 'D']
796 799 if self.ext.lower() == ".r": # voltage
797 800 prefixFileList = ['d', 'D']
798 801 elif self.ext.lower() == ".pdata": # spectra
799 802 prefixFileList = ['p', 'P']
800 803
801 804 # sweep over the possible combinations
802 805 for prefixDir in prefixDirList:
803 806 thispath = self.path
804 807 if prefixDir != None:
805 808 # build the directory name xYYYYDDD (x=d or x=D)
806 809 if foldercounter == 0:
807 810 thispath = os.path.join(self.path, "%s%04d%03d" %
808 811 (prefixDir, self.year, self.doy))
809 812 else:
810 813 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
811 814 prefixDir, self.year, self.doy, foldercounter))
812 815 for prefixFile in prefixFileList: # sweep over the two possible "D" combinations
813 816 # build the file name xYYYYDDDSSS.ext
814 817 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
815 818 fullfilename = os.path.join(
816 819 thispath, filename)
817 820
818 821 if os.path.exists(fullfilename):
819 822 return fullfilename, filename
820 823
821 824 return None, filename
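# A minimal sketch, with hypothetical values, of the candidate names tried by
# checkForRealPath() above, following the xYYYYDDD / xYYYYDDDSSS.ext convention
# (d/D prefixes for .r voltage data, p/P for .pdata spectra):
year, doy, current_set, ext = 2009, 307, 367, '.r'  # hypothetical values
subfolder = "%s%04d%03d" % ('D', year, doy)                        # -> 'D2009307'
candidate = "%s%04d%03d%03d%s" % ('D', year, doy, current_set, ext)  # -> 'D2009307367.r'
# the reader tests each prefix with and without the doy subfolder until one path exists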
822 825
823 826 def __waitNewBlock(self):
824 827 """
825 828 Returns 1 if a new data block was found, 0 otherwise.
826 829
827 830 If the reading mode is OffLine it always returns 0
828 831 """
829 832 if not self.online:
830 833 return 0
831 834
832 835 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
833 836 return 0
834 837
835 838 currentPointer = self.fp.tell()
836 839
837 840 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
838 841
839 842 for nTries in range(self.nTries):
840 843
841 844 self.fp.close()
842 845 self.fp = open(self.filename, 'rb')
843 846 self.fp.seek(currentPointer)
844 847
845 848 self.fileSize = os.path.getsize(self.filename)
846 849 currentSize = self.fileSize - currentPointer
847 850
848 851 if (currentSize >= neededSize):
849 852 self.basicHeaderObj.read(self.fp)
850 853 return 1
851 854
852 855 if self.fileSize == self.fileSizeByHeader:
853 856 # self.flagEoF = True
854 857 return 0
855 858
856 859 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
857 860 time.sleep(self.delay)
858 861
859 862 return 0
860 863
861 864 def __setNewBlock(self):
862 865
863 866 if self.fp == None:
864 867 return 0
865 868
866 869 if self.flagIsNewFile:
867 870 self.lastUTTime = self.basicHeaderObj.utc
868 871 return 1
869 872
870 873 if self.realtime:
871 874 self.flagDiscontinuousBlock = 1
872 875 if not(self.setNextFile()):
873 876 return 0
874 877 else:
875 878 return 1
876 879
877 880 currentSize = self.fileSize - self.fp.tell()
878 881 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
879 882
880 883 if (currentSize >= neededSize):
881 884 self.basicHeaderObj.read(self.fp)
882 885 self.lastUTTime = self.basicHeaderObj.utc
883 886 return 1
884 887
885 888 if self.__waitNewBlock():
886 889 self.lastUTTime = self.basicHeaderObj.utc
887 890 return 1
888 891
889 892 if not(self.setNextFile()):
890 893 return 0
891 894
892 895 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
893 896 self.lastUTTime = self.basicHeaderObj.utc
894 897
895 898 self.flagDiscontinuousBlock = 0
896 899
897 900 if deltaTime > self.maxTimeStep:
898 901 self.flagDiscontinuousBlock = 1
899 902
900 903 return 1
901 904
902 905 def readNextBlock(self):
903 906
904 907 while True:
905 908 if not(self.__setNewBlock()):
906 909 continue
907 910
908 911 if not(self.readBlock()):
909 912 return 0
910 913
911 914 self.getBasicHeader()
912 915
913 916 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
914 917 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
915 918 self.processingHeaderObj.dataBlocksPerFile,
916 919 self.dataOut.datatime.ctime()))
917 920 continue
918 921
919 922 break
920 923
921 924 if self.verbose:
922 925 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
923 926 self.processingHeaderObj.dataBlocksPerFile,
924 927 self.dataOut.datatime.ctime()))
925 928 return 1
926 929
927 930 def readFirstHeader(self):
928 931
929 932 self.basicHeaderObj.read(self.fp)
930 933 self.systemHeaderObj.read(self.fp)
931 934 self.radarControllerHeaderObj.read(self.fp)
932 935 self.processingHeaderObj.read(self.fp)
933 936 self.firstHeaderSize = self.basicHeaderObj.size
934 937
935 938 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
936 939 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
937 940 if datatype == 0:
938 941 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
939 942 elif datatype == 1:
940 943 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
941 944 elif datatype == 2:
942 945 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
943 946 elif datatype == 3:
944 947 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
945 948 elif datatype == 4:
946 949 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
947 950 elif datatype == 5:
948 951 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
949 952 else:
950 953 raise ValueError('Data type was not defined')
951 954
952 955 self.dtype = datatype_str
953 956 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
954 957 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
955 958 self.firstHeaderSize + self.basicHeaderSize * \
956 959 (self.processingHeaderObj.dataBlocksPerFile - 1)
957 960 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
958 961 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
959 962 self.getBlockDimension()
960 963
961 964 def verifyFile(self, filename):
962 965
963 966 flag = True
964 967
965 968 try:
966 969 fp = open(filename, 'rb')
967 970 except IOError:
968 971 log.error("File {} can't be opened".format(filename), self.name)
969 972 return False
970 973
971 974 if self.online and self.waitDataBlock(0):
972 975 pass
973 976
974 977 basicHeaderObj = BasicHeader(LOCALTIME)
975 978 systemHeaderObj = SystemHeader()
976 979 radarControllerHeaderObj = RadarControllerHeader()
977 980 processingHeaderObj = ProcessingHeader()
978 981
979 982 if not(basicHeaderObj.read(fp)):
980 983 flag = False
981 984 if not(systemHeaderObj.read(fp)):
982 985 flag = False
983 986 if not(radarControllerHeaderObj.read(fp)):
984 987 flag = False
985 988 if not(processingHeaderObj.read(fp)):
986 989 flag = False
987 990 if not self.online:
988 991 dt1 = basicHeaderObj.datatime
989 992 pos = self.fileSize-processingHeaderObj.blockSize-24
990 993 if pos<0:
991 994 flag = False
992 995 log.error('Invalid size for file: {}'.format(self.filename), self.name)
993 996 else:
994 997 fp.seek(pos)
995 998 if not(basicHeaderObj.read(fp)):
996 999 flag = False
997 1000 dt2 = basicHeaderObj.datatime
998 1001 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
999 1002 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
1000 1003 flag = False
1001 1004
1002 1005 fp.close()
1003 1006 return flag
1004 1007
1005 1008 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1006 1009
1007 1010 path_empty = True
1008 1011
1009 1012 dateList = []
1010 1013 pathList = []
1011 1014
1012 1015 multi_path = path.split(',')
1013 1016
1014 1017 if not walk:
1015 1018
1016 1019 for single_path in multi_path:
1017 1020
1018 1021 if not os.path.isdir(single_path):
1019 1022 continue
1020 1023
1021 1024 fileList = glob.glob1(single_path, "*" + ext)
1022 1025
1023 1026 if not fileList:
1024 1027 continue
1025 1028
1026 1029 path_empty = False
1027 1030
1028 1031 fileList.sort()
1029 1032
1030 1033 for thisFile in fileList:
1031 1034
1032 1035 if not os.path.isfile(os.path.join(single_path, thisFile)):
1033 1036 continue
1034 1037
1035 1038 if not isRadarFile(thisFile):
1036 1039 continue
1037 1040
1038 1041 if not isFileInDateRange(thisFile, startDate, endDate):
1039 1042 continue
1040 1043
1041 1044 thisDate = getDateFromRadarFile(thisFile)
1042 1045
1043 1046 if thisDate in dateList or single_path in pathList:
1044 1047 continue
1045 1048
1046 1049 dateList.append(thisDate)
1047 1050 pathList.append(single_path)
1048 1051
1049 1052 else:
1050 1053 for single_path in multi_path:
1051 1054
1052 1055 if not os.path.isdir(single_path):
1053 1056 continue
1054 1057
1055 1058 dirList = []
1056 1059
1057 1060 for thisPath in os.listdir(single_path):
1058 1061
1059 1062 if not os.path.isdir(os.path.join(single_path, thisPath)):
1060 1063 continue
1061 1064
1062 1065 if not isRadarFolder(thisPath):
1063 1066 continue
1064 1067
1065 1068 if not isFolderInDateRange(thisPath, startDate, endDate):
1066 1069 continue
1067 1070
1068 1071 dirList.append(thisPath)
1069 1072
1070 1073 if not dirList:
1071 1074 continue
1072 1075
1073 1076 dirList.sort()
1074 1077
1075 1078 for thisDir in dirList:
1076 1079
1077 1080 datapath = os.path.join(single_path, thisDir, expLabel)
1078 1081 fileList = glob.glob1(datapath, "*" + ext)
1079 1082
1080 1083 if not fileList:
1081 1084 continue
1082 1085
1083 1086 path_empty = False
1084 1087
1085 1088 thisDate = getDateFromRadarFolder(thisDir)
1086 1089
1087 1090 pathList.append(datapath)
1088 1091 dateList.append(thisDate)
1089 1092
1090 1093 dateList.sort()
1091 1094
1092 1095 if walk:
1093 1096 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1094 1097 else:
1095 1098 pattern_path = multi_path[0]
1096 1099
1097 1100 if path_empty:
1098 1101 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1099 1102 else:
1100 1103 if not dateList:
1101 1104 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1102 1105
1103 1106 if include_path:
1104 1107 return dateList, pathList
1105 1108
1106 1109 return dateList
1107 1110
1108 1111 def setup(self, **kwargs):
1109 1112
1110 1113 self.set_kwargs(**kwargs)
1111 1114 if not self.ext.startswith('.'):
1112 1115 self.ext = '.{}'.format(self.ext)
1113 1116
1114 1117 if self.server is not None:
1115 1118 if 'tcp://' in self.server:
1116 1119 address = self.server
1117 1120 else:
1118 1121 address = 'ipc:///tmp/%s' % self.server
1119 1122 self.server = address
1120 1123 self.context = zmq.Context()
1121 1124 self.receiver = self.context.socket(zmq.PULL)
1122 1125 self.receiver.connect(self.server)
1123 1126 time.sleep(0.5)
1124 1127 print('[Starting] ReceiverData from {}'.format(self.server))
1125 1128 else:
1126 1129 self.server = None
1127 1130 if self.path == None:
1128 1131 raise ValueError("[Reading] The path is not valid")
1129 1132
1130 1133 if self.online:
1131 1134 log.log("[Reading] Searching files in online mode...", self.name)
1132 1135
1133 1136 for nTries in range(self.nTries):
1134 1137 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1135 1138 self.endDate, self.expLabel, self.ext, self.walk,
1136 1139 self.filefmt, self.folderfmt)
1137 1140
1138 1141 try:
1139 1142 fullpath = next(fullpath)
1140 1143 except:
1141 1144 fullpath = None
1142 1145
1143 1146 if fullpath:
1144 1147 break
1145 1148
1146 1149 log.warning(
1147 1150 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1148 1151 self.delay, self.path, nTries + 1),
1149 1152 self.name)
1150 1153 time.sleep(self.delay)
1151 1154
1152 1155 if not(fullpath):
1153 1156 raise schainpy.admin.SchainError(
1154 1157 'There isn\'t any valid file in {}'.format(self.path))
1155 1158
1156 1159 pathname, filename = os.path.split(fullpath)
1157 1160 self.year = int(filename[1:5])
1158 1161 self.doy = int(filename[5:8])
1159 1162 self.set = int(filename[8:11]) - 1
1160 1163 else:
1161 1164 log.log("Searching files in {}".format(self.path), self.name)
1162 1165 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1163 1166 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1164 1167
1165 1168 self.setNextFile()
1166 1169
1167 1170 return
1168 1171
1169 1172 def getBasicHeader(self):
1170 1173
1171 1174 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1172 1175 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1173 1176
1174 1177 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1175 1178
1176 1179 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1177 1180
1178 1181 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1179 1182
1180 1183 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1181 1184
1182 1185 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1183 1186
1184 1187 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1185 1188
1186 1189 def getFirstHeader(self):
1187 1190
1188 1191 raise NotImplementedError
1189 1192
1190 1193 def getData(self):
1191 1194
1192 1195 raise NotImplementedError
1193 1196
1194 1197 def hasNotDataInBuffer(self):
1195 1198
1196 1199 raise NotImplementedError
1197 1200
1198 1201 def readBlock(self):
1199 1202
1200 1203 raise NotImplementedError
1201 1204
1202 1205 def isEndProcess(self):
1203 1206
1204 1207 return self.flagNoMoreFiles
1205 1208
1206 1209 def printReadBlocks(self):
1207 1210
1208 1211 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1209 1212
1210 1213 def printTotalBlocks(self):
1211 1214
1212 1215 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1213 1216
1214 1217 def run(self, **kwargs):
1215 1218 """
1216 1219
1217 1220 Arguments:
1218 1221 path :
1219 1222 startDate :
1220 1223 endDate :
1221 1224 startTime :
1222 1225 endTime :
1223 1226 set :
1224 1227 expLabel :
1225 1228 ext :
1226 1229 online :
1227 1230 delay :
1228 1231 walk :
1229 1232 getblock :
1230 1233 nTxs :
1231 1234 realtime :
1232 1235 blocksize :
1233 1236 blocktime :
1234 1237 skip :
1235 1238 cursor :
1236 1239 warnings :
1237 1240 server :
1238 1241 verbose :
1239 1242 format :
1240 1243 oneDDict :
1241 1244 twoDDict :
1242 1245 independentParam :
1243 1246 """
1244 1247
1245 1248 if not(self.isConfig):
1246 1249 self.setup(**kwargs)
1247 1250 self.isConfig = True
1248 1251 if self.server is None:
1249 1252 self.getData()
1250 1253 else:
1251 1254 self.getFromServer()
1252 1255
1253 1256
1254 1257 class JRODataWriter(Reader):
1255 1258
1256 1259 """
1257 1260 This class allows writing data to processed files (.r or .pdata). Data
1258 1261 is always written in blocks.
1259 1262 """
1260 1263
1261 1264 setFile = None
1262 1265 profilesPerBlock = None
1263 1266 blocksPerFile = None
1264 1267 nWriteBlocks = 0
1265 1268 fileDate = None
1266 1269
1267 1270 def __init__(self, dataOut=None):
1268 1271 raise NotImplementedError
1269 1272
1270 1273 def hasAllDataInBuffer(self):
1271 1274 raise NotImplementedError
1272 1275
1273 1276 def setBlockDimension(self):
1274 1277 raise NotImplementedError
1275 1278
1276 1279 def writeBlock(self):
1277 1280 raise NotImplementedError
1278 1281
1279 1282 def putData(self):
1280 1283 raise NotImplementedError
1281 1284
1282 1285 def getDtypeWidth(self):
1283 1286
1284 1287 dtype_index = get_dtype_index(self.dtype)
1285 1288 dtype_width = get_dtype_width(dtype_index)
1286 1289
1287 1290 return dtype_width
1288 1291
1289 1292 def getProcessFlags(self):
1290 1293
1291 1294 processFlags = 0
1292 1295
1293 1296 dtype_index = get_dtype_index(self.dtype)
1294 1297 procflag_dtype = get_procflag_dtype(dtype_index)
1295 1298
1296 1299 processFlags += procflag_dtype
1297 1300
1298 1301 if self.dataOut.flagDecodeData:
1299 1302 processFlags += PROCFLAG.DECODE_DATA
1300 1303
1301 1304 if self.dataOut.flagDeflipData:
1302 1305 processFlags += PROCFLAG.DEFLIP_DATA
1303 1306
1304 1307 if self.dataOut.code is not None:
1305 1308 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1306 1309
1307 1310 if self.dataOut.nCohInt > 1:
1308 1311 processFlags += PROCFLAG.COHERENT_INTEGRATION
1309 1312
1310 1313 if self.dataOut.type == "Spectra":
1311 1314 if self.dataOut.nIncohInt > 1:
1312 1315 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1313 1316
1314 1317 if self.dataOut.data_dc is not None:
1315 1318 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1316 1319
1317 1320 if self.dataOut.flagShiftFFT:
1318 1321 processFlags += PROCFLAG.SHIFT_FFT_DATA
1319 1322
1320 1323 return processFlags
1321 1324
1322 1325 def setBasicHeader(self):
1323 1326
1324 1327 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1325 1328 self.basicHeaderObj.version = self.versionFile
1326 1329 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1327 1330 utc = numpy.floor(self.dataOut.utctime)
1328 1331 milisecond = (self.dataOut.utctime - utc) * 1000.0
1329 1332 self.basicHeaderObj.utc = utc
1330 1333 self.basicHeaderObj.miliSecond = milisecond
1331 1334 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1332 1335 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1333 1336 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1334 1337
1335 1338 def setFirstHeader(self):
1336 1339 """
1337 1340 Gets a copy of the First Header
1338 1341
1339 1342 Affected:
1340 1343
1341 1344 self.basicHeaderObj
1342 1345 self.systemHeaderObj
1343 1346 self.radarControllerHeaderObj
1344 1347 self.processingHeaderObj
1345 1348
1346 1349 Return:
1347 1350 None
1348 1351 """
1349 1352
1350 1353 raise NotImplementedError
1351 1354
1352 1355 def __writeFirstHeader(self):
1353 1356 """
1354 1357 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1355 1358
1356 1359 Affected:
1357 1360 __dataType
1358 1361
1359 1362 Return:
1360 1363 None
1361 1364 """
1362 1365
1363 1366 # COMPUTE PARAMETERS
1364 1367
1365 1368 sizeLongHeader = self.systemHeaderObj.size + \
1366 1369 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1367 1370 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1368 1371
1369 1372 self.basicHeaderObj.write(self.fp)
1370 1373 self.systemHeaderObj.write(self.fp)
1371 1374 self.radarControllerHeaderObj.write(self.fp)
1372 1375 self.processingHeaderObj.write(self.fp)
1373 1376
1374 1377 def __setNewBlock(self):
1375 1378 """
1376 1379 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1377 1380
1378 1381 Return:
1379 1382 0 : if nothing could be written
1380 1383 1 : if the Basic Header or the First Header was written
1381 1384 """
1382 1385 if self.fp == None:
1383 1386 self.setNextFile()
1384 1387
1385 1388 if self.flagIsNewFile:
1386 1389 return 1
1387 1390
1388 1391 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1389 1392 self.basicHeaderObj.write(self.fp)
1390 1393 return 1
1391 1394
1392 1395 if not(self.setNextFile()):
1393 1396 return 0
1394 1397
1395 1398 return 1
1396 1399
1397 1400 def writeNextBlock(self):
1398 1401 """
1399 1402 Selects the next block of data and writes it to a file
1400 1403
1401 1404 Return:
1402 1405 0 : if the data block could not be written
1403 1406 1 : if the data block was written
1404 1407 """
1405 1408 if not(self.__setNewBlock()):
1406 1409 return 0
1407 1410
1408 1411 self.writeBlock()
1409 1412
1410 1413 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1411 1414 self.processingHeaderObj.dataBlocksPerFile))
1412 1415
1413 1416 return 1
1414 1417
1415 1418 def setNextFile(self):
1416 1419 """Determina el siguiente file que sera escrito
1417 1420
1418 1421 Affected:
1419 1422 self.filename
1420 1423 self.subfolder
1421 1424 self.fp
1422 1425 self.setFile
1423 1426 self.flagIsNewFile
1424 1427
1425 1428 Return:
1426 1429 0 : if the file cannot be written
1427 1430 1 : if the file is ready to be written
1428 1431 """
1429 1432 ext = self.ext
1430 1433 path = self.path
1431 1434
1432 1435 if self.fp != None:
1433 1436 self.fp.close()
1434 1437
1435 1438
1436 1439 if not os.path.exists(path):
1437 1440 os.mkdir(path)
1438 1441
1439 1442 timeTuple = time.localtime(self.dataOut.utctime)
1440 1443 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1441 1444
1442 1445 fullpath = os.path.join(path, subfolder)
1443 1446 setFile = self.setFile
1444 1447
1445 1448 if not(os.path.exists(fullpath)):
1446 1449 os.mkdir(fullpath)
1447 1450 setFile = -1 # initialize the set counter
1448 1451 else:
1449 1452 filesList = os.listdir(fullpath)
1450 1453 if len(filesList) > 0:
1451 1454 filesList = sorted(filesList, key=str.lower)
1452 1455 filen = filesList[-1]
1453 1456 # the filename must have the following format
1454 1457 # 0 1234 567 89A BCDE (hex)
1455 1458 # x YYYY DDD SSS .ext
1456 1459 if isNumber(filen[8:11]):
1457 1460 # initialize the set counter to the set number of the last file
1458 1461 setFile = int(filen[8:11])
1459 1462 else:
1460 1463 setFile = -1
1461 1464 else:
1462 1465 setFile = -1 # initialize the set counter
1463 1466
1464 1467 setFile += 1
1465 1468
1466 1469 # If this is a new day it resets some values
1467 1470 if self.dataOut.datatime.date() > self.fileDate:
1468 1471 setFile = 0
1469 1472 self.nTotalBlocks = 0
1470 1473
1471 1474 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1472 1475 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1473 1476
1474 1477 filename = os.path.join(path, subfolder, filen)
1475 1478
1476 1479 fp = open(filename, 'wb')
1477 1480
1478 1481 self.blockIndex = 0
1479 1482 self.filename = filename
1480 1483 self.subfolder = subfolder
1481 1484 self.fp = fp
1482 1485 self.setFile = setFile
1483 1486 self.flagIsNewFile = 1
1484 1487 self.fileDate = self.dataOut.datatime.date()
1485 1488 self.setFirstHeader()
1486 1489
1487 1490 print('[Writing] Opening file: %s' % self.filename)
1488 1491
1489 1492 self.__writeFirstHeader()
1490 1493
1491 1494 return 1
1492 1495
1493 1496 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1494 1497 """
1495 1498 Sets the format in which the data will be saved and writes the First Header
1496 1499
1497 1500 Inputs:
1498 1501 path : directory where data will be saved
1499 1502 profilesPerBlock : number of profiles per block
1500 1503 set : initial file set
1501 1504 datatype : An integer number that defines data type:
1502 1505 0 : int8 (1 byte)
1503 1506 1 : int16 (2 bytes)
1504 1507 2 : int32 (4 bytes)
1505 1508 3 : int64 (8 bytes)
1506 1509 4 : float32 (4 bytes)
1507 1510 5 : float64 (double, 8 bytes)
1508 1511
1509 1512 Return:
1510 1513 0 : if the setup was not successful
1511 1514 1 : if the setup was successful
1512 1515 """
1513 1516
1514 1517 if ext == None:
1515 1518 ext = self.ext
1516 1519
1517 1520 self.ext = ext.lower()
1518 1521
1519 1522 self.path = path
1520 1523
1521 1524 if set is None:
1522 1525 self.setFile = -1
1523 1526 else:
1524 1527 self.setFile = set - 1
1525 1528
1526 1529 self.blocksPerFile = blocksPerFile
1527 1530 self.profilesPerBlock = profilesPerBlock
1528 1531 self.dataOut = dataOut
1529 1532 self.fileDate = self.dataOut.datatime.date()
1530 1533 self.dtype = self.dataOut.dtype
1531 1534
1532 1535 if datatype is not None:
1533 1536 self.dtype = get_numpy_dtype(datatype)
1534 1537
1535 1538 if not(self.setNextFile()):
1536 1539 print("[Writing] There isn't a next file")
1537 1540 return 0
1538 1541
1539 1542 self.setBlockDimension()
1540 1543
1541 1544 return 1
1542 1545
1543 1546 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1544 1547
1545 1548 if not(self.isConfig):
1546 1549
1547 1550 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1548 1551 set=set, ext=ext, datatype=datatype, **kwargs)
1549 1552 self.isConfig = True
1550 1553
1551 1554 self.dataOut = dataOut
1552 1555 self.putData()
1553 1556 return self.dataOut
1554 1557
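As a hedged usage sketch (not part of this change): a concrete writer derived from the base class above would normally be attached to a processing unit and configured through addParameter, following the same convention the HDFWriter docstring uses later in this diff. The unit name 'VoltageWriter', the 'proc_unit' object and all values below are illustrative assumptions.

# Hypothetical sketch: attaching a raw-data writer built on the base class above.
# 'proc_unit' and 'VoltageWriter' are assumed names, not taken from this commit.
writer = proc_unit.addOperation(name='VoltageWriter')
writer.addParameter(name='path', value='/path/to/output')
writer.addParameter(name='blocksPerFile', value='100')
writer.addParameter(name='profilesPerBlock', value='64')
writer.addParameter(name='datatype', value='4')  # 4 -> float32, per the setup() docstring above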
1555 1558 @MPDecorator
1556 1559 class printInfo(Operation):
1557 1560
1558 1561 def __init__(self):
1559 1562
1560 1563 Operation.__init__(self)
1561 1564 self.__printInfo = True
1562 1565
1563 1566 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1564 1567 if self.__printInfo == False:
1565 1568 return
1566 1569
1567 1570 for header in headers:
1568 1571 if hasattr(dataOut, header):
1569 1572 obj = getattr(dataOut, header)
1570 1573 if hasattr(obj, 'printInfo'):
1571 1574 obj.printInfo()
1572 1575 else:
1573 1576 print(obj)
1574 1577 else:
1575 1578 log.warning('Header {} Not found in object'.format(header))
1576 1579
1577 1580 self.__printInfo = False
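A minimal sketch of how the printInfo operation above might be used in a chain, assuming the addOperation/addParameter convention shown in the HDFWriter docstring further down; 'proc_unit' is an assumed processing unit object.

# Hypothetical usage: print radar headers once at the start of processing.
op = proc_unit.addOperation(name='printInfo')
op.addParameter(name='headers', value='systemHeaderObj,processingHeaderObj')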
@@ -1,665 +1,665
1 1 '''
2 2 Created on Sep 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28 from schainpy.utils import log
import schainpy.admin  # needed: schainpy.admin.SchainWarning/SchainError are raised below
29 29
30 30
31 31 class AMISRReader(ProcessingUnit):
32 32 '''
33 33 classdocs
34 34 '''
35 35
36 36 def __init__(self):
37 37 '''
38 38 Constructor
39 39 '''
40 40
41 41 ProcessingUnit.__init__(self)
42 42
43 43 self.set = None
44 44 self.subset = None
45 45 self.extension_file = '.h5'
46 46 self.dtc_str = 'dtc'
47 47 self.dtc_id = 0
48 48 self.status = True
49 49 self.isConfig = False
50 50 self.dirnameList = []
51 51 self.filenameList = []
52 52 self.fileIndex = None
53 53 self.flagNoMoreFiles = False
54 54 self.flagIsNewFile = 0
55 55 self.filename = ''
56 56 self.amisrFilePointer = None
57 57 self.realBeamCode = []
58 58 self.beamCodeMap = None
59 59 self.azimuthList = []
60 60 self.elevationList = []
61 61 self.dataShape = None
62 62 self.flag_old_beams = False
63 63
64 64
65 65 self.profileIndex = 0
66 66
67 67
68 68 self.beamCodeByFrame = None
69 69 self.radacTimeByFrame = None
70 70
71 71 self.dataset = None
72 72
73 73 self.__firstFile = True
74 74
75 75 self.buffer = None
76 76
77 77 self.timezone = 'ut'
78 78
79 79 self.__waitForNewFile = 20
80 80 self.__filename_online = None
81 81 #Is it really necessary to create the output object in the initializer?
82 82 self.dataOut = Voltage()
83 83 self.dataOut.error=False
84
84 self.margin_days = 1
85 85
86 86 def setup(self,path=None,
87 87 startDate=None,
88 88 endDate=None,
89 89 startTime=None,
90 90 endTime=None,
91 91 walk=True,
92 92 timezone='ut',
93 93 all=0,
94 94 code = None,
95 95 nCode = 0,
96 96 nBaud = 0,
97 97 online=False,
98 old_beams=False):
98 old_beams=False,
99 margin_days=1):
99 100
100 101
101 102
102 103 self.timezone = timezone
103 104 self.all = all
104 105 self.online = online
105 106 self.flag_old_beams = old_beams
106 107 self.code = code
107 108 self.nCode = int(nCode)
108 109 self.nBaud = int(nBaud)
109
110 self.margin_days = margin_days
110 111
111 112
112 113 #self.findFiles()
113 114 if not(online):
114 115 #Offline file search
115 116 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
116 117 else:
117 118 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
118 119
119 120 if not(self.filenameList):
120 121 raise schainpy.admin.SchainWarning("There is no files into the folder: %s"%(path))
121 122 sys.exit()
122 123
123 124 self.fileIndex = 0
124 125
125 126 self.readNextFile(online)
126 127
127 128 '''
128 129 Add code
129 130 '''
130 131 self.isConfig = True
131 132 # print("Setup Done")
132 133 pass
133 134
134 135
135 136 def readAMISRHeader(self,fp):
136 137
137 138 if self.isConfig and (not self.flagNoMoreFiles):
138 139 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
139 140 if self.dataShape != newShape and newShape != None:
140 141 print(self.dataShape, newShape, "\n")  # report shapes before raising (was unreachable after the raise)
141 142 raise schainpy.admin.SchainError("NEW FILE HAS A DIFFERENT SHAPE")
142 143 return 0
143 144 else:
144 145 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
145 146
146 147
147 148 header = 'Raw11/Data/RadacHeader'
148 149 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
149 150 if (self.startDate> datetime.date(2021, 7, 15)) or self.flag_old_beams: #the beam-pointing extraction method changed on 2021-07-17; it can also be forced with the reorganization flag
150 151 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
151 152 self.trueBeams = self.beamcodeFile.split("\n")
152 153 self.trueBeams.pop()#remove last
153 154 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
154 155 self.beamCode = [int(x, 16) for x in self.realBeamCode]
155 156 else:
156 157 _beamCode= fp.get('Raw11/Data/Beamcodes') #use the method prior to the beam-pointing change
157 158 self.beamCode = _beamCode[0,:]
158 159
159 160 if self.beamCodeMap == None:
160 161 self.beamCodeMap = fp['Setup/BeamcodeMap']
161 162 for beam in self.beamCode:
162 163 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
163 164 beamAziElev = beamAziElev[0].squeeze()
164 165 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
165 166 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
166 167 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
167 168 #print(self.beamCode)
168 169 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
169 170 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
170 171 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
171 172 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
172 173 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
173 174 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
174 175 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
175 176 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
176 177 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
177 178 self.frequency = fp.get('Rx/Frequency')
178 179 txAus = fp.get('Raw11/Data/Pulsewidth')
179 180
180 181
181 182 self.nblocks = self.pulseCount.shape[0] #nblocks
182 183
183 184 self.nprofiles = self.pulseCount.shape[1] #nprofile
184 185 self.nsa = self.nsamplesPulse[0,0] #ngates
185 186 self.nchannels = len(self.beamCode)
186 187 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
187 188 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
188 189 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
189 190
190 191 #filling radar controller header parameters
191 192 self.__ippKm = self.ippSeconds *.15*1e6 # in km
192 193 self.__txA = (txAus[()])*.15 #(ipp[us]*.15km/1us) in km
193 194 self.__txB = 0
194 195 nWindows=1
195 196 self.__nSamples = self.nsa
196 197 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
197 198 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
198
199 #print("amisr-ipp:",self.ippSeconds, self.__ippKm)
199 200 #for now, until it is understood why the saved code differs (code included even though it is not in the tuf file)
200 201 #self.__codeType = 0
201 202 # self.__nCode = None
202 203 # self.__nBaud = None
203 204 self.__code = self.code
204 205 self.__codeType = 0
205 206 if self.code != None:
206 207 self.__codeType = 1
207 208 self.__nCode = self.nCode
208 209 self.__nBaud = self.nBaud
209 210 #self.__code = 0
210 211
211 212 #filling system header parameters
212 213 self.__nSamples = self.nsa
213 214 self.newProfiles = self.nprofiles/self.nchannels
214 215 self.__channelList = list(range(self.nchannels))
215 216
216 217 self.__frequency = self.frequency[0][0]
217 218
218 219
219 220 return 1
220 221
221 222
222 223 def createBuffers(self):
223 224
224 225 pass
225 226
226 227 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
227 228 self.path = path
228 229 self.startDate = startDate
229 230 self.endDate = endDate
230 231 self.startTime = startTime
231 232 self.endTime = endTime
232 233 self.walk = walk
233 234
234 235 def __checkPath(self):
235 236 if os.path.exists(self.path):
236 237 self.status = 1
237 238 else:
238 239 self.status = 0
239 240 print('Path:%s does not exist'%self.path)
240 241
241 242 return
242 243
243 244
244 245 def __selDates(self, amisr_dirname_format):
245 246 try:
246 247 year = int(amisr_dirname_format[0:4])
247 248 month = int(amisr_dirname_format[4:6])
248 249 dom = int(amisr_dirname_format[6:8])
249 250 thisDate = datetime.date(year,month,dom)
250 251 #one extra day of margin; the results are later filtered by date and time anyway
251 if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
252 if (thisDate>=(self.startDate - datetime.timedelta(days=self.margin_days)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
252 253 return amisr_dirname_format
253 254 except:
254 255 return None
255 256
256 257
257 258 def __findDataForDates(self,online=False):
258 259
259 260 if not(self.status):
260 261 return None
261 262
262 263 pat = '\d+.\d+'
263 264 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
264 265 dirnameList = [x for x in dirnameList if x!=None]
265 266 dirnameList = [x.string for x in dirnameList]
266 267 if not(online):
267 268 dirnameList = [self.__selDates(x) for x in dirnameList]
268 269 dirnameList = [x for x in dirnameList if x!=None]
269 270 if len(dirnameList)>0:
270 271 self.status = 1
271 272 self.dirnameList = dirnameList
272 273 self.dirnameList.sort()
273 274 else:
274 275 self.status = 0
275 276 return None
276 277
277 278 def __getTimeFromData(self):
278 279 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
279 280 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
280 281
281 282 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
282 283 print('........................................')
283 284 filter_filenameList = []
284 285 self.filenameList.sort()
285 286 total_files = len(self.filenameList)
286 287 #for i in range(len(self.filenameList)-1):
287 288 for i in range(total_files):
288 289 filename = self.filenameList[i]
289 290 #print("file-> ",filename)
290 291 try:
291 292 fp = h5py.File(filename,'r')
292 293 time_str = fp.get('Time/RadacTimeString')
293 294
294 295 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
295 296 #startDateTimeStr_File = "2019-12-16 09:21:11"
296 297 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 298 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 299
299 300 #endDateTimeStr_File = "2019-12-16 11:10:11"
300 301 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
301 302 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
302 303 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
303 304
304 305 fp.close()
305 306
306 307 #print("check time", startDateTime_File)
307 308 if self.timezone == 'lt':
308 309 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
309 310 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
310 311 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
311 312 filter_filenameList.append(filename)
312 313
313 314 if (startDateTime_File>endDateTime_Reader):
314 315 break
315 316 except Exception as e:
316 317 log.warning("Error opening file {} -> {}".format(os.path.split(filename)[1],e))
317 318
318 319 filter_filenameList.sort()
319 320 self.filenameList = filter_filenameList
320 321
321 322 return 1
322 323
323 324 def __filterByGlob1(self, dirName):
324 325 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
325 326 filter_files.sort()
326 327 filterDict = {}
327 328 filterDict.setdefault(dirName)
328 329 filterDict[dirName] = filter_files
329 330 return filterDict
330 331
331 332 def __getFilenameList(self, fileListInKeys, dirList):
332 333 for value in fileListInKeys:
333 334 dirName = list(value.keys())[0]
334 335 for file in value[dirName]:
335 336 filename = os.path.join(dirName, file)
336 337 self.filenameList.append(filename)
337 338
338 339
339 340 def __selectDataForTimes(self, online=False):
340 341 #the time filter is not implemented here yet
341 342 if not(self.status):
342 343 return None
343 344
344 345 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
345 346 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
346 347 self.__getFilenameList(fileListInKeys, dirList)
347 348 if not(online):
348 349 #filter by time
349 350 if not(self.all):
350 351 self.__getTimeFromData()
351 352
352 353 if len(self.filenameList)>0:
353 354 self.status = 1
354 355 self.filenameList.sort()
355 356 else:
356 357 self.status = 0
357 358 return None
358 359
359 360 else:
360 361 #get the last file - 1
361 362 self.filenameList = [self.filenameList[-2]]
362 363 new_dirnameList = []
363 364 for dirname in self.dirnameList:
364 365 junk = numpy.array([dirname in x for x in self.filenameList])
365 366 junk_sum = junk.sum()
366 367 if junk_sum > 0:
367 368 new_dirnameList.append(dirname)
368 369 self.dirnameList = new_dirnameList
369 370 return 1
370 371
371 372 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
372 373 endTime=datetime.time(23,59,59),walk=True):
373 374
374 375 if endDate ==None:
375 376 startDate = datetime.datetime.utcnow().date()
376 377 endDate = datetime.datetime.utcnow().date()
377 378
378 379 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
379 380
380 381 self.__checkPath()
381 382
382 383 self.__findDataForDates(online=True)
383 384
384 385 self.dirnameList = [self.dirnameList[-1]]
385 386
386 387 self.__selectDataForTimes(online=True)
387 388
388 389 return
389 390
390 391
391 392 def searchFilesOffLine(self,
392 393 path,
393 394 startDate,
394 395 endDate,
395 396 startTime=datetime.time(0,0,0),
396 397 endTime=datetime.time(23,59,59),
397 398 walk=True):
398 399
399 400 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
400 401
401 402 self.__checkPath()
402 403
403 404 self.__findDataForDates()
404 405
405 406 self.__selectDataForTimes()
406 407
407 408 for i in range(len(self.filenameList)):
408 409 print("%s" %(self.filenameList[i]))
409 410
410 411 return
411 412
412 413 def __setNextFileOffline(self):
413 414
414 415 try:
415 416 self.filename = self.filenameList[self.fileIndex]
416 417 self.amisrFilePointer = h5py.File(self.filename,'r')
417 418 self.fileIndex += 1
418 419 except:
419 420 self.flagNoMoreFiles = 1
420 421 raise schainpy.admin.SchainError('No more files to read')
421 422 return 0
422 423
423 424 self.flagIsNewFile = 1
424 425 print("Setting the file: %s"%self.filename)
425 426
426 427 return 1
427 428
428 429
429 430 def __setNextFileOnline(self):
430 431 filename = self.filenameList[0]
431 432 if self.__filename_online != None:
432 433 self.__selectDataForTimes(online=True)
433 434 filename = self.filenameList[0]
434 435 wait = 0
435 436 self.__waitForNewFile=300 ## DEBUG:
436 437 while self.__filename_online == filename:
437 438 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
438 439 if wait == 5:
439 440 self.flagNoMoreFiles = 1
440 441 return 0
441 442 sleep(self.__waitForNewFile)
442 443 self.__selectDataForTimes(online=True)
443 444 filename = self.filenameList[0]
444 445 wait += 1
445 446
446 447 self.__filename_online = filename
447 448
448 449 self.amisrFilePointer = h5py.File(filename,'r')
449 450 self.flagIsNewFile = 1
450 451 self.filename = filename
451 452 print("Setting the file: %s"%self.filename)
452 453 return 1
453 454
454 455
455 456 def readData(self):
456 457 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
457 458 re = buffer[:,:,:,0]
458 459 im = buffer[:,:,:,1]
459 460 dataset = re + im*1j
460 461
461 462 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
462 463 timeset = self.radacTime[:,0]
463 464
464 465 return dataset,timeset
465 466
466 467 def reshapeData(self):
467 468 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
468 469 channels = self.beamCodeByPulse[0,:]
469 470 nchan = self.nchannels
470 471 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
471 472 nblocks = self.nblocks
472 473 nsamples = self.nsa
473 474
474 475 #Dimensions : nChannels, nProfiles, nSamples
475 476 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
476 477 ############################################
477 478
478 479 for thisChannel in range(nchan):
479 480 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
480 481
481 482
482 483 new_block = numpy.transpose(new_block, (1,0,2,3))
483 484 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
484 485
485 486 return new_block
486 487
487 488 def updateIndexes(self):
488 489
489 490 pass
490 491
491 492 def fillJROHeader(self):
492 493
493 494 #fill radar controller header
494 495 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
495 496 txA=self.__txA,
496 497 txB=0,
497 498 nWindows=1,
498 499 nHeights=self.__nSamples,
499 500 firstHeight=self.__firstHeight,
500 501 deltaHeight=self.__deltaHeight,
501 502 codeType=self.__codeType,
502 503 nCode=self.__nCode, nBaud=self.__nBaud,
503 504 code = self.__code,
504 505 fClock=1)
505
506 506 #fill system header
507 507 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
508 508 nProfiles=self.newProfiles,
509 509 nChannels=len(self.__channelList),
510 510 adcResolution=14,
511 511 pciDioBusWidth=32)
512 512
513 513 self.dataOut.type = "Voltage"
514 514 self.dataOut.data = None
515 515 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
516 516 # self.dataOut.nChannels = 0
517 517
518 518 # self.dataOut.nHeights = 0
519 519
520 520 self.dataOut.nProfiles = self.newProfiles*self.nblocks
521 521 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
522 522 ranges = numpy.reshape(self.rangeFromFile[()],(-1))
523 523 self.dataOut.heightList = ranges/1000.0 #km
524 524 self.dataOut.channelList = self.__channelList
525 525 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
526 526
527 527 # self.dataOut.channelIndexList = None
528 528
529 529
530 530 self.dataOut.azimuthList = numpy.array(self.azimuthList)
531 531 self.dataOut.elevationList = numpy.array(self.elevationList)
532 532 self.dataOut.codeList = numpy.array(self.beamCode)
533 533 #print(self.dataOut.elevationList)
534 534 self.dataOut.flagNoData = True
535 535
536 536 #Set to TRUE if the data is discontinuous
537 537 self.dataOut.flagDiscontinuousBlock = False
538 538
539 539 self.dataOut.utctime = None
540 540
541 541 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
542 542 if self.timezone == 'lt':
543 543 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
544 544 else:
545 545 self.dataOut.timeZone = 0 #by default time is UTC
546 546
547 547 self.dataOut.dstFlag = 0
548 548 self.dataOut.errorCount = 0
549 549 self.dataOut.nCohInt = 1
550 550 self.dataOut.flagDecodeData = False #assume the data is already decoded
551 551 self.dataOut.flagDeflipData = False #assume the data has no flip applied
552 552 self.dataOut.flagShiftFFT = False
553 553 self.dataOut.ippSeconds = self.ippSeconds
554 554
555 555 #Time interval between profiles
556 556 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
557 557
558 558 self.dataOut.frequency = self.__frequency
559 559 self.dataOut.realtime = self.online
560 560 pass
561 561
562 562 def readNextFile(self,online=False):
563 563
564 564 if not(online):
565 565 newFile = self.__setNextFileOffline()
566 566 else:
567 567 newFile = self.__setNextFileOnline()
568 568
569 569 if not(newFile):
570 570 self.dataOut.error = True
571 571 return 0
572 572
573 573 if not self.readAMISRHeader(self.amisrFilePointer):
574 574 self.dataOut.error = True
575 575 return 0
576 576
577 577 self.createBuffers()
578 578 self.fillJROHeader()
579 579
580 580 #self.__firstFile = False
581 581
582 582
583 583
584 584 self.dataset,self.timeset = self.readData()
585 585
586 586 if self.endDate!=None:
587 587 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
588 588 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
589 589 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
590 590 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
591 591 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
592 592 if self.timezone == 'lt':
593 593 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
594 594 if (startDateTime_File>endDateTime_Reader):
595 595 return 0
596 596
597 597 self.jrodataset = self.reshapeData()
598 598 #----self.updateIndexes()
599 599 self.profileIndex = 0
600 600
601 601 return 1
602 602
603 603
604 604 def __hasNotDataInBuffer(self):
605 605 if self.profileIndex >= (self.newProfiles*self.nblocks):
606 606 return 1
607 607 return 0
608 608
609 609
610 610 def getData(self):
611 611
612 612 if self.flagNoMoreFiles:
613 613 self.dataOut.flagNoData = True
614 614 return 0
615 615
616 616 if self.__hasNotDataInBuffer():
617 617 if not (self.readNextFile(self.online)):
618 618 return 0
619 619
620 620
621 621 if self.dataset is None: # set this condition when there is no more data to read
622 622 self.dataOut.flagNoData = True
623 623 return 0
624 624
625 625 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
626 626
627 627 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
628 628
629 629 #print("R_t",self.timeset)
630 630
631 631 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
632 632 #check the jro data basic header to see if it is compatible with this value
633 633 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
634 634 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
635 635 indexblock = self.profileIndex/self.newProfiles
636 636 #print (indexblock, indexprof)
637 637 diffUTC = 0
638 638 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
639 639
640 640 #print("utc :",indexblock," __ ",t_comp)
641 641 #print(numpy.shape(self.timeset))
642 642 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
643 643 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
644 644
645 645 self.dataOut.profileIndex = self.profileIndex
646 646 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
647 647 self.dataOut.flagNoData = False
648 648 # if indexprof == 0:
649 649 # print("kamisr: ",self.dataOut.utctime)
650 650
651 651 self.profileIndex += 1
652 652
653 653 return self.dataOut.data #is this return really necessary?
654 654
655 655
656 656 def run(self, **kwargs):
657 657 '''
658 658 This method will be called many times so here you should put all your code
659 659 '''
660 660 #print("running kamisr")
661 661 if not self.isConfig:
662 662 self.setup(**kwargs)
663 663 self.isConfig = True
664 664
665 665 self.getData()
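A hedged configuration sketch for the AMISRReader above, showing the new margin_days parameter added in this commit; the project object, path and dates are illustrative assumptions.

# Hypothetical sketch: reading AMISR voltage files with the new margin_days option.
reader = project.addReadUnit(
    name='AMISRReader',
    path='/path/to/amisr/data',
    startDate='2021/07/20',
    endDate='2021/07/20',
    startTime='00:00:00',
    endTime='23:59:59',
    timezone='ut',
    margin_days=1,       # scan directories up to one extra day before startDate
    old_beams=False)     # set True to force the beam-code-file extraction regardless of date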
@@ -1,685 +1,685
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. These files contain
19 19 by default two groups, Data and Metadata, and all variables are loaded as `dataOut`
20 20 attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 self.dataOut = Parameters()
99 99 self.dataOut.error=False ## NOTE: important to define this before starting
100 100 self.dataOut.flagNoData = True
101 101
102 102 def setup(self, **kwargs):
103 103
104 104 self.set_kwargs(**kwargs)
105 105 if not self.ext.startswith('.'):
106 106 self.ext = '.{}'.format(self.ext)
107 107
108 108 if self.online:
109 109 log.log("Searching files in online mode...", self.name)
110 110
111 111 for nTries in range(self.nTries):
112 112 fullpath = self.searchFilesOnLine(self.path, self.startDate,
113 113 self.endDate, self.expLabel, self.ext, self.walk,
114 114 self.filefmt, self.folderfmt)
115 115 pathname, filename = os.path.split(fullpath)
116 116
117 117 try:
118 118 fullpath = next(fullpath)
119 119
120 120 except:
121 121 fullpath = None
122 122
123 123 if fullpath:
124 124 break
125 125
126 126 log.warning(
127 127 'Waiting {} sec for a valid file in {}: try {} ...'.format(
128 128 self.delay, self.path, nTries + 1),
129 129 self.name)
130 130 time.sleep(self.delay)
131 131
132 132 if not(fullpath):
133 133 raise schainpy.admin.SchainError(
134 134 'There isn\'t any valid file in {}'.format(self.path))
135 135
136 136 pathname, filename = os.path.split(fullpath)
137 137 self.year = int(filename[1:5])
138 138 self.doy = int(filename[5:8])
139 139 self.set = int(filename[8:11]) - 1
140 140 else:
141 141 log.log("Searching files in {}".format(self.path), self.name)
142 142 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
143 143 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
144 144
145 145 self.setNextFile()
146 146
147 147
148 148
149 149
150 150 def readFirstHeader(self):
151 151 '''Read metadata and data'''
152 152
153 153 self.__readMetadata()
154 154 self.__readData()
155 155 self.__setBlockList()
156 156
157 157 for attr in self.meta:
158 158 setattr(self.dataOut, attr, self.meta[attr])
159 159 self.blockIndex = 0
160 160
161 161 return
162 162
163 163 def __setBlockList(self):
164 164 '''
165 165 Selects the data within the times defined
166 166
167 167 self.fp
168 168 self.startTime
169 169 self.endTime
170 170 self.blockList
171 171 self.blocksPerFile
172 172
173 173 '''
174 174
175 175 startTime = self.startTime
176 176 endTime = self.endTime
177 177 thisUtcTime = self.data['utctime'] + self.utcoffset
178 178 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
179 179 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
180 180 self.startFileDatetime = thisDatetime
181 181 thisDate = thisDatetime.date()
182 182 thisTime = thisDatetime.time()
183 183
184 184 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
185 185 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
186 186
187 187 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
188 188
189 189 self.blockList = ind
190 190 self.blocksPerFile = len(ind)
191 191 self.blocksPerFile = len(thisUtcTime)
192 192 return
193 193
194 194 def __readMetadata(self):
195 195 '''
196 196 Reads Metadata
197 197 '''
198 198
199 199 meta = {}
200 200
201 201 if self.description:
202 202 for key, value in self.description['Metadata'].items():
203 203 meta[key] = self.fp[value][()]
204 204 else:
205 205 grp = self.fp['Metadata']
206 206 for name in grp:
207 207 meta[name] = grp[name][()]
208 208
209 209 if self.extras:
210 210 for key, value in self.extras.items():
211 211 meta[key] = value
212 212 self.meta = meta
213 213
214 214 return
215 215
216 216
217 217
218 218 def checkForRealPath(self, nextFile, nextDay):
219 219
220 220 # print("check FRP")
221 221 # dt = self.startFileDatetime + datetime.timedelta(1)
222 222 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
223 223 # fullfilename = os.path.join(self.path, filename)
224 224 # print("check Path ",fullfilename,filename)
225 225 # if os.path.exists(fullfilename):
226 226 # return fullfilename, filename
227 227 # return None, filename
228 228 return None,None
229 229
230 230 def __readData(self):
231 231
232 232 data = {}
233 233
234 234 if self.description:
235 235 for key, value in self.description['Data'].items():
236 236 if isinstance(value, str):
237 237 if isinstance(self.fp[value], h5py.Dataset):
238 238 data[key] = self.fp[value][()]
239 239 elif isinstance(self.fp[value], h5py.Group):
240 240 array = []
241 241 for ch in self.fp[value]:
242 242 array.append(self.fp[value][ch][()])
243 243 data[key] = numpy.array(array)
244 244 elif isinstance(value, list):
245 245 array = []
246 246 for ch in value:
247 247 array.append(self.fp[ch][()])
248 248 data[key] = numpy.array(array)
249 249 else:
250 250 grp = self.fp['Data']
251 251 for name in grp:
252 252 if isinstance(grp[name], h5py.Dataset):
253 253 array = grp[name][()]
254 254 elif isinstance(grp[name], h5py.Group):
255 255 array = []
256 256 for ch in grp[name]:
257 257 array.append(grp[name][ch][()])
258 258 array = numpy.array(array)
259 259 else:
260 260 log.warning('Unknown type: {}'.format(name))
261 261
262 262 if name in self.description:
263 263 key = self.description[name]
264 264 else:
265 265 key = name
266 266 data[key] = array
267 267
268 268 self.data = data
269 269 return
270 270
271 271 def getData(self):
272 272 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
273 273 self.dataOut.flagNoData = True
274 274 self.blockIndex = self.blocksPerFile
275 275 self.dataOut.error = True # TERMINATES THE PROGRAM
276 276 return
277 277 for attr in self.data:
278 278
279 279 if self.data[attr].ndim == 1:
280 280 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
281 281 else:
282 282 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
283 283
284 284
285 285 self.blockIndex += 1
286 286
287 287 if self.blockIndex == 1:
288 288 log.log("Block No. {}/{} -> {}".format(
289 289 self.blockIndex,
290 290 self.blocksPerFile,
291 291 self.dataOut.datatime.ctime()), self.name)
292 292 else:
293 293 log.log("Block No. {}/{} ".format(
294 294 self.blockIndex,
295 295 self.blocksPerFile),self.name)
296 296
297 297 if self.blockIndex == self.blocksPerFile:
298 298 self.setNextFile()
299 299
300 300 self.dataOut.flagNoData = False
301 301
302 302
303 303 def run(self, **kwargs):
304 304
305 305 if not(self.isConfig):
306 306 self.setup(**kwargs)
307 307 self.isConfig = True
308 308
309 309 self.getData()
310 310
311 311 #@MPDecorator
312 312 class HDFWrite(Operation):
313 313 """Operation to write HDF5 files.
314 314
315 315 The HDF5 file contains by default two groups, Data and Metadata, where
316 316 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
317 317 parameters; data attributes are normally time dependent whereas the metadata
318 318 are not.
319 319 It is possible to customize the structure of the HDF5 file with the
320 320 optional `description` parameter, see the examples.
321 321
322 322 Parameters:
323 323 -----------
324 324 path : str
325 325 Path where files will be saved.
326 326 blocksPerFile : int
327 327 Number of blocks per file
328 328 metadataList : list
329 329 List of the dataOut attributes that will be saved as metadata
330 330 dataList : int
331 331 List of the dataOut attributes that will be saved as data
332 332 setType : bool
333 333 If True the name of the files corresponds to the timestamp of the data
334 334 description : dict, optional
335 335 Dictionary with the desired description of the HDF5 file
336 336
337 337 Examples
338 338 --------
339 339
340 340 desc = {
341 341 'data_output': {'winds': ['z', 'w', 'v']},
342 342 'utctime': 'timestamps',
343 343 'heightList': 'heights'
344 344 }
345 345 desc = {
346 346 'data_output': ['z', 'w', 'v'],
347 347 'utctime': 'timestamps',
348 348 'heightList': 'heights'
349 349 }
350 350 desc = {
351 351 'Data': {
352 352 'data_output': 'winds',
353 353 'utctime': 'timestamps'
354 354 },
355 355 'Metadata': {
356 356 'heightList': 'heights'
357 357 }
358 358 }
359 359
360 360 writer = proc_unit.addOperation(name='HDFWriter')
361 361 writer.addParameter(name='path', value='/path/to/file')
362 362 writer.addParameter(name='blocksPerFile', value='32')
363 363 writer.addParameter(name='metadataList', value='heightList,timeZone')
364 364 writer.addParameter(name='dataList',value='data_output,utctime')
365 365 # writer.addParameter(name='description',value=json.dumps(desc))
366 366
367 367 """
368 368
369 369 ext = ".hdf5"
370 370 optchar = "D"
371 371 filename = None
372 372 path = None
373 373 setFile = None
374 374 fp = None
375 375 firsttime = True
376 376 #Configurations
377 377 blocksPerFile = None
378 378 blockIndex = None
379 379 dataOut = None #eval ??????
380 380 #Data Arrays
381 381 dataList = None
382 382 metadataList = None
383 383 currentDay = None
384 384 lastTime = None
385 385 timeZone = "ut"
386 386 hourLimit = 3
387 387 breakDays = True
388 388
389 389 def __init__(self):
390 390
391 391 Operation.__init__(self)
392 392
393 393
394 394 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None,
395 395 description={},timeZone = "ut",hourLimit = 3, breakDays=True):
396 396 self.path = path
397 397 self.blocksPerFile = blocksPerFile
398 398 self.metadataList = metadataList
399 399 self.dataList = [s.strip() for s in dataList]
400 400 self.setType = setType
401 401 self.description = description
402 402 self.timeZone = timeZone
403 403 self.hourLimit = hourLimit
404 404 self.breakDays = breakDays
405 405
406 406 if self.metadataList is None:
407 407 self.metadataList = self.dataOut.metadata_list
408 408
409 409 tableList = []
410 410 dsList = []
411 411
412 412 for i in range(len(self.dataList)):
413 413 dsDict = {}
414 414 if hasattr(self.dataOut, self.dataList[i]):
415 415 dataAux = getattr(self.dataOut, self.dataList[i])
416 416 dsDict['variable'] = self.dataList[i]
417 417 else:
418 log.warning('Attribute {} not found in dataOut', self.name)
418 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]),self.name)
419 419 continue
420 420
421 421 if dataAux is None:
422 422 continue
423 423 elif isinstance(dataAux, (int, float, numpy.integer, numpy.floating)):
424 424 dsDict['nDim'] = 0
425 425 else:
426 426 dsDict['nDim'] = len(dataAux.shape)
427 427 dsDict['shape'] = dataAux.shape
428 428 dsDict['dsNumber'] = dataAux.shape[0]
429 429 dsDict['dtype'] = dataAux.dtype
430 430
431 431 dsList.append(dsDict)
432 432
433 433 self.blockIndex = 0
434 434 self.dsList = dsList
435 435 self.currentDay = self.dataOut.datatime.date()
436 436
437 437
438 438 def timeFlag(self):
439 439 currentTime = self.dataOut.utctime
440 440 timeTuple = None
441 441 if self.timeZone == "lt":
442 442 timeTuple = time.localtime(currentTime)
443 443 else :
444 444 timeTuple = time.gmtime(currentTime)
445 445
446 446 dataDay = timeTuple.tm_yday
447 447
448 448 if self.lastTime is None:
449 449 self.lastTime = currentTime
450 450 self.currentDay = dataDay
451 451 return False
452 452
453 453 timeDiff = currentTime - self.lastTime
454 454
455 455 #If the day changed or the time gap between samples exceeds self.hourLimit
456 456 if (dataDay != self.currentDay) and self.breakDays:
457 457 self.currentDay = dataDay
458 458 return True
459 459 elif timeDiff > self.hourLimit*60*60:
460 460 self.lastTime = currentTime
461 461 return True
462 462 else:
463 463 self.lastTime = currentTime
464 464 return False
465 465
466 466 def run(self, dataOut,**kwargs):
467 467
468 468 self.dataOut = dataOut
469 469 if not(self.isConfig):
470 470 self.setup(**kwargs)
471 471
472 472 self.isConfig = True
473 473 self.setNextFile()
474 474
475 475 self.putData()
476 476
477 477 return self.dataOut
478 478
479 479 def setNextFile(self):
480 480
481 481 ext = self.ext
482 482 path = self.path
483 483 setFile = self.setFile
484 484 timeTuple = None
485 485 if self.timeZone == "lt":
486 486 timeTuple = time.localtime(self.dataOut.utctime)
487 487 elif self.timeZone == "ut":
488 488 timeTuple = time.gmtime(self.dataOut.utctime)
489 489 #print("path: ",timeTuple)
490 490 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
491 491 fullpath = os.path.join(path, subfolder)
492 492
493 493 if os.path.exists(fullpath):
494 494 filesList = os.listdir(fullpath)
495 495 filesList = [k for k in filesList if k.startswith(self.optchar)]
496 496 if len( filesList ) > 0:
497 497 filesList = sorted(filesList, key=str.lower)
498 498 filen = filesList[-1]
499 499 # the filename must have the following format
500 500 # 0 1234 567 89A BCDE (hex)
501 501 # x YYYY DDD SSS .ext
502 502 if isNumber(filen[8:11]):
503 503 setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file
504 504 else:
505 505 setFile = -1
506 506 else:
507 507 setFile = -1 #initialize the set counter
508 508 else:
509 509 os.makedirs(fullpath)
510 510 setFile = -1 #initialize the set counter
511 511
512 512 if self.setType is None:
513 513 setFile += 1
514 514 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
515 515 timeTuple.tm_year,
516 516 timeTuple.tm_yday,
517 517 setFile,
518 518 ext )
519 519 else:
520 520 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
521 521 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
522 522 timeTuple.tm_year,
523 523 timeTuple.tm_yday,
524 524 setFile,
525 525 ext )
526 526
527 527 self.filename = os.path.join( path, subfolder, file )
528 528
529 529
530 530
531 531 def getLabel(self, name, x=None):
532 532
533 533 if x is None:
534 534 if 'Data' in self.description:
535 535 data = self.description['Data']
536 536 if 'Metadata' in self.description:
537 537 data.update(self.description['Metadata'])
538 538 else:
539 539 data = self.description
540 540 if name in data:
541 541 if isinstance(data[name], str):
542 542 return data[name]
543 543 elif isinstance(data[name], list):
544 544 return None
545 545 elif isinstance(data[name], dict):
546 546 for key, value in data[name].items():
547 547 return key
548 548 return name
549 549 else:
550 550 if 'Metadata' in self.description:
551 551 meta = self.description['Metadata']
552 552 else:
553 553 meta = self.description
554 554 if name in meta:
555 555 if isinstance(meta[name], list):
556 556 return meta[name][x]
557 557 elif isinstance(meta[name], dict):
558 558 for key, value in meta[name].items():
559 559 return value[x]
560 560 if 'cspc' in name:
561 561 return 'pair{:02d}'.format(x)
562 562 else:
563 563 return 'channel{:02d}'.format(x)
564 564
565 565 def writeMetadata(self, fp):
566 566
567 567 if self.description:
568 568 if 'Metadata' in self.description:
569 569 grp = fp.create_group('Metadata')
570 570 else:
571 571 grp = fp
572 572 else:
573 573 grp = fp.create_group('Metadata')
574 574
575 575 for i in range(len(self.metadataList)):
576 576 if not hasattr(self.dataOut, self.metadataList[i]):
577 577 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
578 578 continue
579 579 value = getattr(self.dataOut, self.metadataList[i])
580 580 if isinstance(value, bool):
581 581 if value is True:
582 582 value = 1
583 583 else:
584 584 value = 0
585 585 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
586 586 return
587 587
588 588 def writeData(self, fp):
589 589
590 590 if self.description:
591 591 if 'Data' in self.description:
592 592 grp = fp.create_group('Data')
593 593 else:
594 594 grp = fp
595 595 else:
596 596 grp = fp.create_group('Data')
597 597
598 598 dtsets = []
599 599 data = []
600 600
601 601 for dsInfo in self.dsList:
602 602 if dsInfo['nDim'] == 0:
603 603 ds = grp.create_dataset(
604 604 self.getLabel(dsInfo['variable']),
605 605 (self.blocksPerFile, ),
606 606 chunks=True,
607 607 dtype=numpy.float64)
608 608 dtsets.append(ds)
609 609 data.append((dsInfo['variable'], -1))
610 610 else:
611 611 label = self.getLabel(dsInfo['variable'])
612 612 if label is not None:
613 613 sgrp = grp.create_group(label)
614 614 else:
615 615 sgrp = grp
616 616 for i in range(dsInfo['dsNumber']):
617 617 ds = sgrp.create_dataset(
618 618 self.getLabel(dsInfo['variable'], i),
619 619 (self.blocksPerFile, ) + dsInfo['shape'][1:],
620 620 chunks=True,
621 621 dtype=dsInfo['dtype'])
622 622 dtsets.append(ds)
623 623 data.append((dsInfo['variable'], i))
624 624 fp.flush()
625 625
626 626 log.log('Creating file: {}'.format(fp.filename), self.name)
627 627
628 628 self.ds = dtsets
629 629 self.data = data
630 630 self.firsttime = True
631 631
632 632 return
633 633
634 634 def putData(self):
635 635
636 636 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
637 637 self.closeFile()
638 638 self.setNextFile()
639 639 self.dataOut.flagNoData = False
640 640 self.blockIndex = 0
641 641 return
642 642
643 643
644 644
645 645 if self.blockIndex == 0:
646 646 #Write metadata here???
647 647 #Setting HDF5 File
648 648 self.fp = h5py.File(self.filename, 'w')
649 649 #write metadata
650 650 self.writeMetadata(self.fp)
651 651 #Write data
652 652 self.writeData(self.fp)
653 653 log.log('Block No. {}/{} --> {}'.format(self.blockIndex+1, self.blocksPerFile,self.dataOut.datatime.ctime()), self.name)
654 654 elif (self.blockIndex % 10 ==0):
655 655 log.log('Block No. {}/{} --> {}'.format(self.blockIndex+1, self.blocksPerFile,self.dataOut.datatime.ctime()), self.name)
656 656 else:
657 657
658 658 log.log('Block No. {}/{}'.format(self.blockIndex+1, self.blocksPerFile), self.name)
659 659
660 660 for i, ds in enumerate(self.ds):
661 661 attr, ch = self.data[i]
662 662 if ch == -1:
663 663 ds[self.blockIndex] = getattr(self.dataOut, attr)
664 664 else:
665 665 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
666 666
667 667 self.blockIndex += 1
668 668
669 669 self.fp.flush()
670 670 self.dataOut.flagNoData = True
671 671
672 672
673 673 def closeFile(self):
674 674
675 675 if self.blockIndex != self.blocksPerFile:
676 676 for ds in self.ds:
677 677 ds.resize(self.blockIndex, axis=0)
678 678
679 679 if self.fp:
680 680 self.fp.flush()
681 681 self.fp.close()
682 682
683 683 def close(self):
684 684
685 685 self.closeFile()
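The setup() of HDFWrite above adds timeZone, hourLimit and breakDays options that control when a new file is started. Below is a hedged sketch of passing them, following the addParameter convention from the class docstring; the values are illustrative assumptions.

# Hypothetical sketch: HDF5 writer with the file-splitting options shown in setup().
writer = proc_unit.addOperation(name='HDFWriter')
writer.addParameter(name='path', value='/path/to/output')
writer.addParameter(name='blocksPerFile', value='32')
writer.addParameter(name='dataList', value='data_output,utctime')
writer.addParameter(name='timeZone', value='lt')  # use local time for day boundaries
writer.addParameter(name='hourLimit', value='3')  # start a new file after a >3 h data gap
writer.addParameter(name='breakDays', value='1')  # also break files when the day changes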
@@ -1,212 +1,211
1 1 '''
2 2 Base clases to create Processing units and operations, the MPDecorator
3 3 must be used in plotting and writing operations to allow to run as an
4 4 external process.
5 5 '''
6 6 import os
7 7 import inspect
8 8 import zmq
9 9 import time
10 10 import pickle
11 11 import traceback
12 12 from threading import Thread
13 13 from multiprocessing import Process, Queue
14 14 from schainpy.utils import log
15 15 import copy
16 16 QUEUE_SIZE = int(os.environ.get('QUEUE_MAX_SIZE', '10'))
17 17 class ProcessingUnit(object):
18 18 '''
19 19 Base class to create Signal Chain Units
20 20 '''
21 21
22 22 proc_type = 'processing'
23 23
24 24 def __init__(self):
25 25
26 26 self.dataIn = None
27 27 self.dataOut = None
28 28 self.isConfig = False
29 29 self.operations = []
30 30
31 31 def setInput(self, unit):
32 32
33 33 self.dataIn = unit.dataOut
34 34
35 35
36 36 def getAllowedArgs(self):
37 37 if hasattr(self, '__attrs__'):
38 38 return self.__attrs__
39 39 else:
40 40 return inspect.getargspec(self.run).args
41 41
42 42 def addOperation(self, conf, operation):
43 43 '''
44 44 '''
45 45
46 46 self.operations.append((operation, conf.type, conf.getKwargs()))
47 47
48 48 def getOperationObj(self, objId):
49 49
50 50 if objId not in list(self.operations.keys()):
51 51 return None
52 52
53 53 return self.operations[objId]
54 54
55 55 def call(self, **kwargs):
56 56 '''
57 57 '''
58 58
59 59 try:
60 # if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
61 # return self.dataIn.isReady()
62 #dataIn=None corresponds to reader units; the second branch is for processing units
63 if self.dataIn is None or (not self.dataIn.error and not self.dataIn.flagNoData):
60
61 if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
62 return self.dataIn.isReady()
63 elif self.dataIn is None or not self.dataIn.error:
64 64 self.run(**kwargs)
65 65 elif self.dataIn.error:
66 66 self.dataOut.error = self.dataIn.error
67 67 self.dataOut.flagNoData = True
68
69 68 except:
70 69
71 70 err = traceback.format_exc()
72 71 if 'SchainWarning' in err:
73 72 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
74 73 elif 'SchainError' in err:
75 74 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
76 75 else:
77 76 log.error(err, self.name)
78 77 self.dataOut.error = True
79 78
80 79 for op, optype, opkwargs in self.operations:
81 80 if optype == 'other' and self.dataOut.isReady():
82 81 try:
83 82 self.dataOut = op.run(self.dataOut, **opkwargs)
84 83 except Exception as e:
85 84 print(e)
86 85 self.dataOut.error = True
87 86 return 'Error'
88 87 elif optype == 'external' and self.dataOut.isReady() :
89 88 op.queue.put(copy.deepcopy(self.dataOut))
90 89 elif optype == 'external' and self.dataOut.error:
91 90 op.queue.put(copy.deepcopy(self.dataOut))
92 91
93 92 return 'Error' if self.dataOut.error else True#self.dataOut.isReady()
94 93
95 94 def setup(self):
96 95
97 96 raise NotImplementedError
98 97
99 98 def run(self):
100 99
101 100 raise NotImplementedError
102 101
103 102 def close(self):
104 103
105 104 return
106 105
107 106
108 107 class Operation(object):
109 108
110 109 '''
111 110 '''
112 111
113 112 proc_type = 'operation'
114 113
115 114 def __init__(self):
116 115
117 116 self.id = None
118 117 self.isConfig = False
119 118
120 119 if not hasattr(self, 'name'):
121 120 self.name = self.__class__.__name__
122 121
123 122 def getAllowedArgs(self):
124 123 if hasattr(self, '__attrs__'):
125 124 return self.__attrs__
126 125 else:
127 126 return inspect.getargspec(self.run).args
128 127
129 128 def setup(self):
130 129
131 130 self.isConfig = True
132 131
133 132 raise NotImplementedError
134 133
135 134 def run(self, dataIn, **kwargs):
136 135 """
137 136 Performs the required operations on dataIn.data and updates the
138 137 attributes of the dataIn object.
139 138
140 139 Input:
141 140
143 142 dataIn : object of type JROData
143 142
144 143 Return:
145 144
146 145 None
147 146
148 147 Affected:
150 149 __buffer : data reception buffer.
150 149
151 150 """
152 151 if not self.isConfig:
153 152 self.setup(**kwargs)
154 153
155 154 raise NotImplementedError
156 155
157 156 def close(self):
158 157
159 158 return
160 159
161 160
162 161 def MPDecorator(BaseClass):
163 162 """
164 163 Multiprocessing class decorator
165 164
166 165 This function adds multiprocessing features to a BaseClass.
167 166 """
168 167
169 168 class MPClass(BaseClass, Process):
170 169
171 170 def __init__(self, *args, **kwargs):
172 171 super(MPClass, self).__init__()
173 172 Process.__init__(self)
174 173
175 174 self.args = args
176 175 self.kwargs = kwargs
177 176 self.t = time.time()
178 177 self.op_type = 'external'
179 178 self.name = BaseClass.__name__
180 179 self.__doc__ = BaseClass.__doc__
181 180
182 181 if 'plot' in self.name.lower() and not self.name.endswith('_'):
183 182 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
184 183
185 184 self.start_time = time.time()
186 185 self.err_queue = args[3]
187 186 self.queue = Queue(maxsize=QUEUE_SIZE)
188 187 self.myrun = BaseClass.run
189 188
190 189 def run(self):
191 190
192 191 while True:
193 192
194 193 dataOut = self.queue.get()
195 194
196 195 if not dataOut.error:
197 196 try:
198 197 BaseClass.run(self, dataOut, **self.kwargs)
199 198 except:
200 199 err = traceback.format_exc()
201 200 log.error(err, self.name)
202 201 else:
203 202 break
204 203
205 204 self.close()
206 205
207 206 def close(self):
208 207
209 208 BaseClass.close(self)
210 209 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
211 210
212 211 return MPClass
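A minimal sketch of applying the decorator, mirroring the @MPDecorator usage on printInfo earlier in this diff; MyExternalOp is a hypothetical class used only for illustration.

# Hypothetical sketch: an operation wrapped to run as an external process.
@MPDecorator
class MyExternalOp(Operation):

    def __init__(self):
        Operation.__init__(self)

    def run(self, dataOut):
        # dataOut arrives as a deep copy through the decorator's queue
        print(dataOut.utctime)
        return dataOut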
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,1689 +1,1688
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24 class SpectraProc(ProcessingUnit):
25 25
26 26 def __init__(self):
27 27
28 28 ProcessingUnit.__init__(self)
29 29
30 30 self.buffer = None
31 31 self.firstdatatime = None
32 32 self.profIndex = 0
33 33 self.dataOut = Spectra()
34 34 self.id_min = None
35 35 self.id_max = None
36 36 self.setupReq = False # add this to every processing unit
37 37
38 38 def __updateSpecFromVoltage(self):
39 39
40 40 self.dataOut.timeZone = self.dataIn.timeZone
41 41 self.dataOut.dstFlag = self.dataIn.dstFlag
42 42 self.dataOut.errorCount = self.dataIn.errorCount
43 43 self.dataOut.useLocalTime = self.dataIn.useLocalTime
44 44 try:
45 45 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
46 46 except:
47 47 pass
48 48 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
49 49 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
50 50 self.dataOut.channelList = self.dataIn.channelList
51 51 self.dataOut.heightList = self.dataIn.heightList
52 52 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
53 53 self.dataOut.nProfiles = self.dataOut.nFFTPoints
54 54 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
55 55 self.dataOut.utctime = self.firstdatatime
56 56 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
57 57 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
58 58 self.dataOut.flagShiftFFT = False
59 59 self.dataOut.nCohInt = self.dataIn.nCohInt
60 60 self.dataOut.nIncohInt = 1
61 61 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
62 62 self.dataOut.frequency = self.dataIn.frequency
63 63 self.dataOut.realtime = self.dataIn.realtime
64 64 self.dataOut.azimuth = self.dataIn.azimuth
65 65 self.dataOut.zenith = self.dataIn.zenith
66 66 self.dataOut.codeList = self.dataIn.codeList
67 67 self.dataOut.azimuthList = self.dataIn.azimuthList
68 68 self.dataOut.elevationList = self.dataIn.elevationList
69 69
70 70
71
72 71 def __getFft(self):
73 72 """
74 73 Converts Voltage values to Spectra
75 74
76 75 Affected:
77 76 self.dataOut.data_spc
78 77 self.dataOut.data_cspc
79 78 self.dataOut.data_dc
80 79 self.dataOut.heightList
81 80 self.profIndex
82 81 self.buffer
83 82 self.dataOut.flagNoData
84 83 """
85 84 fft_volt = numpy.fft.fft(
86 85 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
87 86 fft_volt = fft_volt.astype(numpy.dtype('complex'))
88 87 dc = fft_volt[:, 0, :]
89 88
90 89 # self-spectra computation
91 90 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
92 91 spc = fft_volt * numpy.conjugate(fft_volt)
93 92 spc = spc.real
94 93
95 94 blocksize = 0
96 95 blocksize += dc.size
97 96 blocksize += spc.size
98 97
99 98 cspc = None
100 99 pairIndex = 0
101 100 if self.dataOut.pairsList != None:
102 101 # cross-spectra computation
103 102 cspc = numpy.zeros(
104 103 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
105 104 for pair in self.dataOut.pairsList:
106 105 if pair[0] not in self.dataOut.channelList:
107 106 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
108 107 str(pair), str(self.dataOut.channelList)))
109 108 if pair[1] not in self.dataOut.channelList:
110 109 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
111 110 str(pair), str(self.dataOut.channelList)))
112 111
113 112 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
114 113 numpy.conjugate(fft_volt[pair[1], :, :])
115 114 pairIndex += 1
116 115 blocksize += cspc.size
117 116
118 117 self.dataOut.data_spc = spc
119 118 self.dataOut.data_cspc = cspc
120 119 self.dataOut.data_dc = dc
121 120 self.dataOut.blockSize = blocksize
122 121 self.dataOut.flagShiftFFT = False
123 122
124 123 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
125 124
126 125 if self.dataIn.type == "Spectra":
127 126
128 127 try:
129 128 self.dataOut.copy(self.dataIn)
130 129
131 130 except Exception as e:
132 131 print(e)
133 132
134 133 if shift_fft:
135 134 # shifts the data a given number of positions to the right along the frequency axis
136 135 shift = int(self.dataOut.nFFTPoints/2)
137 136 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
138 137
139 138 if self.dataOut.data_cspc is not None:
141 140 # shifts the data a given number of positions to the right along the frequency axis
141 140 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
142 141 if pairsList:
143 142 self.__selectPairs(pairsList)
144 143
145 144
146 145 elif self.dataIn.type == "Voltage":
147 146
148 147 self.dataOut.flagNoData = True
149 148
150 149 if nFFTPoints == None:
151 150 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
152 151
153 152 if nProfiles == None:
154 153 nProfiles = nFFTPoints
155 154
156 155 if ippFactor == None:
157 156 self.dataOut.ippFactor = 1
158 157
159 158 self.dataOut.nFFTPoints = nFFTPoints
160 159
161 160 if self.buffer is None:
162 161 self.buffer = numpy.zeros((self.dataIn.nChannels,
163 162 nProfiles,
164 163 self.dataIn.nHeights),
165 164 dtype='complex')
166 165
167 166 if self.dataIn.flagDataAsBlock:
168 167 nVoltProfiles = self.dataIn.data.shape[1]
169 168
170 169 if nVoltProfiles == nProfiles:
171 170 self.buffer = self.dataIn.data.copy()
172 171 self.profIndex = nVoltProfiles
173 172
174 173 elif nVoltProfiles < nProfiles:
175 174
176 175 if self.profIndex == 0:
177 176 self.id_min = 0
178 177 self.id_max = nVoltProfiles
179 178
180 179 self.buffer[:, self.id_min:self.id_max,
181 180 :] = self.dataIn.data
182 181 self.profIndex += nVoltProfiles
183 182 self.id_min += nVoltProfiles
184 183 self.id_max += nVoltProfiles
185 184 else:
186 185 raise ValueError("The type object %s has %d profiles, it should have just %d profiles" % (
187 186 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
188 187 self.dataOut.flagNoData = True
189 188 else:
190 189 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
191 190 self.profIndex += 1
192 191
193 192 if self.firstdatatime == None:
194 193 self.firstdatatime = self.dataIn.utctime
195 194
196 195 if self.profIndex == nProfiles:
197 196 self.__updateSpecFromVoltage()
198 197 if pairsList == None:
199 198 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
200 199 else:
201 200 self.dataOut.pairsList = pairsList
202 201 self.__getFft()
203 202 self.dataOut.flagNoData = False
204 203 self.firstdatatime = None
205 204 self.profIndex = 0
206 205 else:
207 206 raise ValueError("The type of input object '{}' is not valid".format(
208 207 self.dataIn.type))
209 208
210 209 def __selectPairs(self, pairsList):
211 210
212 211 if not pairsList:
213 212 return
214 213
215 214 pairs = []
216 215 pairsIndex = []
217 216
218 217 for pair in pairsList:
219 218 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
220 219 continue
221 220 pairs.append(pair)
222 221 pairsIndex.append(pairs.index(pair))
223 222
224 223 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
225 224 self.dataOut.pairsList = pairs
226 225
227 226 return
228 227
229 228 def selectFFTs(self, minFFT, maxFFT ):
230 229 """
231 230 Selects a block of data based on a set of FFT-point values within the range
232 231 minFFT <= FFT <= maxFFT
233 232 """
234 233
235 234 if (minFFT > maxFFT):
236 235 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
237 236
238 237 if (minFFT < self.dataOut.getFreqRange()[0]):
239 238 minFFT = self.dataOut.getFreqRange()[0]
240 239
241 240 if (maxFFT > self.dataOut.getFreqRange()[-1]):
242 241 maxFFT = self.dataOut.getFreqRange()[-1]
243 242
244 243 minIndex = 0
245 244 maxIndex = 0
246 245 FFTs = self.dataOut.getFreqRange()
247 246
248 247 inda = numpy.where(FFTs >= minFFT)
249 248 indb = numpy.where(FFTs <= maxFFT)
250 249
251 250 try:
252 251 minIndex = inda[0][0]
253 252 except:
254 253 minIndex = 0
255 254
256 255 try:
257 256 maxIndex = indb[0][-1]
258 257 except:
259 258 maxIndex = len(FFTs)
260 259
261 260 self.selectFFTsByIndex(minIndex, maxIndex)
262 261
263 262 return 1
264 263
265 264 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
266 265 newheis = numpy.where(
267 266 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
268 267
269 268 if hei_ref != None:
270 269 newheis = numpy.where(self.dataOut.heightList > hei_ref)
271 270
272 271 minIndex = min(newheis[0])
273 272 maxIndex = max(newheis[0])
274 273 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
275 274 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
276 275
277 276 # determine indices
278 277 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
279 278 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
280 279 avg_dB = 10 * \
281 280 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
282 281 beacon_dB = numpy.sort(avg_dB)[-nheis:]
283 282 beacon_heiIndexList = []
284 283 for val in avg_dB.tolist():
285 284 if val >= beacon_dB[0]:
286 285 beacon_heiIndexList.append(avg_dB.tolist().index(val))
287 286
288 287 #data_spc = data_spc[:,:,beacon_heiIndexList]
289 288 data_cspc = None
290 289 if self.dataOut.data_cspc is not None:
291 290 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
292 291 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
293 292
294 293 data_dc = None
295 294 if self.dataOut.data_dc is not None:
296 295 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
297 296 #data_dc = data_dc[:,beacon_heiIndexList]
298 297
299 298 self.dataOut.data_spc = data_spc
300 299 self.dataOut.data_cspc = data_cspc
301 300 self.dataOut.data_dc = data_dc
302 301 self.dataOut.heightList = heightList
303 302 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
304 303
305 304 return 1
306 305
307 306 def selectFFTsByIndex(self, minIndex, maxIndex):
308 307 """
309 308
310 309 """
311 310
312 311 if (minIndex < 0) or (minIndex > maxIndex):
313 312 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
314 313
315 314 if (maxIndex >= self.dataOut.nProfiles):
316 315 maxIndex = self.dataOut.nProfiles-1
317 316
318 317 #Spectra
319 318 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
320 319
321 320 data_cspc = None
322 321 if self.dataOut.data_cspc is not None:
323 322 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
324 323
325 324 data_dc = None
326 325 if self.dataOut.data_dc is not None:
327 326 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
328 327
329 328 self.dataOut.data_spc = data_spc
330 329 self.dataOut.data_cspc = data_cspc
331 330 self.dataOut.data_dc = data_dc
332 331
333 332 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
334 333 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
335 334 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
336 335
337 336 return 1
338 337
339 338 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
340 339 # height range validation
341 340 if minHei == None:
342 341 minHei = self.dataOut.heightList[0]
343 342
344 343 if maxHei == None:
345 344 maxHei = self.dataOut.heightList[-1]
346 345
347 346 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
348 347 print('minHei: %.2f is out of the heights range' % (minHei))
349 348 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
350 349 minHei = self.dataOut.heightList[0]
351 350
352 351 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
353 352 print('maxHei: %.2f is out of the heights range' % (maxHei))
354 353 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
355 354 maxHei = self.dataOut.heightList[-1]
356 355
357 356 # velocity range validation
358 357 velrange = self.dataOut.getVelRange(1)
359 358
360 359 if minVel == None:
361 360 minVel = velrange[0]
362 361
363 362 if maxVel == None:
364 363 maxVel = velrange[-1]
365 364
366 365 if (minVel < velrange[0]) or (minVel > maxVel):
367 366 print('minVel: %.2f is out of the velocity range' % (minVel))
368 367 print('minVel is setting to %.2f' % (velrange[0]))
369 368 minVel = velrange[0]
370 369
371 370 if (maxVel > velrange[-1]) or (maxVel < minVel):
372 371 print('maxVel: %.2f is out of the velocity range' % (maxVel))
373 372 print('maxVel is set to %.2f' % (velrange[-1]))
374 373 maxVel = velrange[-1]
375 374
376 375 # index selection for the height range
377 376 minIndex = 0
378 377 maxIndex = 0
379 378 heights = self.dataOut.heightList
380 379
381 380 inda = numpy.where(heights >= minHei)
382 381 indb = numpy.where(heights <= maxHei)
383 382
384 383 try:
385 384 minIndex = inda[0][0]
386 385 except:
387 386 minIndex = 0
388 387
389 388 try:
390 389 maxIndex = indb[0][-1]
391 390 except:
392 391 maxIndex = len(heights)
393 392
394 393 if (minIndex < 0) or (minIndex > maxIndex):
395 394 raise ValueError("some value in (%d,%d) is not valid" % (
396 395 minIndex, maxIndex))
397 396
398 397 if (maxIndex >= self.dataOut.nHeights):
399 398 maxIndex = self.dataOut.nHeights - 1
400 399
401 400 # index selection for the velocities
402 401 indminvel = numpy.where(velrange >= minVel)
403 402 indmaxvel = numpy.where(velrange <= maxVel)
404 403 try:
405 404 minIndexVel = indminvel[0][0]
406 405 except:
407 406 minIndexVel = 0
408 407
409 408 try:
410 409 maxIndexVel = indmaxvel[0][-1]
411 410 except:
412 411 maxIndexVel = len(velrange)
413 412
414 413 # spectrum selection
415 414 data_spc = self.dataOut.data_spc[:,
416 415 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
417 416 # noise estimation
418 417 noise = numpy.zeros(self.dataOut.nChannels)
419 418
420 419 for channel in range(self.dataOut.nChannels):
421 420 daux = data_spc[channel, :, :]
422 421 sortdata = numpy.sort(daux, axis=None)
423 422 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
424 423
425 424 self.dataOut.noise_estimation = noise.copy()
426 425
427 426 return 1
428 427
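For context on the getNoise() call above: hildebrand_sekhon keeps the lowest sorted spectral values for as long as their spread is still consistent with pure noise after nIncohInt incoherent integrations. The function below is a self-contained sketch of that criterion for illustration only; it is not the library's hildebrand_sekhon and its return convention may differ.

import numpy

def hildebrand_sekhon_sketch(power, navg):
    # power: 1-D array of spectral estimates; navg: number of incoherent integrations
    sortdata = numpy.sort(power, axis=None)
    sump = 0.0
    sumq = 0.0
    j = 0
    for value in sortdata:
        sump += value
        sumq += value ** 2
        j += 1
        if j > 1:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if sumq * j > rtest * sump ** 2:    # spread too large: signal level reached
                sump -= value
                sumq -= value ** 2
                j -= 1
                break
    return sump / j                             # mean of the noise-only samples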
429 428 class removeDC(Operation):
430 429
431 430 def run(self, dataOut, mode=2):
432 431 self.dataOut = dataOut
433 432 jspectra = self.dataOut.data_spc
434 433 jcspectra = self.dataOut.data_cspc
435 434
436 435 num_chan = jspectra.shape[0]
437 436 num_hei = jspectra.shape[2]
438 437
439 438 if jcspectra is not None:
440 439 jcspectraExist = True
441 440 num_pairs = jcspectra.shape[0]
442 441 else:
443 442 jcspectraExist = False
444 443
445 444 freq_dc = int(jspectra.shape[1] / 2)
446 445 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
447 446 ind_vel = ind_vel.astype(int)
448 447
449 448 if ind_vel[0] < 0:
450 449 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
451 450
452 451 if mode == 1:
453 452 jspectra[:, freq_dc, :] = (
454 453 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
455 454
456 455 if jcspectraExist:
457 456 jcspectra[:, freq_dc, :] = (
458 457 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
459 458
460 459 if mode == 2:
461 460
462 461 vel = numpy.array([-2, -1, 1, 2])
463 462 xx = numpy.zeros([4, 4])
464 463
465 464 for fil in range(4):
466 465 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
467 466
468 467 xx_inv = numpy.linalg.inv(xx)
469 468 xx_aux = xx_inv[0, :]
470 469
471 470 for ich in range(num_chan):
472 471 yy = jspectra[ich, ind_vel, :]
473 472 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
474 473
475 474 junkid = jspectra[ich, freq_dc, :] <= 0
476 475 cjunkid = sum(junkid)
477 476
478 477 if cjunkid.any():
479 478 jspectra[ich, freq_dc, junkid.nonzero()] = (
480 479 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
481 480
482 481 if jcspectraExist:
483 482 for ip in range(num_pairs):
484 483 yy = jcspectra[ip, ind_vel, :]
485 484 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
486 485
487 486 self.dataOut.data_spc = jspectra
488 487 self.dataOut.data_cspc = jcspectra
489 488
490 489 return self.dataOut
491 490
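For clarity, mode 2 above replaces the DC bin with the value of a cubic polynomial fitted through the bins at offsets -2, -1, +1 and +2; the first row of the inverse Vandermonde matrix gives the interpolation weights. A minimal numpy sketch of that step for a single toy spectrum:

import numpy

vel = numpy.array([-2.0, -1.0, 1.0, 2.0])
xx = numpy.vstack([vel ** k for k in range(4)]).T    # rows: [1, v, v**2, v**3] per offset
weights = numpy.linalg.inv(xx)[0, :]                 # coefficients that give the value at offset 0

spectrum = numpy.array([5.0, 4.0, 50.0, 4.5, 5.5])   # toy spectrum with a DC spike at index 2
freq_dc = 2
neighbours = spectrum[[freq_dc - 2, freq_dc - 1, freq_dc + 1, freq_dc + 2]]
spectrum[freq_dc] = numpy.dot(weights, neighbours)   # interpolated DC estimate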
492 491 # import matplotlib.pyplot as plt
493 492
494 493 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
495 494 z = (x - a1) / a2
496 495 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
497 496 return y
498 497
499 498
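fit_func above is the bell-shaped model that CleanRayleigh fits to the dB histogram of each spectral bin. The snippet below sketches how such a fit is driven with scipy on synthetic Rayleigh-distributed power; the data, bin count and seed are arbitrary and only for illustration:

import numpy
from scipy.optimize import curve_fit

rng = numpy.random.default_rng(0)
power_db = 10 * numpy.log10(rng.rayleigh(scale=10.0, size=5000) ** 2)
y_dist, edges = numpy.histogram(power_db, bins=60)
x_dist = 0.5 * (edges[:-1] + edges[1:])              # bin centres

mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean) ** 2) / numpy.sum(y_dist))
p0 = [numpy.amax(y_dist), mean, sigma]               # first guess, as in the operation below

gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist, p0=p0)
mode, stdv = gauss_fit[1], gauss_fit[2]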
500 499 class CleanRayleigh(Operation):
501 500
502 501 def __init__(self):
503 502
504 503 Operation.__init__(self)
505 504 self.i=0
506 505 self.isConfig = False
507 506 self.__dataReady = False
508 507 self.__profIndex = 0
509 508 self.byTime = False
510 509 self.byProfiles = False
511 510
512 511 self.bloques = None
513 512 self.bloque0 = None
514 513
515 514 self.index = 0
516 515
517 516 self.buffer = 0
518 517 self.buffer2 = 0
519 518 self.buffer3 = 0
520 519
521 520
522 521 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
523 522
524 523 self.nChannels = dataOut.nChannels
525 524 self.nProf = dataOut.nProfiles
526 525 self.nPairs = dataOut.data_cspc.shape[0]
527 526 self.pairsArray = numpy.array(dataOut.pairsList)
528 527 self.spectra = dataOut.data_spc
529 528 self.cspectra = dataOut.data_cspc
530 529 self.heights = dataOut.heightList #all heights
531 530 self.nHeights = len(self.heights)
532 531 self.min_hei = min_hei
533 532 self.max_hei = max_hei
534 533 if (self.min_hei == None):
535 534 self.min_hei = 0
536 535 if (self.max_hei == None):
537 536 self.max_hei = dataOut.heightList[-1]
538 537 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
539 538 self.heightsClean = self.heights[self.hval] #filtered heights
540 539 self.hval = self.hval[0] # shape (N,), only N elements -> height indices
541 540 self.nHeightsClean = len(self.heightsClean)
542 541 self.channels = dataOut.channelList
543 542 self.nChan = len(self.channels)
544 543 self.nIncohInt = dataOut.nIncohInt
545 544 self.__initime = dataOut.utctime
546 545 self.maxAltInd = self.hval[-1]+1
547 546 self.minAltInd = self.hval[0]
548 547
549 548 self.crosspairs = dataOut.pairsList
550 549 self.nPairs = len(self.crosspairs)
551 550 self.normFactor = dataOut.normFactor
552 551 self.nFFTPoints = dataOut.nFFTPoints
553 552 self.ippSeconds = dataOut.ippSeconds
554 553 self.currentTime = self.__initime
555 554 self.pairsArray = numpy.array(dataOut.pairsList)
556 555 self.factor_stdv = factor_stdv
557 556
558 557 if n != None :
559 558 self.byProfiles = True
560 559 self.nIntProfiles = n
561 560 else:
562 561 self.__integrationtime = timeInterval
563 562
564 563 self.__dataReady = False
565 564 self.isConfig = True
566 565
567 566
568 567
569 568 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
570 569
571 570 if not self.isConfig :
572 571
573 572 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
574 573
575 574 tini=dataOut.utctime
576 575
577 576 if self.byProfiles:
578 577 if self.__profIndex == self.nIntProfiles:
579 578 self.__dataReady = True
580 579 else:
581 580 if (tini - self.__initime) >= self.__integrationtime:
582 581
583 582 self.__dataReady = True
584 583 self.__initime = tini
585 584
586 585 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
587 586
588 587 if self.__dataReady:
589 588
590 589 self.__profIndex = 0
591 590 jspc = self.buffer
592 591 jcspc = self.buffer2
593 592 #jnoise = self.buffer3
594 593 self.buffer = dataOut.data_spc
595 594 self.buffer2 = dataOut.data_cspc
596 595 #self.buffer3 = dataOut.noise
597 596 self.currentTime = dataOut.utctime
598 597 if numpy.any(jspc) :
599 598 #print( jspc.shape, jcspc.shape)
600 599 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
601 600 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
602 601 self.__dataReady = False
603 602 #print( jspc.shape, jcspc.shape)
604 603 dataOut.flagNoData = False
605 604 else:
606 605 dataOut.flagNoData = True
607 606 self.__dataReady = False
608 607 return dataOut
609 608 else:
610 609 #print( len(self.buffer))
611 610 if numpy.any(self.buffer):
612 611 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
613 612 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
614 613 self.buffer3 += dataOut.data_dc
615 614 else:
616 615 self.buffer = dataOut.data_spc
617 616 self.buffer2 = dataOut.data_cspc
618 617 self.buffer3 = dataOut.data_dc
619 618 #print self.index, self.fint
620 619 #print self.buffer2.shape
621 620 dataOut.flagNoData = True ## NOTE: ?? review later
622 621 self.__profIndex += 1
623 622 return dataOut ## NOTE: REV
624 623
625 624
626 625 #index = tini.tm_hour*12+tini.tm_min/5
627 626 '''REVIEW'''
628 627 # jspc = jspc/self.nFFTPoints/self.normFactor
629 628 # jcspc = jcspc/self.nFFTPoints/self.normFactor
630 629
631 630
632 631
633 632 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
634 633 dataOut.data_spc = tmp_spectra
635 634 dataOut.data_cspc = tmp_cspectra
636 635
637 636 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
638 637
639 638 dataOut.data_dc = self.buffer3
640 639 dataOut.nIncohInt *= self.nIntProfiles
641 640 dataOut.utctime = self.currentTime #averaged time
642 641 #print("Time: ",time.localtime(dataOut.utctime))
643 642 # dataOut.data_spc = sat_spectra
644 643 # dataOut.data_cspc = sat_cspectra
645 644 self.buffer = 0
646 645 self.buffer2 = 0
647 646 self.buffer3 = 0
648 647
649 648 return dataOut
650 649
651 650 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
652 651 #print("OP cleanRayleigh")
653 652 #import matplotlib.pyplot as plt
654 653 #for k in range(149):
655 654 #channelsProcssd = []
656 655 #channelA_ok = False
657 656 #rfunc = cspectra.copy() #self.bloques
658 657 rfunc = spectra.copy()
659 658 #rfunc = cspectra
660 659 #val_spc = spectra*0.0 #self.bloque0*0.0
661 660 #val_cspc = cspectra*0.0 #self.bloques*0.0
662 661 #in_sat_spectra = spectra.copy() #self.bloque0
663 662 #in_sat_cspectra = cspectra.copy() #self.bloques
664 663
665 664
666 665 ###ONLY FOR TEST:
667 666 raxs = math.ceil(math.sqrt(self.nPairs))
668 667 caxs = math.ceil(self.nPairs/raxs)
669 668 if self.nPairs <4:
670 669 raxs = 2
671 670 caxs = 2
672 671 #print(raxs, caxs)
673 672 fft_rev = 14 #nFFT to plot
674 673 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
675 674 hei_rev = hei_rev[0]
676 675 #print(hei_rev)
677 676
678 677 #print numpy.absolute(rfunc[:,0,0,14])
679 678
680 679 gauss_fit, covariance = None, None
681 680 for ih in range(self.minAltInd,self.maxAltInd):
682 681 for ifreq in range(self.nFFTPoints):
683 682 '''
684 683 ###ONLY FOR TEST:
685 684 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
686 685 fig, axs = plt.subplots(raxs, caxs)
687 686 fig2, axs2 = plt.subplots(raxs, caxs)
688 687 col_ax = 0
689 688 row_ax = 0
690 689 '''
691 690 #print(self.nPairs)
692 691 for ii in range(self.nChan): #SELF and CROSS channel pairs
693 692 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APPLY ONLY TO CONTIGUOUS PAIRS
694 693 # continue
695 694 # if not self.crosspairs[ii][0] in channelsProcssd:
696 695 # channelA_ok = True
697 696 #print("pair: ",self.crosspairs[ii])
698 697 '''
699 698 ###ONLY FOR TEST:
700 699 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
701 700 col_ax = 0
702 701 row_ax += 1
703 702 '''
704 703 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #Power?
705 704 #print(func2clean.shape)
706 705 val = (numpy.isfinite(func2clean)==True).nonzero()
707 706
708 707 if len(val)>0: #limiter
709 708 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
710 709 if min_val <= -40 :
711 710 min_val = -40
712 711 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
713 712 if max_val >= 200 :
714 713 max_val = 200
715 714 #print min_val, max_val
716 715 step = 1
717 716 #print("Getting bins and the histogram")
718 717 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
719 718 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
720 719 #print(len(y_dist),len(binstep[:-1]))
721 720 #print(row_ax,col_ax, " ..")
722 721 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
723 722 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
724 723 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
725 724 parg = [numpy.amax(y_dist),mean,sigma]
726 725
727 726 newY = None
728 727
729 728 try :
730 729 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
731 730 mode = gauss_fit[1]
732 731 stdv = gauss_fit[2]
733 732 #print(" FIT OK",gauss_fit)
734 733 '''
735 734 ###ONLY FOR TEST:
736 735 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
737 736 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
738 737 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
739 738 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
740 739 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
741 740 '''
742 741 except:
743 742 mode = mean
744 743 stdv = sigma
745 744 #print("FIT FAIL")
746 745 #continue
747 746
748 747
749 748 #print(mode,stdv)
750 749 #Removing echoes greater than mode + std_factor*stdv
751 750 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
752 751 #noval holds the indices to be removed
753 752 #print("Chan ",ii," novals: ",len(noval[0]))
754 753 if len(noval[0]) > 0: #an array of shape (N,) has length N
755 754 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
756 755 #print(novall)
757 756 #print(" ",self.pairsArray[ii])
758 757 #cross_pairs = self.pairsArray[ii]
759 758 #Getting coherent echoes which are removed.
760 759 # if len(novall[0]) > 0:
761 760 #
762 761 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
763 762 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
764 763 # val_cspc[novall[0],ii,ifreq,ih] = 1
765 764 #print("OUT NOVALL 1")
766 765 try:
767 766 pair = (self.channels[ii],self.channels[ii + 1])
768 767 except:
769 768 pair = (99,99)
770 769 #print("par ", pair)
771 770 if ( pair in self.crosspairs):
772 771 q = self.crosspairs.index(pair)
773 772 #print("is here: ", q, (ii,ii + 1))
774 773 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
775 774 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
776 775
777 776 #if channelA_ok:
778 777 #chA = self.channels.index(cross_pairs[0])
779 778 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
780 779 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
781 780 #channelA_ok = False
782 781
783 782 # chB = self.channels.index(cross_pairs[1])
784 783 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
785 784 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
786 785 #
787 786 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
788 787 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
789 788 '''
790 789 ###ONLY FOR TEST:
791 790 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
792 791 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
793 792 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
794 793 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
795 794 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
796 795 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
797 796 '''
798 797 '''
799 798 ###ONLY FOR TEST:
800 799 col_ax += 1 #contador de ploteo columnas
801 800 ##print(col_ax)
802 801 ###ONLY FOR TEST:
803 802 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
804 803 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
805 804 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
806 805 fig.suptitle(title)
807 806 fig2.suptitle(title2)
808 807 plt.show()
809 808 '''
810 809 ##################################################################################################
811 810
812 811 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
813 812 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
814 813 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
815 814 for ih in range(self.nHeights):
816 815 for ifreq in range(self.nFFTPoints):
817 816 for ich in range(self.nChan):
818 817 tmp = spectra[:,ich,ifreq,ih]
819 818 valid = (numpy.isfinite(tmp[:])==True).nonzero()
820 819
821 820 if len(valid[0]) >0 :
822 821 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
823 822
824 823 for icr in range(self.nPairs):
825 824 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
826 825 valid = (numpy.isfinite(tmp)==True).nonzero()
827 826 if len(valid[0]) > 0:
828 827 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
829 828
830 829 return out_spectra, out_cspectra
831 830
832 831 def REM_ISOLATED_POINTS(self,array,rth):
833 832 # import matplotlib.pyplot as plt
834 833 if rth == None :
835 834 rth = 4
836 835 #print("REM ISO")
837 836 num_prof = len(array[0,:,0])
838 837 num_hei = len(array[0,0,:])
839 838 n2d = len(array[:,0,0])
840 839
841 840 for ii in range(n2d) :
842 841 #print ii,n2d
843 842 tmp = array[ii,:,:]
844 843 #print tmp.shape, array[ii,101,:],array[ii,102,:]
845 844
846 845 # fig = plt.figure(figsize=(6,5))
847 846 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
848 847 # ax = fig.add_axes([left, bottom, width, height])
849 848 # x = range(num_prof)
850 849 # y = range(num_hei)
851 850 # cp = ax.contour(y,x,tmp)
852 851 # ax.clabel(cp, inline=True,fontsize=10)
853 852 # plt.show()
854 853
855 854 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
856 855 tmp = numpy.reshape(tmp,num_prof*num_hei)
857 856 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
858 857 indxs2 = (tmp > 0).nonzero()
859 858
860 859 indxs1 = (indxs1[0])
861 860 indxs2 = indxs2[0]
862 861 #indxs1 = numpy.array(indxs1[0])
863 862 #indxs2 = numpy.array(indxs2[0])
864 863 indxs = None
865 864 #print indxs1 , indxs2
866 865 for iv in range(len(indxs2)):
867 866 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
868 867 #print len(indxs2), indv
869 868 if len(indv[0]) > 0 :
870 869 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
871 870 # print indxs
872 871 indxs = indxs[1:]
873 872 #print(indxs, len(indxs))
874 873 if len(indxs) < 4 :
875 874 array[ii,:,:] = 0.
876 875 return
877 876
878 877 xpos = numpy.mod(indxs ,num_hei)
879 878 ypos = (indxs / num_hei)
880 879 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
881 880 #print sx
882 881 xpos = xpos[sx]
883 882 ypos = ypos[sx]
884 883
885 884 # *********************************** Cleaning isolated points **********************************
886 885 ic = 0
887 886 while True :
888 887 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
889 888 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
890 889 #plt.plot(r)
891 890 #plt.show()
892 891 no_coh1 = (numpy.isfinite(r)==True).nonzero()
893 892 no_coh2 = (r <= rth).nonzero()
894 893 #print r, no_coh1, no_coh2
895 894 no_coh1 = numpy.array(no_coh1[0])
896 895 no_coh2 = numpy.array(no_coh2[0])
897 896 no_coh = None
898 897 #print valid1 , valid2
899 898 for iv in range(len(no_coh2)):
900 899 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
901 900 if len(indv[0]) > 0 :
902 901 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
903 902 no_coh = no_coh[1:]
904 903 #print len(no_coh), no_coh
905 904 if len(no_coh) < 4 :
906 905 #print xpos[ic], ypos[ic], ic
907 906 # plt.plot(r)
908 907 # plt.show()
909 908 xpos[ic] = numpy.nan
910 909 ypos[ic] = numpy.nan
911 910
912 911 ic = ic + 1
913 912 if (ic == len(indxs)) :
914 913 break
915 914 #print( xpos, ypos)
916 915
917 916 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
918 917 #print indxs[0]
919 918 if len(indxs[0]) < 4 :
920 919 array[ii,:,:] = 0.
921 920 return
922 921
923 922 xpos = xpos[indxs[0]]
924 923 ypos = ypos[indxs[0]]
925 924 for i in range(0,len(ypos)):
926 925 ypos[i]=int(ypos[i])
927 926 junk = tmp
928 927 tmp = junk*0.0
929 928
930 929 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
931 930 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
932 931
933 932 #print array.shape
934 933 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
935 934 #print tmp.shape
936 935
937 936 # fig = plt.figure(figsize=(6,5))
938 937 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
939 938 # ax = fig.add_axes([left, bottom, width, height])
940 939 # x = range(num_prof)
941 940 # y = range(num_hei)
942 941 # cp = ax.contour(y,x,array[ii,:,:])
943 942 # ax.clabel(cp, inline=True,fontsize=10)
944 943 # plt.show()
945 944 return array
946 945
947 946
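The heart of cleanRayleigh() above is a per-bin outlier test: blocks whose dB power deviates from the fitted mode by more than factor_stdv standard deviations are replaced by the mean of the surviving blocks. A compact, self-contained sketch of that replacement (function name and shapes are illustrative):

import numpy

def replace_outlier_blocks(bin_values, mode, stdv, factor_stdv=2.5):
    # bin_values: samples of one (channel, freq, height) bin across the buffered blocks
    power_db = 10 * numpy.log10(numpy.absolute(bin_values))
    noval = (numpy.abs(power_db - mode) >= factor_stdv * stdv).nonzero()[0]
    cleaned = bin_values.copy()
    if noval.size > 0:
        kept = numpy.delete(bin_values, noval)
        cleaned[noval] = numpy.mean(kept)            # replace outliers by the mean of the rest
    return cleaned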
948 947 class IntegrationFaradaySpectra(Operation):
949 948
950 949 __profIndex = 0
951 950 __withOverapping = False
952 951
953 952 __byTime = False
954 953 __initime = None
955 954 __lastdatatime = None
956 955 __integrationtime = None
957 956
958 957 __buffer_spc = None
959 958 __buffer_cspc = None
960 959 __buffer_dc = None
961 960
962 961 __dataReady = False
963 962
964 963 __timeInterval = None
965 964
966 965 n = None
967 966
968 967 def __init__(self):
969 968
970 969 Operation.__init__(self)
971 970
972 971 def setup(self, dataOut,n=None, timeInterval=None, overlapping=False, DPL=None):
973 972 """
974 973 Set the parameters of the integration class.
975 974
976 975 Inputs:
977 976
978 977 n : Number of incoherent integrations
979 978 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
980 979 overlapping :
981 980
982 981 """
983 982
984 983 self.__initime = None
985 984 self.__lastdatatime = 0
986 985
987 986 self.__buffer_spc = []
988 987 self.__buffer_cspc = []
989 988 self.__buffer_dc = 0
990 989
991 990 self.__profIndex = 0
992 991 self.__dataReady = False
993 992 self.__byTime = False
994 993
995 994 #self.ByLags = dataOut.ByLags ###REDEFINIR
996 995 self.ByLags = False
997 996
998 997 if DPL != None:
999 998 self.DPL=DPL
1000 999 else:
1001 1000 #self.DPL=dataOut.DPL ###REDEFINIR
1002 1001 self.DPL=0
1003 1002
1004 1003 if n is None and timeInterval is None:
1005 1004 raise ValueError("n or timeInterval should be specified ...")
1006 1005
1007 1006 if n is not None:
1008 1007 self.n = int(n)
1009 1008 else:
1010 1009
1011 1010 self.__integrationtime = int(timeInterval)
1012 1011 self.n = None
1013 1012 self.__byTime = True
1014 1013
1015 1014 def putData(self, data_spc, data_cspc, data_dc):
1016 1015 """
1017 1016 Add a profile to __buffer_spc and increase __profIndex by one
1018 1017
1019 1018 """
1020 1019
1021 1020 self.__buffer_spc.append(data_spc)
1022 1021
1023 1022 if data_cspc is None:
1024 1023 self.__buffer_cspc = None
1025 1024 else:
1026 1025 self.__buffer_cspc.append(data_cspc)
1027 1026
1028 1027 if data_dc is None:
1029 1028 self.__buffer_dc = None
1030 1029 else:
1031 1030 self.__buffer_dc += data_dc
1032 1031
1033 1032 self.__profIndex += 1
1034 1033
1035 1034 return
1036 1035
1037 1036 def hildebrand_sekhon_Integration(self,data,navg):
1038 1037
1039 1038 sortdata = numpy.sort(data, axis=None)
1040 1039 sortID=data.argsort()
1041 1040 lenOfData = len(sortdata)
1042 1041 nums_min = lenOfData*0.75
1043 1042 if nums_min <= 5:
1044 1043 nums_min = 5
1045 1044 sump = 0.
1046 1045 sumq = 0.
1047 1046 j = 0
1048 1047 cont = 1
1049 1048 while((cont == 1)and(j < lenOfData)):
1050 1049 sump += sortdata[j]
1051 1050 sumq += sortdata[j]**2
1052 1051 if j > nums_min:
1053 1052 rtest = float(j)/(j-1) + 1.0/navg
1054 1053 if ((sumq*j) > (rtest*sump**2)):
1055 1054 j = j - 1
1056 1055 sump = sump - sortdata[j]
1057 1056 sumq = sumq - sortdata[j]**2
1058 1057 cont = 0
1059 1058 j += 1
1060 1059 #lnoise = sump / j
1061 1060
1062 1061 return j,sortID
1063 1062
1064 1063 def pushData(self):
1065 1064 """
1066 1065 Return the sum of the last profiles and the profiles used in the sum.
1067 1066
1068 1067 Affected:
1069 1068
1070 1069 self.__profileIndex
1071 1070
1072 1071 """
1073 1072 bufferH=None
1074 1073 buffer=None
1075 1074 buffer1=None
1076 1075 buffer_cspc=None
1077 1076 self.__buffer_spc=numpy.array(self.__buffer_spc)
1078 1077 self.__buffer_cspc=numpy.array(self.__buffer_cspc)
1079 1078 freq_dc = int(self.__buffer_spc.shape[2] / 2)
1080 1079 #print("FREQ_DC",freq_dc,self.__buffer_spc.shape,self.nHeights)
1081 1080 for k in range(7,self.nHeights):
1082 1081 buffer_cspc=numpy.copy(self.__buffer_cspc[:,:,:,k])
1083 1082 outliers_IDs_cspc=[]
1084 1083 cspc_outliers_exist=False
1085 1084 for i in range(self.nChannels):#dataOut.nChannels):
1086 1085
1087 1086 buffer1=numpy.copy(self.__buffer_spc[:,i,:,k])
1088 1087 indexes=[]
1089 1088 #sortIDs=[]
1090 1089 outliers_IDs=[]
1091 1090
1092 1091 for j in range(self.nProfiles):
1093 1092 # if i==0 and j==freq_dc: #NOT CONSIDERING DC PROFILE AT CHANNEL 0
1094 1093 # continue
1095 1094 # if i==1 and j==0: #NOT CONSIDERING DC PROFILE AT CHANNEL 1
1096 1095 # continue
1097 1096 buffer=buffer1[:,j]
1098 1097 index,sortID=self.hildebrand_sekhon_Integration(buffer,1)
1099 1098
1100 1099 indexes.append(index)
1101 1100 #sortIDs.append(sortID)
1102 1101 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1103 1102
1104 1103 outliers_IDs=numpy.array(outliers_IDs)
1105 1104 outliers_IDs=outliers_IDs.ravel()
1106 1105 outliers_IDs=numpy.unique(outliers_IDs)
1107 1106 outliers_IDs=outliers_IDs.astype(numpy.dtype('int64'))
1108 1107 indexes=numpy.array(indexes)
1109 1108 indexmin=numpy.min(indexes)
1110 1109
1111 1110 if indexmin != buffer1.shape[0]:
1112 1111 cspc_outliers_exist=True
1113 1112 ###sortdata=numpy.sort(buffer1,axis=0)
1114 1113 ###avg2=numpy.mean(sortdata[:indexmin,:],axis=0)
1115 1114 lt=outliers_IDs
1116 1115 avg=numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt],:],axis=0)
1117 1116
1118 1117 for p in list(outliers_IDs):
1119 1118 buffer1[p,:]=avg
1120 1119
1121 1120 self.__buffer_spc[:,i,:,k]=numpy.copy(buffer1)
1122 1121 ###cspc IDs
1123 1122 #indexmin_cspc+=indexmin_cspc
1124 1123 outliers_IDs_cspc=numpy.append(outliers_IDs_cspc,outliers_IDs)
1125 1124
1126 1125 #if not breakFlag:
1127 1126 outliers_IDs_cspc=outliers_IDs_cspc.astype(numpy.dtype('int64'))
1128 1127 if cspc_outliers_exist:
1129 1128 #sortdata=numpy.sort(buffer_cspc,axis=0)
1130 1129 #avg=numpy.mean(sortdata[:indexmin_cpsc,:],axis=0)
1131 1130 lt=outliers_IDs_cspc
1132 1131
1133 1132 avg=numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt],:],axis=0)
1134 1133 for p in list(outliers_IDs_cspc):
1135 1134 buffer_cspc[p,:]=avg
1136 1135
1137 1136 self.__buffer_cspc[:,:,:,k]=numpy.copy(buffer_cspc)
1138 1137 #else:
1139 1138 #break
1140 1139
1141 1140
1142 1141
1143 1142
1144 1143 buffer=None
1145 1144 bufferH=None
1146 1145 buffer1=None
1147 1146 buffer_cspc=None
1148 1147
1149 1148 #print("cpsc",self.__buffer_cspc[:,0,0,0,0])
1150 1149 #print(self.__profIndex)
1151 1150 #exit()
1152 1151
1153 1152 buffer=None
1154 1153 #print(self.__buffer_spc[:,1,3,20,0])
1155 1154 #print(self.__buffer_spc[:,1,5,37,0])
1156 1155 data_spc = numpy.sum(self.__buffer_spc,axis=0)
1157 1156 data_cspc = numpy.sum(self.__buffer_cspc,axis=0)
1158 1157
1159 1158 #print(numpy.shape(data_spc))
1160 1159 #data_spc[1,4,20,0]=numpy.nan
1161 1160
1162 1161 #data_cspc = self.__buffer_cspc
1163 1162 data_dc = self.__buffer_dc
1164 1163 n = self.__profIndex
1165 1164
1166 1165 self.__buffer_spc = []
1167 1166 self.__buffer_cspc = []
1168 1167 self.__buffer_dc = 0
1169 1168 self.__profIndex = 0
1170 1169
1171 1170 return data_spc, data_cspc, data_dc, n
1172 1171
1173 1172 def byProfiles(self, *args):
1174 1173
1175 1174 self.__dataReady = False
1176 1175 avgdata_spc = None
1177 1176 avgdata_cspc = None
1178 1177 avgdata_dc = None
1179 1178
1180 1179 self.putData(*args)
1181 1180
1182 1181 if self.__profIndex == self.n:
1183 1182
1184 1183 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1185 1184 self.n = n
1186 1185 self.__dataReady = True
1187 1186
1188 1187 return avgdata_spc, avgdata_cspc, avgdata_dc
1189 1188
1190 1189 def byTime(self, datatime, *args):
1191 1190
1192 1191 self.__dataReady = False
1193 1192 avgdata_spc = None
1194 1193 avgdata_cspc = None
1195 1194 avgdata_dc = None
1196 1195
1197 1196 self.putData(*args)
1198 1197
1199 1198 if (datatime - self.__initime) >= self.__integrationtime:
1200 1199 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1201 1200 self.n = n
1202 1201 self.__dataReady = True
1203 1202
1204 1203 return avgdata_spc, avgdata_cspc, avgdata_dc
1205 1204
1206 1205 def integrate(self, datatime, *args):
1207 1206
1208 1207 if self.__profIndex == 0:
1209 1208 self.__initime = datatime
1210 1209
1211 1210 if self.__byTime:
1212 1211 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1213 1212 datatime, *args)
1214 1213 else:
1215 1214 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1216 1215
1217 1216 if not self.__dataReady:
1218 1217 return None, None, None, None
1219 1218
1220 1219 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1221 1220
1222 1221 def run(self, dataOut, n=None, DPL = None,timeInterval=None, overlapping=False):
1223 1222 if n == 1:
1224 1223 return dataOut
1225 1224
1226 1225 dataOut.flagNoData = True
1227 1226
1228 1227 if not self.isConfig:
1229 1228 self.setup(dataOut, n, timeInterval, overlapping,DPL )
1230 1229 self.isConfig = True
1231 1230
1232 1231 if not self.ByLags:
1233 1232 self.nProfiles=dataOut.nProfiles
1234 1233 self.nChannels=dataOut.nChannels
1235 1234 self.nHeights=dataOut.nHeights
1236 1235 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1237 1236 dataOut.data_spc,
1238 1237 dataOut.data_cspc,
1239 1238 dataOut.data_dc)
1240 1239 else:
1241 1240 self.nProfiles=dataOut.nProfiles
1242 1241 self.nChannels=dataOut.nChannels
1243 1242 self.nHeights=dataOut.nHeights
1244 1243 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1245 1244 dataOut.dataLag_spc,
1246 1245 dataOut.dataLag_cspc,
1247 1246 dataOut.dataLag_dc)
1248 1247
1249 1248 if self.__dataReady:
1250 1249
1251 1250 if not self.ByLags:
1252 1251
1253 1252 dataOut.data_spc = numpy.squeeze(avgdata_spc)
1254 1253 dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
1255 1254 dataOut.data_dc = avgdata_dc
1256 1255 else:
1257 1256 dataOut.dataLag_spc = avgdata_spc
1258 1257 dataOut.dataLag_cspc = avgdata_cspc
1259 1258 dataOut.dataLag_dc = avgdata_dc
1260 1259
1261 1260 dataOut.data_spc=dataOut.dataLag_spc[:,:,:,dataOut.LagPlot]
1262 1261 dataOut.data_cspc=dataOut.dataLag_cspc[:,:,:,dataOut.LagPlot]
1263 1262 dataOut.data_dc=dataOut.dataLag_dc[:,:,dataOut.LagPlot]
1264 1263
1265 1264
1266 1265 dataOut.nIncohInt *= self.n
1267 1266 dataOut.utctime = avgdatatime
1268 1267 dataOut.flagNoData = False
1269 1268
1270 1269 return dataOut
1271 1270
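In normal use the framework calls run() once per incoming spectra block; the loop below is only a hedged illustration of how the operation accumulates until n blocks (or timeInterval seconds) are available. spectra_blocks and process() are placeholders, not project names:

integrator = IntegrationFaradaySpectra()
for dataOut in spectra_blocks:                   # spectra_blocks: iterable of Spectra objects (assumed)
    dataOut = integrator.run(dataOut, n=10)      # alternatively timeInterval=... instead of n
    if not dataOut.flagNoData:                   # True only when the integrated block is ready
        process(dataOut)                         # placeholder for downstream operations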
1272 1271 class removeInterference(Operation):
1273 1272
1274 1273 def removeInterference2(self):
1275 1274
1276 1275 cspc = self.dataOut.data_cspc
1277 1276 spc = self.dataOut.data_spc
1278 1277 Heights = numpy.arange(cspc.shape[2])
1279 1278 realCspc = numpy.abs(cspc)
1280 1279
1281 1280 for i in range(cspc.shape[0]):
1282 1281 LinePower= numpy.sum(realCspc[i], axis=0)
1283 1282 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1284 1283 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1285 1284 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1286 1285 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1287 1286 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1288 1287
1289 1288
1290 1289 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1291 1290 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1292 1291 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1293 1292 cspc[i,InterferenceRange,:] = numpy.NaN
1294 1293
1295 1294 self.dataOut.data_cspc = cspc
1296 1295
1297 1296 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
1298 1297
1299 1298 jspectra = self.dataOut.data_spc
1300 1299 jcspectra = self.dataOut.data_cspc
1301 1300 jnoise = self.dataOut.getNoise()
1302 1301 num_incoh = self.dataOut.nIncohInt
1303 1302
1304 1303 num_channel = jspectra.shape[0]
1305 1304 num_prof = jspectra.shape[1]
1306 1305 num_hei = jspectra.shape[2]
1307 1306
1308 1307 # hei_interf
1309 1308 if hei_interf is None:
1310 1309 count_hei = int(num_hei / 2)
1311 1310 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1312 1311 hei_interf = numpy.asarray(hei_interf)[0]
1313 1312 # nhei_interf
1314 1313 if (nhei_interf == None):
1315 1314 nhei_interf = 5
1316 1315 if (nhei_interf < 1):
1317 1316 nhei_interf = 1
1318 1317 if (nhei_interf > count_hei):
1319 1318 nhei_interf = count_hei
1320 1319 if (offhei_interf == None):
1321 1320 offhei_interf = 0
1322 1321
1323 1322 ind_hei = list(range(num_hei))
1324 1323 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1325 1324 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1326 1325 mask_prof = numpy.asarray(list(range(num_prof)))
1327 1326 num_mask_prof = mask_prof.size
1328 1327 comp_mask_prof = [0, num_prof / 2]
1329 1328
1330 1329 # noise_exist: determines whether jnoise has been defined and holds the noise information for each channel
1331 1330 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1332 1331 jnoise = numpy.nan
1333 1332 noise_exist = jnoise[0] < numpy.Inf
1334 1333
1335 1334 # Interference removal subroutine
1336 1335 for ich in range(num_channel):
1337 1336 # Sort the spectra by power (lowest to highest)
1338 1337 power = jspectra[ich, mask_prof, :]
1339 1338 power = power[:, hei_interf]
1340 1339 power = power.sum(axis=0)
1341 1340 psort = power.ravel().argsort()
1342 1341
1343 1342 # Estimate the average interference in the power spectra using
1344 1343 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1345 1344 offhei_interf, nhei_interf + offhei_interf))]]]
1346 1345
1347 1346 if noise_exist:
1348 1347 # tmp_noise = jnoise[ich] / num_prof
1349 1348 tmp_noise = jnoise[ich]
1350 1349 junkspc_interf = junkspc_interf - tmp_noise
1351 1350 #junkspc_interf[:,comp_mask_prof] = 0
1352 1351
1353 1352 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1354 1353 jspc_interf = jspc_interf.transpose()
1355 1354 # Computing the average interference spectrum
1356 1355 noiseid = numpy.where(
1357 1356 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1358 1357 noiseid = noiseid[0]
1359 1358 cnoiseid = noiseid.size
1360 1359 interfid = numpy.where(
1361 1360 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1362 1361 interfid = interfid[0]
1363 1362 cinterfid = interfid.size
1364 1363
1365 1364 if (cnoiseid > 0):
1366 1365 jspc_interf[noiseid] = 0
1367 1366
1368 1367 # Expanding the profiles to be cleaned
1369 1368 if (cinterfid > 0):
1370 1369 new_interfid = (
1371 1370 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1372 1371 new_interfid = numpy.asarray(new_interfid)
1373 1372 new_interfid = {x for x in new_interfid}
1374 1373 new_interfid = numpy.array(list(new_interfid))
1375 1374 new_cinterfid = new_interfid.size
1376 1375 else:
1377 1376 new_cinterfid = 0
1378 1377
1379 1378 for ip in range(new_cinterfid):
1380 1379 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1381 1380 jspc_interf[new_interfid[ip]
1382 1381 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1383 1382
1384 1383 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1385 1384 ind_hei] - jspc_interf # fix indices
1386 1385
1387 1386 # Removing the interference at the point of strongest interference
1388 1387 ListAux = jspc_interf[mask_prof].tolist()
1389 1388 maxid = ListAux.index(max(ListAux))
1390 1389
1391 1390 if cinterfid > 0:
1392 1391 for ip in range(cinterfid * (interf == 2) - 1):
1393 1392 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1394 1393 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1395 1394 cind = len(ind)
1396 1395
1397 1396 if (cind > 0):
1398 1397 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1399 1398 (1 + (numpy.random.uniform(cind) - 0.5) /
1400 1399 numpy.sqrt(num_incoh))
1401 1400
1402 1401 ind = numpy.array([-2, -1, 1, 2])
1403 1402 xx = numpy.zeros([4, 4])
1404 1403
1405 1404 for id1 in range(4):
1406 1405 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1407 1406
1408 1407 xx_inv = numpy.linalg.inv(xx)
1409 1408 xx = xx_inv[:, 0]
1410 1409 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1411 1410 yy = jspectra[ich, mask_prof[ind], :]
1412 1411 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1413 1412 yy.transpose(), xx)
1414 1413
1415 1414 indAux = (jspectra[ich, :, :] < tmp_noise *
1416 1415 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1417 1416 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1418 1417 (1 - 1 / numpy.sqrt(num_incoh))
1419 1418
1420 1419 # Interference removal in the cross-spectra
1421 1420 if jcspectra is None:
1422 1421 return jspectra, jcspectra
1423 1422 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1424 1423 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1425 1424
1426 1425 for ip in range(num_pairs):
1427 1426
1428 1427 #-------------------------------------------
1429 1428
1430 1429 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1431 1430 cspower = cspower[:, hei_interf]
1432 1431 cspower = cspower.sum(axis=0)
1433 1432
1434 1433 cspsort = cspower.ravel().argsort()
1435 1434 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1436 1435 offhei_interf, nhei_interf + offhei_interf))]]]
1437 1436 junkcspc_interf = junkcspc_interf.transpose()
1438 1437 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1439 1438
1440 1439 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1441 1440
1442 1441 median_real = int(numpy.median(numpy.real(
1443 1442 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1444 1443 median_imag = int(numpy.median(numpy.imag(
1445 1444 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1446 1445 comp_mask_prof = [int(e) for e in comp_mask_prof]
1447 1446 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
1448 1447 median_real, median_imag)
1449 1448
1450 1449 for iprof in range(num_prof):
1451 1450 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1452 1451 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1453 1452
1454 1453 # Removing the interference
1455 1454 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1456 1455 :, ind_hei] - jcspc_interf
1457 1456
1458 1457 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1459 1458 maxid = ListAux.index(max(ListAux))
1460 1459
1461 1460 ind = numpy.array([-2, -1, 1, 2])
1462 1461 xx = numpy.zeros([4, 4])
1463 1462
1464 1463 for id1 in range(4):
1465 1464 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1466 1465
1467 1466 xx_inv = numpy.linalg.inv(xx)
1468 1467 xx = xx_inv[:, 0]
1469 1468
1470 1469 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1471 1470 yy = jcspectra[ip, mask_prof[ind], :]
1472 1471 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1473 1472
1474 1473 # Save results
1475 1474 self.dataOut.data_spc = jspectra
1476 1475 self.dataOut.data_cspc = jcspectra
1477 1476
1478 1477 return 1
1479 1478
1480 1479 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1481 1480
1482 1481 self.dataOut = dataOut
1483 1482
1484 1483 if mode == 1:
1485 1484 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1486 1485 elif mode == 2:
1487 1486 self.removeInterference2()
1488 1487
1489 1488 return self.dataOut
1490 1489
1491 1490
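The essential idea of removeInterference() above is to estimate an average interference spectrum from a few of the lowest-power heights and subtract it from every height of the channel. A stripped-down sketch of that step for one channel, without the noise handling of the full routine:

import numpy

def subtract_interference_sketch(spc, nhei_interf=5):
    # spc: (num_prof, num_hei) power spectrum of a single channel
    power_per_height = spc.sum(axis=0)
    quiet = numpy.argsort(power_per_height)[:nhei_interf]   # lowest-power heights
    interf_profile = spc[:, quiet].mean(axis=1)             # average interference spectrum
    return spc - interf_profile[:, None]                    # subtract it from all heights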
1492 1491 class IncohInt(Operation):
1493 1492
1494 1493 __profIndex = 0
1495 1494 __withOverapping = False
1496 1495
1497 1496 __byTime = False
1498 1497 __initime = None
1499 1498 __lastdatatime = None
1500 1499 __integrationtime = None
1501 1500
1502 1501 __buffer_spc = None
1503 1502 __buffer_cspc = None
1504 1503 __buffer_dc = None
1505 1504
1506 1505 __dataReady = False
1507 1506
1508 1507 __timeInterval = None
1509 1508
1510 1509 n = None
1511 1510
1512 1511 def __init__(self):
1513 1512
1514 1513 Operation.__init__(self)
1515 1514
1516 1515 def setup(self, n=None, timeInterval=None, overlapping=False):
1517 1516 """
1518 1517 Set the parameters of the integration class.
1519 1518
1520 1519 Inputs:
1521 1520
1522 1521 n : Number of incoherent integrations
1523 1522 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
1524 1523 overlapping :
1525 1524
1526 1525 """
1527 1526
1528 1527 self.__initime = None
1529 1528 self.__lastdatatime = 0
1530 1529
1531 1530 self.__buffer_spc = 0
1532 1531 self.__buffer_cspc = 0
1533 1532 self.__buffer_dc = 0
1534 1533
1535 1534 self.__profIndex = 0
1536 1535 self.__dataReady = False
1537 1536 self.__byTime = False
1538 1537
1539 1538 if n is None and timeInterval is None:
1540 1539 raise ValueError("n or timeInterval should be specified ...")
1541 1540
1542 1541 if n is not None:
1543 1542 self.n = int(n)
1544 1543 else:
1545 1544
1546 1545 self.__integrationtime = int(timeInterval)
1547 1546 self.n = None
1548 1547 self.__byTime = True
1549 1548
1550 1549 def putData(self, data_spc, data_cspc, data_dc):
1551 1550 """
1552 1551 Add a profile to __buffer_spc and increase __profIndex by one
1553 1552
1554 1553 """
1555 1554
1556 1555 self.__buffer_spc += data_spc
1557 1556
1558 1557 if data_cspc is None:
1559 1558 self.__buffer_cspc = None
1560 1559 else:
1561 1560 self.__buffer_cspc += data_cspc
1562 1561
1563 1562 if data_dc is None:
1564 1563 self.__buffer_dc = None
1565 1564 else:
1566 1565 self.__buffer_dc += data_dc
1567 1566
1568 1567 self.__profIndex += 1
1569 1568
1570 1569 return
1571 1570
1572 1571 def pushData(self):
1573 1572 """
1574 1573 Return the sum of the last profiles and the profiles used in the sum.
1575 1574
1576 1575 Affected:
1577 1576
1578 1577 self.__profileIndex
1579 1578
1580 1579 """
1581 1580
1582 1581 data_spc = self.__buffer_spc
1583 1582 data_cspc = self.__buffer_cspc
1584 1583 data_dc = self.__buffer_dc
1585 1584 n = self.__profIndex
1586 1585
1587 1586 self.__buffer_spc = 0
1588 1587 self.__buffer_cspc = 0
1589 1588 self.__buffer_dc = 0
1590 1589 self.__profIndex = 0
1591 1590
1592 1591 return data_spc, data_cspc, data_dc, n
1593 1592
1594 1593 def byProfiles(self, *args):
1595 1594
1596 1595 self.__dataReady = False
1597 1596 avgdata_spc = None
1598 1597 avgdata_cspc = None
1599 1598 avgdata_dc = None
1600 1599
1601 1600 self.putData(*args)
1602 1601
1603 1602 if self.__profIndex == self.n:
1604 1603
1605 1604 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1606 1605 self.n = n
1607 1606 self.__dataReady = True
1608 1607
1609 1608 return avgdata_spc, avgdata_cspc, avgdata_dc
1610 1609
1611 1610 def byTime(self, datatime, *args):
1612 1611
1613 1612 self.__dataReady = False
1614 1613 avgdata_spc = None
1615 1614 avgdata_cspc = None
1616 1615 avgdata_dc = None
1617 1616
1618 1617 self.putData(*args)
1619 1618
1620 1619 if (datatime - self.__initime) >= self.__integrationtime:
1621 1620 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1622 1621 self.n = n
1623 1622 self.__dataReady = True
1624 1623
1625 1624 return avgdata_spc, avgdata_cspc, avgdata_dc
1626 1625
1627 1626 def integrate(self, datatime, *args):
1628 1627
1629 1628 if self.__profIndex == 0:
1630 1629 self.__initime = datatime
1631 1630
1632 1631 if self.__byTime:
1633 1632 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1634 1633 datatime, *args)
1635 1634 else:
1636 1635 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1637 1636
1638 1637 if not self.__dataReady:
1639 1638 return None, None, None, None
1640 1639
1641 1640 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1642 1641
1643 1642 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1644 1643 if n == 1:
1645 1644 return dataOut
1646 1645
1647 1646 dataOut.flagNoData = True
1648 1647
1649 1648 if not self.isConfig:
1650 1649 self.setup(n, timeInterval, overlapping)
1651 1650 self.isConfig = True
1652 1651
1653 1652 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1654 1653 dataOut.data_spc,
1655 1654 dataOut.data_cspc,
1656 1655 dataOut.data_dc)
1657 1656
1658 1657 if self.__dataReady:
1659 1658
1660 1659 dataOut.data_spc = avgdata_spc
1661 1660 dataOut.data_cspc = avgdata_cspc
1662 1661 dataOut.data_dc = avgdata_dc
1663 1662 dataOut.nIncohInt *= self.n
1664 1663 dataOut.utctime = avgdatatime
1665 1664 dataOut.flagNoData = False
1666 1665
1667 1666 return dataOut
1668 1667
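# Minimal numpy-only sketch (illustration, not used by the pipeline) of the arithmetic
# behind the incoherent integration above: power spectra are summed over n dumps and
# nIncohInt is later scaled by that n. The name spc_dumps is made up for this example.
def _incoherent_integration_sketch(spc_dumps):
    """spc_dumps: real array (n_dumps, n_channels, n_fft, n_heights) of power spectra."""
    import numpy
    avgdata_spc = numpy.sum(spc_dumps, axis=0)   # same accumulation done profile by profile in putData()
    n = spc_dumps.shape[0]                       # value returned by pushData() and used to scale nIncohInt
    return avgdata_spc, n
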
1669 1668 class dopplerFlip(Operation):
1670 1669
1671 1670 def run(self, dataOut):
1672 1671 # array 1: (num_chan, num_profiles, num_heights)
1673 1672 self.dataOut = dataOut
1674 1673 # JULIA oblique, index 2
1675 1674 # array 2: (num_profiles, num_heights)
1676 1675 jspectra = self.dataOut.data_spc[2]
1677 1676 jspectra_tmp = numpy.zeros(jspectra.shape)
1678 1677 num_profiles = jspectra.shape[0]
1679 1678 freq_dc = int(num_profiles / 2)
1680 1679 # Flip the Doppler axis using a for loop
1681 1680 for j in range(num_profiles):
1682 1681 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1683 1682 # Restore the DC profile and the one immediately before it from the original
1684 1683 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1685 1684 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1686 1685 # the modified channel is written back into the channel array
1687 1686 self.dataOut.data_spc[2] = jspectra_tmp
1688 1687
1689 1688 return self.dataOut
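
# Minimal numpy sketch (illustration only) of the same Doppler flip done by dopplerFlip
# above: reverse the spectrum of one channel along the profile axis and restore the two
# bins around DC from the original array.
def _doppler_flip_sketch(jspectra):
    """jspectra: real array (num_profiles, num_heights) for a single channel."""
    num_profiles = jspectra.shape[0]
    freq_dc = num_profiles // 2
    flipped = jspectra[::-1].copy()              # equivalent to the explicit for loop above
    flipped[freq_dc - 1] = jspectra[freq_dc - 1] # keep the DC profile and its neighbour unchanged
    flipped[freq_dc] = jspectra[freq_dc]
    return flipped
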
@@ -1,1638 +1,1758
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8 import numpy
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList=None):
59 59 self.channelList = channelList
60 60 if self.channelList == None:
61 61 print("Missing channelList")
62 62 return dataOut
63 63 channelIndexList = []
64 64
65 65 if type(dataOut.channelList) is not list: #read the array from HDF5
66 66 try:
67 67 dataOut.channelList = dataOut.channelList.tolist()
68 68 except Exception as e:
69 69 print("Select Channels: ",e)
70 70 for channel in self.channelList:
71 71 if channel not in dataOut.channelList:
72 72 raise ValueError("Channel %d is not in %s" %(channel, str(dataOut.channelList)))
73 73
74 74 index = dataOut.channelList.index(channel)
75 75 channelIndexList.append(index)
76 76 dataOut = self.selectChannelsByIndex(dataOut,channelIndexList)
77 77 return dataOut
78 78
79 79 def selectChannelsByIndex(self, dataOut, channelIndexList):
80 80 """
81 81 Select a block of data by channel according to channelIndexList
82 82
83 83 Input:
84 84 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
85 85
86 86 Affected:
87 87 dataOut.data
88 88 dataOut.channelIndexList
89 89 dataOut.nChannels
90 90 dataOut.m_ProcessingHeader.totalSpectra
91 91 dataOut.systemHeaderObj.numChannels
92 92 dataOut.m_ProcessingHeader.blockSize
93 93
94 94 Return:
95 95 None
96 96 """
97 97 #print("selectChannelsByIndex")
98 98 # for channelIndex in channelIndexList:
99 99 # if channelIndex not in dataOut.channelIndexList:
100 100 # raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
101 101
102 102 if dataOut.type == 'Voltage':
103 103 if dataOut.flagDataAsBlock:
104 104 """
105 105 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
106 106 """
107 107 data = dataOut.data[channelIndexList,:,:]
108 108 else:
109 109 data = dataOut.data[channelIndexList,:]
110 110
111 111 dataOut.data = data
112 112 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
113 113 dataOut.channelList = range(len(channelIndexList))
114 114
115 115 elif dataOut.type == 'Spectra':
116 116 if hasattr(dataOut, 'data_spc'):
117 117 if dataOut.data_spc is None:
118 118 raise ValueError("data_spc is None")
119 119 return dataOut
120 120 else:
121 121 data_spc = dataOut.data_spc[channelIndexList, :]
122 122 dataOut.data_spc = data_spc
123 123
124 124 # if hasattr(dataOut, 'data_dc') :# and
125 125 # if dataOut.data_dc is None:
126 126 # raise ValueError("data_dc is None")
127 127 # return dataOut
128 128 # else:
129 129 # data_dc = dataOut.data_dc[channelIndexList, :]
130 130 # dataOut.data_dc = data_dc
131 131 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
132 132 dataOut.channelList = channelIndexList
133 133 dataOut = self.__selectPairsByChannel(dataOut,channelIndexList)
134 134
135 135 return dataOut
136 136
137 137 def __selectPairsByChannel(self, dataOut, channelList=None):
138 138 #print("__selectPairsByChannel")
139 139 if channelList == None:
140 140 return
141 141
142 142 pairsIndexListSelected = []
143 143 for pairIndex in dataOut.pairsIndexList:
144 144 # First pair
145 145 if dataOut.pairsList[pairIndex][0] not in channelList:
146 146 continue
147 147 # Second pair
148 148 if dataOut.pairsList[pairIndex][1] not in channelList:
149 149 continue
150 150
151 151 pairsIndexListSelected.append(pairIndex)
152 152 if not pairsIndexListSelected:
153 153 dataOut.data_cspc = None
154 154 dataOut.pairsList = []
155 155 return
156 156
157 157 dataOut.data_cspc = dataOut.data_cspc[pairsIndexListSelected]
158 158 dataOut.pairsList = [dataOut.pairsList[i]
159 159 for i in pairsIndexListSelected]
160 160
161 161 return dataOut
162 162
163 163 class selectHeights(Operation):
164 164
165 165 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
166 166 """
167 167 Select a block of data by height values within the range
168 168 minHei <= height <= maxHei
169 169
170 170 Input:
171 171 minHei : minimum height to consider
172 172 maxHei : maximum height to consider
173 173
174 174 Affected:
175 175 Several values are changed indirectly through the selectHeightsByIndex method
176 176
177 177 Return:
178 178 dataOut restricted to the selected height range
179 179 """
180 180
181 dataOut = dataOut
181 self.dataOut = dataOut
182 182
183 183 if minHei and maxHei:
184 184
185 185 if (minHei < dataOut.heightList[0]):
186 186 minHei = dataOut.heightList[0]
187 187
188 188 if (maxHei > dataOut.heightList[-1]):
189 189 maxHei = dataOut.heightList[-1]
190 190
191 191 minIndex = 0
192 192 maxIndex = 0
193 193 heights = dataOut.heightList
194 194
195 195 inda = numpy.where(heights >= minHei)
196 196 indb = numpy.where(heights <= maxHei)
197 197
198 198 try:
199 199 minIndex = inda[0][0]
200 200 except:
201 201 minIndex = 0
202 202
203 203 try:
204 204 maxIndex = indb[0][-1]
205 205 except:
206 206 maxIndex = len(heights)
207 207
208 208 self.selectHeightsByIndex(minIndex, maxIndex)
209 209
210 return self.dataOut
210 return dataOut
211 211
212 212 def selectHeightsByIndex(self, minIndex, maxIndex):
213 213 """
214 214 Selecciona un bloque de datos en base a un grupo indices de alturas segun el rango
215 215 minIndex <= index <= maxIndex
216 216
217 217 Input:
218 218 minIndex : valor de indice minimo de altura a considerar
219 219 maxIndex : valor de indice maximo de altura a considerar
220 220
221 221 Affected:
222 222 self.dataOut.data
223 223 self.dataOut.heightList
224 224
225 225 Return:
226 226 1 si el metodo se ejecuto con exito caso contrario devuelve 0
227 227 """
228 228
229 229 if self.dataOut.type == 'Voltage':
230 230 if (minIndex < 0) or (minIndex > maxIndex):
231 231 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
232 232
233 233 if (maxIndex >= self.dataOut.nHeights):
234 234 maxIndex = self.dataOut.nHeights
235 235
236 236 #voltage
237 237 if self.dataOut.flagDataAsBlock:
238 238 """
239 239 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
240 240 """
241 241 data = self.dataOut.data[:,:, minIndex:maxIndex]
242 242 else:
243 243 data = self.dataOut.data[:, minIndex:maxIndex]
244 244
245 245 # firstHeight = self.dataOut.heightList[minIndex]
246 246
247 247 self.dataOut.data = data
248 248 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
249 249
250 250 if self.dataOut.nHeights <= 1:
251 251 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
252 252 elif self.dataOut.type == 'Spectra':
253 253 if (minIndex < 0) or (minIndex > maxIndex):
254 254 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
255 255 minIndex, maxIndex))
256 256
257 257 if (maxIndex >= self.dataOut.nHeights):
258 258 maxIndex = self.dataOut.nHeights - 1
259 259
260 260 # Spectra
261 261 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
262 262
263 263 data_cspc = None
264 264 if self.dataOut.data_cspc is not None:
265 265 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
266 266
267 267 data_dc = None
268 268 if self.dataOut.data_dc is not None:
269 269 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
270 270
271 271 self.dataOut.data_spc = data_spc
272 272 self.dataOut.data_cspc = data_cspc
273 273 self.dataOut.data_dc = data_dc
274 274
275 275 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
276 276
277 277 return 1
278 278
279 279
280 280 class filterByHeights(Operation):
281 281
282 282 def run(self, dataOut, window):
283 283
284 284 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
285 285
286 286 if window == None:
287 287 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
288 288
289 289 newdelta = deltaHeight * window
290 290 r = dataOut.nHeights % window
291 291 newheights = (dataOut.nHeights-r)/window
292 292
293 293 if newheights <= 1:
294 294 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
295 295
296 296 if dataOut.flagDataAsBlock:
297 297 """
298 298 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
299 299 """
300 300 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
301 301 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
302 302 buffer = numpy.sum(buffer,3)
303 303
304 304 else:
305 305 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
306 306 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
307 307 buffer = numpy.sum(buffer,2)
308 308
309 309 dataOut.data = buffer
310 310 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
311 311 dataOut.windowOfFilter = window
312 312
313 313 return dataOut
314 314
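# Minimal sketch (illustration only) of the height decimation performed by filterByHeights:
# trailing gates that do not fill a whole window are dropped, the height axis is reshaped
# to (new_heights, window) and each window is summed.
def _filter_heights_sketch(data, window):
    """data: array (n_channels, n_heights); window: integer decimation factor."""
    n_channels, n_heights = data.shape
    r = n_heights % window
    trimmed = data[:, :n_heights - r] if r else data
    return trimmed.reshape(n_channels, (n_heights - r) // window, window).sum(axis=2)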
315 315
316 316 class setH0(Operation):
317 317
318 318 def run(self, dataOut, h0, deltaHeight = None):
319 319
320 320 if not deltaHeight:
321 321 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
322 322
323 323 nHeights = dataOut.nHeights
324 324
325 325 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
326 326
327 327 dataOut.heightList = newHeiRange
328 328
329 329 return dataOut
330 330
331 331
332 332 class deFlip(Operation):
333 333
334 334 def run(self, dataOut, channelList = []):
335 335
336 336 data = dataOut.data.copy()
337 337
338 338 if dataOut.flagDataAsBlock:
339 339 flip = self.flip
340 340 profileList = list(range(dataOut.nProfiles))
341 341
342 342 if not channelList:
343 343 for thisProfile in profileList:
344 344 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
345 345 flip *= -1.0
346 346 else:
347 347 for thisChannel in channelList:
348 348 if thisChannel not in dataOut.channelList:
349 349 continue
350 350
351 351 for thisProfile in profileList:
352 352 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
353 353 flip *= -1.0
354 354
355 355 self.flip = flip
356 356
357 357 else:
358 358 if not channelList:
359 359 data[:,:] = data[:,:]*self.flip
360 360 else:
361 361 for thisChannel in channelList:
362 362 if thisChannel not in dataOut.channelList:
363 363 continue
364 364
365 365 data[thisChannel,:] = data[thisChannel,:]*self.flip
366 366
367 367 self.flip *= -1.
368 368
369 369 dataOut.data = data
370 370
371 371 return dataOut
372 372
373 373
374 374 class setAttribute(Operation):
375 375 '''
376 376 Set an arbitrary attribute(s) to dataOut
377 377 '''
378 378
379 379 def __init__(self):
380 380
381 381 Operation.__init__(self)
382 382 self._ready = False
383 383
384 384 def run(self, dataOut, **kwargs):
385 385
386 386 for key, value in kwargs.items():
387 387 setattr(dataOut, key, value)
388 388
389 389 return dataOut
390 390
391 391
392 392 @MPDecorator
393 393 class printAttribute(Operation):
394 394 '''
395 395 Print an arbitrary attribute of dataOut
396 396 '''
397 397
398 398 def __init__(self):
399 399
400 400 Operation.__init__(self)
401 401
402 402 def run(self, dataOut, attributes):
403 403
404 404 if isinstance(attributes, str):
405 405 attributes = [attributes]
406 406 for attr in attributes:
407 407 if hasattr(dataOut, attr):
408 408 log.log(getattr(dataOut, attr), attr)
409 409
410 410
411 411 class interpolateHeights(Operation):
412 412
413 413 def run(self, dataOut, topLim, botLim):
414 414 #69 to 72 for JULIA
415 415 #82-84 for meteors
416 416 if len(numpy.shape(dataOut.data))==2:
417 417 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
418 418 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
419 419 #dataOut.data[:,botLim:limSup+1] = sampInterp
420 420 dataOut.data[:,botLim:topLim+1] = sampInterp
421 421 else:
422 422 nHeights = dataOut.data.shape[2]
423 423 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
424 424 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
425 425 f = interpolate.interp1d(x, y, axis = 2)
426 426 xnew = numpy.arange(botLim,topLim+1)
427 427 ynew = f(xnew)
428 428 dataOut.data[:,:,botLim:topLim+1] = ynew
429 429
430 430 return dataOut
431 431
432 432
433 433 class CohInt(Operation):
434 434
435 435 isConfig = False
436 436 __profIndex = 0
437 437 __byTime = False
438 438 __initime = None
439 439 __lastdatatime = None
440 440 __integrationtime = None
441 441 __buffer = None
442 442 __bufferStride = []
443 443 __dataReady = False
444 444 __profIndexStride = 0
445 445 __dataToPutStride = False
446 446 n = None
447 447
448 448 def __init__(self, **kwargs):
449 449
450 450 Operation.__init__(self, **kwargs)
451 451
452 452 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
453 453 """
454 454 Set the parameters of the integration class.
455 455
456 456 Inputs:
457 457
458 458 n : Number of coherent integrations
459 459 timeInterval : Integration time in seconds. If the parameter "n" is given, this one is ignored
460 460 overlapping : if True, integrate over a sliding (overlapping) window of the last n profiles
461 461 """
462 462
463 463 self.__initime = None
464 464 self.__lastdatatime = 0
465 465 self.__buffer = None
466 466 self.__dataReady = False
467 467 self.byblock = byblock
468 468 self.stride = stride
469 469
470 470 if n == None and timeInterval == None:
471 471 raise ValueError("n or timeInterval should be specified ...")
472 472
473 473 if n != None:
474 474 self.n = n
475 475 self.__byTime = False
476 476 else:
477 477 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
478 478 self.n = 9999
479 479 self.__byTime = True
480 480
481 481 if overlapping:
482 482 self.__withOverlapping = True
483 483 self.__buffer = None
484 484 else:
485 485 self.__withOverlapping = False
486 486 self.__buffer = 0
487 487
488 488 self.__profIndex = 0
489 489
490 490 def putData(self, data):
491 491
492 492 """
493 493 Add a profile to the __buffer and increase the __profileIndex by one
494 494
495 495 """
496 496
497 497 if not self.__withOverlapping:
498 498 self.__buffer += data.copy()
499 499 self.__profIndex += 1
500 500 return
501 501
502 502 #Overlapping data
503 503 nChannels, nHeis = data.shape
504 504 data = numpy.reshape(data, (1, nChannels, nHeis))
505 505
506 506 #If the buffer is empty then it takes the data value
507 507 if self.__buffer is None:
508 508 self.__buffer = data
509 509 self.__profIndex += 1
510 510 return
511 511
512 512 #If the buffer length is lower than n then stack the data value
513 513 if self.__profIndex < self.n:
514 514 self.__buffer = numpy.vstack((self.__buffer, data))
515 515 self.__profIndex += 1
516 516 return
517 517
518 518 #If the buffer length is equal to n then replacing the last buffer value with the data value
519 519 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
520 520 self.__buffer[self.n-1] = data
521 521 self.__profIndex = self.n
522 522 return
523 523
524 524
525 525 def pushData(self):
526 526 """
527 527 Return the sum of the last profiles and the profiles used in the sum.
528 528
529 529 Affected:
530 530
531 531 self.__profileIndex
532 532
533 533 """
534 534
535 535 if not self.__withOverlapping:
536 536 data = self.__buffer
537 537 n = self.__profIndex
538 538
539 539 self.__buffer = 0
540 540 self.__profIndex = 0
541 541
542 542 return data, n
543 543
544 544 #Integration with Overlapping
545 545 data = numpy.sum(self.__buffer, axis=0)
546 546 # print data
547 547 # raise
548 548 n = self.__profIndex
549 549
550 550 return data, n
551 551
552 552 def byProfiles(self, data):
553 553
554 554 self.__dataReady = False
555 555 avgdata = None
556 556 # n = None
557 557 # print data
558 558 # raise
559 559 self.putData(data)
560 560
561 561 if self.__profIndex == self.n:
562 562 avgdata, n = self.pushData()
563 563 self.__dataReady = True
564 564
565 565 return avgdata
566 566
567 567 def byTime(self, data, datatime):
568 568
569 569 self.__dataReady = False
570 570 avgdata = None
571 571 n = None
572 572
573 573 self.putData(data)
574 574
575 575 if (datatime - self.__initime) >= self.__integrationtime:
576 576 avgdata, n = self.pushData()
577 577 self.n = n
578 578 self.__dataReady = True
579 579
580 580 return avgdata
581 581
582 582 def integrateByStride(self, data, datatime):
583 583 # print data
584 584 if self.__profIndex == 0:
585 585 self.__buffer = [[data.copy(), datatime]]
586 586 else:
587 587 self.__buffer.append([data.copy(),datatime])
588 588 self.__profIndex += 1
589 589 self.__dataReady = False
590 590
591 591 if self.__profIndex == self.n * self.stride :
592 592 self.__dataToPutStride = True
593 593 self.__profIndexStride = 0
594 594 self.__profIndex = 0
595 595 self.__bufferStride = []
596 596 for i in range(self.stride):
597 597 current = self.__buffer[i::self.stride]
598 598 data = numpy.sum([t[0] for t in current], axis=0)
599 599 avgdatatime = numpy.average([t[1] for t in current])
600 600 # print data
601 601 self.__bufferStride.append((data, avgdatatime))
602 602
603 603 if self.__dataToPutStride:
604 604 self.__dataReady = True
605 605 self.__profIndexStride += 1
606 606 if self.__profIndexStride == self.stride:
607 607 self.__dataToPutStride = False
608 608 # print self.__bufferStride[self.__profIndexStride - 1]
609 609 # raise
610 610 return self.__bufferStride[self.__profIndexStride - 1]
611 611
612 612
613 613 return None, None
614 614
615 615 def integrate(self, data, datatime=None):
616 616
617 617 if self.__initime == None:
618 618 self.__initime = datatime
619 619
620 620 if self.__byTime:
621 621 avgdata = self.byTime(data, datatime)
622 622 else:
623 623 avgdata = self.byProfiles(data)
624 624
625 625
626 626 self.__lastdatatime = datatime
627 627
628 628 if avgdata is None:
629 629 return None, None
630 630
631 631 avgdatatime = self.__initime
632 632
633 633 deltatime = datatime - self.__lastdatatime
634 634
635 635 if not self.__withOverlapping:
636 636 self.__initime = datatime
637 637 else:
638 638 self.__initime += deltatime
639 639
640 640 return avgdata, avgdatatime
641 641
642 642 def integrateByBlock(self, dataOut):
643 643
644 644 times = int(dataOut.data.shape[1]/self.n)
645 645 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
646 646
647 647 id_min = 0
648 648 id_max = self.n
649 649
650 650 for i in range(times):
651 651 junk = dataOut.data[:,id_min:id_max,:]
652 652 avgdata[:,i,:] = junk.sum(axis=1)
653 653 id_min += self.n
654 654 id_max += self.n
655 655
656 656 timeInterval = dataOut.ippSeconds*self.n
657 657 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
658 658 self.__dataReady = True
659 659 return avgdata, avgdatatime
660 660
661 661 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
662 662
663 663 if not self.isConfig:
664 664 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
665 665 self.isConfig = True
666 666
667 667 if dataOut.flagDataAsBlock:
668 668 """
669 669 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
670 670 """
671 671 avgdata, avgdatatime = self.integrateByBlock(dataOut)
672 672 dataOut.nProfiles /= self.n
673 673 else:
674 674 if stride is None:
675 675 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
676 676 else:
677 677 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
678 678
679 679
680 680 # dataOut.timeInterval *= n
681 681 dataOut.flagNoData = True
682 682
683 683 if self.__dataReady:
684 684 dataOut.data = avgdata
685 685 if not dataOut.flagCohInt:
686 686 dataOut.nCohInt *= self.n
687 687 dataOut.flagCohInt = True
688 688 dataOut.utctime = avgdatatime
689 689 # print avgdata, avgdatatime
690 690 # raise
691 691 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
692 692 dataOut.flagNoData = False
693 693 return dataOut
694 694
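# Minimal sketch (not part of the operation above) of non-overlapping coherent integration:
# complex voltage profiles are summed in groups of n, which CohInt.putData()/pushData()
# do incrementally, and nCohInt is then scaled by n.
def _coherent_integration_sketch(profiles, n):
    """profiles: complex array (n_profiles, n_channels, n_heights) with n_profiles % n == 0."""
    n_profiles, n_channels, n_heights = profiles.shape
    grouped = profiles.reshape(n_profiles // n, n, n_channels, n_heights)
    return grouped.sum(axis=1)    # one integrated profile per group of n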
695 695 class Decoder(Operation):
696 696
697 697 isConfig = False
698 698 __profIndex = 0
699 699
700 700 code = None
701 701
702 702 nCode = None
703 703 nBaud = None
704 704
705 705 def __init__(self, **kwargs):
706 706
707 707 Operation.__init__(self, **kwargs)
708 708
709 709 self.times = None
710 710 self.osamp = None
711 711 # self.__setValues = False
712 712 self.isConfig = False
713 713 self.setupReq = False
714 714 def setup(self, code, osamp, dataOut):
715 715
716 716 self.__profIndex = 0
717 717
718 718 self.code = code
719 719
720 720 self.nCode = len(code)
721 721 self.nBaud = len(code[0])
722 722 if (osamp != None) and (osamp >1):
723 723 self.osamp = osamp
724 724 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
725 725 self.nBaud = self.nBaud*self.osamp
726 726
727 727 self.__nChannels = dataOut.nChannels
728 728 self.__nProfiles = dataOut.nProfiles
729 729 self.__nHeis = dataOut.nHeights
730 730
731 731 if self.__nHeis < self.nBaud:
732 732 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
733 733
734 734 #Frequency
735 735 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
736 736
737 737 __codeBuffer[:,0:self.nBaud] = self.code
738 738
739 739 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
740 740
741 741 if dataOut.flagDataAsBlock:
742 742
743 743 self.ndatadec = self.__nHeis #- self.nBaud + 1
744 744
745 745 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
746 746
747 747 else:
748 748
749 749 #Time
750 750 self.ndatadec = self.__nHeis #- self.nBaud + 1
751 751
752 752 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
753 753
754 754 def __convolutionInFreq(self, data):
755 755
756 756 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
757 757
758 758 fft_data = numpy.fft.fft(data, axis=1)
759 759
760 760 conv = fft_data*fft_code
761 761
762 762 data = numpy.fft.ifft(conv,axis=1)
763 763
764 764 return data
765 765
766 766 def __convolutionInFreqOpt(self, data):
767 767
768 768 raise NotImplementedError
769 769
770 770 def __convolutionInTime(self, data):
771 771
772 772 code = self.code[self.__profIndex]
773 773 for i in range(self.__nChannels):
774 774 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
775 775
776 776 return self.datadecTime
777 777
778 778 def __convolutionByBlockInTime(self, data):
779 779
780 780 repetitions = int(self.__nProfiles / self.nCode)
781 781 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
782 782 junk = junk.flatten()
783 783 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
784 784 profilesList = range(self.__nProfiles)
785 785
786 786 for i in range(self.__nChannels):
787 787 for j in profilesList:
788 788 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
789 789 return self.datadecTime
790 790
791 791 def __convolutionByBlockInFreq(self, data):
792 792
793 793 raise NotImplementedError("Decoder by frequency for blocks is not implemented")
794 794
795 795
796 796 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
797 797
798 798 fft_data = numpy.fft.fft(data, axis=2)
799 799
800 800 conv = fft_data*fft_code
801 801
802 802 data = numpy.fft.ifft(conv,axis=2)
803 803
804 804 return data
805 805
806 806
807 807 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
808 808
809 809 if dataOut.flagDecodeData:
810 810 print("This data is already decoded, decoding it again ...")
811 811
812 812 if not self.isConfig:
813 813
814 814 if code is None:
815 815 if dataOut.code is None:
816 816 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
817 817
818 818 code = dataOut.code
819 819 else:
820 820 code = numpy.array(code).reshape(nCode,nBaud)
821 821 self.setup(code, osamp, dataOut)
822 822
823 823 self.isConfig = True
824 824
825 825 if mode == 3:
826 826 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
827 827
828 828 if times != None:
829 829 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
830 830
831 831 if self.code is None:
832 832 print("Fail decoding: Code is not defined.")
833 833 return
834 834
835 835 self.__nProfiles = dataOut.nProfiles
836 836 datadec = None
837 837
838 838 if mode == 3:
839 839 mode = 0
840 840
841 841 if dataOut.flagDataAsBlock:
842 842 """
843 843 Decoding when data have been read as block,
844 844 """
845 845
846 846 if mode == 0:
847 847 datadec = self.__convolutionByBlockInTime(dataOut.data)
848 848 if mode == 1:
849 849 datadec = self.__convolutionByBlockInFreq(dataOut.data)
850 850 else:
851 851 """
852 852 Decoding when data have been read profile by profile
853 853 """
854 854 if mode == 0:
855 855 datadec = self.__convolutionInTime(dataOut.data)
856 856
857 857 if mode == 1:
858 858 datadec = self.__convolutionInFreq(dataOut.data)
859 859
860 860 if mode == 2:
861 861 datadec = self.__convolutionInFreqOpt(dataOut.data)
862 862
863 863 if datadec is None:
864 864 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
865 865
866 866 dataOut.code = self.code
867 867 dataOut.nCode = self.nCode
868 868 dataOut.nBaud = self.nBaud
869 869
870 870 dataOut.data = datadec
871 871
872 872 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
873 873
874 874 dataOut.flagDecodeData = True # assume the data is now decoded
875 875
876 876 if self.__profIndex == self.nCode-1:
877 877 self.__profIndex = 0
878 878 return dataOut
879 879
880 880 self.__profIndex += 1
881 881
882 882 return dataOut
883 883 # dataOut.flagDeflipData = True # assume the data has been deflipped
884 884
885 885
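# Minimal sketch (illustrative, one channel and one profile) of the time-domain decoding
# used by Decoder.__convolutionInTime above: the received samples are cross-correlated
# with the transmitted code and the first nBaud-1 partial lags are discarded.
def _decode_in_time_sketch(samples, code):
    import numpy
    nBaud = len(code)
    return numpy.correlate(samples, code, mode='full')[nBaud - 1:]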
886 886 class ProfileConcat(Operation):
887 887
888 888 isConfig = False
889 889 buffer = None
890 890
891 891 def __init__(self, **kwargs):
892 892
893 893 Operation.__init__(self, **kwargs)
894 894 self.profileIndex = 0
895 895
896 896 def reset(self):
897 897 self.buffer = numpy.zeros_like(self.buffer)
898 898 self.start_index = 0
899 899 self.times = 1
900 900
901 901 def setup(self, data, m, n=1):
902 902 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
903 903 self.nHeights = data.shape[1]#.nHeights
904 904 self.start_index = 0
905 905 self.times = 1
906 906
907 907 def concat(self, data):
908 908
909 909 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
910 910 self.start_index = self.start_index + self.nHeights
911 911
912 912 def run(self, dataOut, m):
913 913 dataOut.flagNoData = True
914 914
915 915 if not self.isConfig:
916 916 self.setup(dataOut.data, m, 1)
917 917 self.isConfig = True
918 918
919 919 if dataOut.flagDataAsBlock:
920 920 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
921 921
922 922 else:
923 923 self.concat(dataOut.data)
924 924 self.times += 1
925 925 if self.times > m:
926 926 dataOut.data = self.buffer
927 927 self.reset()
928 928 dataOut.flagNoData = False
929 929 # more properties of the header and of the dataOut object should be updated, e.g. the heights
930 930 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
931 931 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
932 932 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
933 933 dataOut.ippSeconds *= m
934 934 return dataOut
935 935
936 936 class ProfileSelector(Operation):
937 937
938 938 profileIndex = None
939 939 # Total number of profiles
940 940 nProfiles = None
941 941
942 942 def __init__(self, **kwargs):
943 943
944 944 Operation.__init__(self, **kwargs)
945 945 self.profileIndex = 0
946 946
947 947 def incProfileIndex(self):
948 948
949 949 self.profileIndex += 1
950 950
951 951 if self.profileIndex >= self.nProfiles:
952 952 self.profileIndex = 0
953 953
954 954 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
955 955
956 956 if profileIndex < minIndex:
957 957 return False
958 958
959 959 if profileIndex > maxIndex:
960 960 return False
961 961
962 962 return True
963 963
964 964 def isThisProfileInList(self, profileIndex, profileList):
965 965
966 966 if profileIndex not in profileList:
967 967 return False
968 968
969 969 return True
970 970
971 971 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
972 972
973 973 """
974 974 ProfileSelector:
975 975
976 976 Inputs:
977 977 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
978 978
979 979 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
980 980
981 981 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
982 982
983 983 """
984 984
985 985 if rangeList is not None:
986 986 if type(rangeList[0]) not in (tuple, list):
987 987 rangeList = [rangeList]
988 988
989 989 dataOut.flagNoData = True
990 990
991 991 if dataOut.flagDataAsBlock:
992 992 """
993 993 data dimension = [nChannels, nProfiles, nHeis]
994 994 """
995 995 if profileList != None:
996 996 dataOut.data = dataOut.data[:,profileList,:]
997 997
998 998 if profileRangeList != None:
999 999 minIndex = profileRangeList[0]
1000 1000 maxIndex = profileRangeList[1]
1001 1001 profileList = list(range(minIndex, maxIndex+1))
1002 1002
1003 1003 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
1004 1004
1005 1005 if rangeList != None:
1006 1006
1007 1007 profileList = []
1008 1008
1009 1009 for thisRange in rangeList:
1010 1010 minIndex = thisRange[0]
1011 1011 maxIndex = thisRange[1]
1012 1012
1013 1013 profileList.extend(list(range(minIndex, maxIndex+1)))
1014 1014
1015 1015 dataOut.data = dataOut.data[:,profileList,:]
1016 1016
1017 1017 dataOut.nProfiles = len(profileList)
1018 1018 dataOut.profileIndex = dataOut.nProfiles - 1
1019 1019 dataOut.flagNoData = False
1020 1020
1021 1021 return dataOut
1022 1022
1023 1023 """
1024 1024 data dimension = [nChannels, nHeis]
1025 1025 """
1026 1026
1027 1027 if profileList != None:
1028 1028
1029 1029 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1030 1030
1031 1031 self.nProfiles = len(profileList)
1032 1032 dataOut.nProfiles = self.nProfiles
1033 1033 dataOut.profileIndex = self.profileIndex
1034 1034 dataOut.flagNoData = False
1035 1035
1036 1036 self.incProfileIndex()
1037 1037 return dataOut
1038 1038
1039 1039 if profileRangeList != None:
1040 1040
1041 1041 minIndex = profileRangeList[0]
1042 1042 maxIndex = profileRangeList[1]
1043 1043
1044 1044 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1045 1045
1046 1046 self.nProfiles = maxIndex - minIndex + 1
1047 1047 dataOut.nProfiles = self.nProfiles
1048 1048 dataOut.profileIndex = self.profileIndex
1049 1049 dataOut.flagNoData = False
1050 1050
1051 1051 self.incProfileIndex()
1052 1052 return dataOut
1053 1053
1054 1054 if rangeList != None:
1055 1055
1056 1056 nProfiles = 0
1057 1057
1058 1058 for thisRange in rangeList:
1059 1059 minIndex = thisRange[0]
1060 1060 maxIndex = thisRange[1]
1061 1061
1062 1062 nProfiles += maxIndex - minIndex + 1
1063 1063
1064 1064 for thisRange in rangeList:
1065 1065
1066 1066 minIndex = thisRange[0]
1067 1067 maxIndex = thisRange[1]
1068 1068
1069 1069 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1070 1070
1071 1071 self.nProfiles = nProfiles
1072 1072 dataOut.nProfiles = self.nProfiles
1073 1073 dataOut.profileIndex = self.profileIndex
1074 1074 dataOut.flagNoData = False
1075 1075
1076 1076 self.incProfileIndex()
1077 1077
1078 1078 break
1079 1079
1080 1080 return dataOut
1081 1081
1082 1082
1083 1083 if beam != None: #beam is only for AMISR data
1084 1084 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1085 1085 dataOut.flagNoData = False
1086 1086 dataOut.profileIndex = self.profileIndex
1087 1087
1088 1088 self.incProfileIndex()
1089 1089
1090 1090 return dataOut
1091 1091
1092 1092 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1093 1093
1094 1094
1095 1095 class Reshaper(Operation):
1096 1096
1097 1097 def __init__(self, **kwargs):
1098 1098
1099 1099 Operation.__init__(self, **kwargs)
1100 1100
1101 1101 self.__buffer = None
1102 1102 self.__nitems = 0
1103 1103
1104 1104 def __appendProfile(self, dataOut, nTxs):
1105 1105
1106 1106 if self.__buffer is None:
1107 1107 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1108 1108 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1109 1109
1110 1110 ini = dataOut.nHeights * self.__nitems
1111 1111 end = ini + dataOut.nHeights
1112 1112
1113 1113 self.__buffer[:, ini:end] = dataOut.data
1114 1114
1115 1115 self.__nitems += 1
1116 1116
1117 1117 return int(self.__nitems*nTxs)
1118 1118
1119 1119 def __getBuffer(self):
1120 1120
1121 1121 if self.__nitems == int(1./self.__nTxs):
1122 1122
1123 1123 self.__nitems = 0
1124 1124
1125 1125 return self.__buffer.copy()
1126 1126
1127 1127 return None
1128 1128
1129 1129 def __checkInputs(self, dataOut, shape, nTxs):
1130 1130
1131 1131 if shape is None and nTxs is None:
1132 1132 raise ValueError("Reshaper: shape of factor should be defined")
1133 1133
1134 1134 if nTxs:
1135 1135 if nTxs < 0:
1136 1136 raise ValueError("nTxs should be greater than 0")
1137 1137
1138 1138 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1139 1139 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1140 1140
1141 1141 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1142 1142
1143 1143 return shape, nTxs
1144 1144
1145 1145 if len(shape) != 2 and len(shape) != 3:
1146 1146 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1147 1147
1148 1148 if len(shape) == 2:
1149 1149 shape_tuple = [dataOut.nChannels]
1150 1150 shape_tuple.extend(shape)
1151 1151 else:
1152 1152 shape_tuple = list(shape)
1153 1153
1154 1154 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1155 1155
1156 1156 return shape_tuple, nTxs
1157 1157
1158 1158 def run(self, dataOut, shape=None, nTxs=None):
1159 1159
1160 1160 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1161 1161
1162 1162 dataOut.flagNoData = True
1163 1163 profileIndex = None
1164 1164
1165 1165 if dataOut.flagDataAsBlock:
1166 1166
1167 1167 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1168 1168 dataOut.flagNoData = False
1169 1169
1170 1170 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1171 1171
1172 1172 else:
1173 1173
1174 1174 if self.__nTxs < 1:
1175 1175
1176 1176 self.__appendProfile(dataOut, self.__nTxs)
1177 1177 new_data = self.__getBuffer()
1178 1178
1179 1179 if new_data is not None:
1180 1180 dataOut.data = new_data
1181 1181 dataOut.flagNoData = False
1182 1182
1183 1183 profileIndex = dataOut.profileIndex*nTxs
1184 1184
1185 1185 else:
1186 1186 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1187 1187
1188 1188 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1189 1189
1190 1190 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1191 1191
1192 1192 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1193 1193
1194 1194 dataOut.profileIndex = profileIndex
1195 1195
1196 1196 dataOut.ippSeconds /= self.__nTxs
1197 1197
1198 1198 return dataOut
1199 1199
1200 1200 class SplitProfiles(Operation):
1201 1201
1202 1202 def __init__(self, **kwargs):
1203 1203
1204 1204 Operation.__init__(self, **kwargs)
1205 1205
1206 1206 def run(self, dataOut, n):
1207 1207
1208 1208 dataOut.flagNoData = True
1209 1209 profileIndex = None
1210 1210
1211 1211 if dataOut.flagDataAsBlock:
1212 1212
1213 1213 #nchannels, nprofiles, nsamples
1214 1214 shape = dataOut.data.shape
1215 1215
1216 1216 if shape[2] % n != 0:
1217 1217 raise ValueError("Could not split the data, the number of samples (%d) has to be a multiple of n=%d" %(shape[2], n))
1218 1218
1219 1219 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1220 1220
1221 1221 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1222 1222 dataOut.flagNoData = False
1223 1223
1224 1224 profileIndex = int(dataOut.nProfiles/n) - 1
1225 1225
1226 1226 else:
1227 1227
1228 1228 raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")
1229 1229
1230 1230 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1231 1231
1232 1232 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1233 1233
1234 1234 dataOut.nProfiles = int(dataOut.nProfiles*n)
1235 1235
1236 1236 dataOut.profileIndex = profileIndex
1237 1237
1238 1238 dataOut.ippSeconds /= n
1239 1239
1240 1240 return dataOut
1241 1241
1242 1242 class CombineProfiles(Operation):
1243 1243 def __init__(self, **kwargs):
1244 1244
1245 1245 Operation.__init__(self, **kwargs)
1246 1246
1247 1247 self.__remData = None
1248 1248 self.__profileIndex = 0
1249 1249
1250 1250 def run(self, dataOut, n):
1251 1251
1252 1252 dataOut.flagNoData = True
1253 1253 profileIndex = None
1254 1254
1255 1255 if dataOut.flagDataAsBlock:
1256 1256
1257 1257 #nchannels, nprofiles, nsamples
1258 1258 shape = dataOut.data.shape
1259 1259 new_shape = shape[0], shape[1]/n, shape[2]*n
1260 1260
1261 1261 if shape[1] % n != 0:
1262 1262 raise ValueError("Could not combine the data, the number of profiles (%d) has to be a multiple of n=%d" %(shape[1], n))
1263 1263
1264 1264 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1265 1265 dataOut.flagNoData = False
1266 1266
1267 1267 profileIndex = int(dataOut.nProfiles*n) - 1
1268 1268
1269 1269 else:
1270 1270
1271 1271 #nchannels, nsamples
1272 1272 if self.__remData is None:
1273 1273 newData = dataOut.data
1274 1274 else:
1275 1275 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1276 1276
1277 1277 self.__profileIndex += 1
1278 1278
1279 1279 if self.__profileIndex < n:
1280 1280 self.__remData = newData
1281 1281 #continue
1282 1282 return
1283 1283
1284 1284 self.__profileIndex = 0
1285 1285 self.__remData = None
1286 1286
1287 1287 dataOut.data = newData
1288 1288 dataOut.flagNoData = False
1289 1289
1290 1290 profileIndex = dataOut.profileIndex/n
1291 1291
1292 1292
1293 1293 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1294 1294
1295 1295 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1296 1296
1297 1297 dataOut.nProfiles = int(dataOut.nProfiles/n)
1298 1298
1299 1299 dataOut.profileIndex = profileIndex
1300 1300
1301 1301 dataOut.ippSeconds *= n
1302 1302
1303 1303 return dataOut
1304 1304
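# Minimal sketches (illustration only) of the reshapes behind SplitProfiles and
# CombineProfiles above: splitting turns each profile into n shorter profiles,
# combining merges n consecutive profiles into one longer profile (ippSeconds is
# divided or multiplied by n accordingly).
def _split_profiles_sketch(block, n):
    """block: array (n_channels, n_profiles, n_samples) with n_samples % n == 0."""
    nch, nprof, nsamp = block.shape
    return block.reshape(nch, nprof * n, nsamp // n)

def _combine_profiles_sketch(block, n):
    """block: array (n_channels, n_profiles, n_samples) with n_profiles % n == 0."""
    nch, nprof, nsamp = block.shape
    return block.reshape(nch, nprof // n, nsamp * n)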
1305 1305 class PulsePairVoltage(Operation):
1306 1306 '''
1307 1307 Function PulsePair(Signal Power, Velocity)
1308 1308 The real component of Lag[0] provides intensity information
1309 1309 The phase of Lag[1] provides velocity information
1310 1310
1311 1311 Configuration Parameters:
1312 1312 nPRF = number of PRFs (pulse pairs) to integrate
1313 1313 theta = azimuth angle boundaries in degrees
1314 1314
1315 1315 Input:
1316 1316 self.dataOut
1317 1317 lag[N]
1318 1318 Affected:
1319 1319 self.dataOut.spc
1320 1320 '''
1321 1321 isConfig = False
1322 1322 __profIndex = 0
1323 1323 __initime = None
1324 1324 __lastdatatime = None
1325 1325 __buffer = None
1326 1326 noise = None
1327 1327 __dataReady = False
1328 1328 n = None
1329 1329 __nch = 0
1330 1330 __nHeis = 0
1331 1331 removeDC = False
1332 1332 ipp = None
1333 1333 lambda_ = 0
1334 1334
1335 1335 def __init__(self,**kwargs):
1336 1336 Operation.__init__(self,**kwargs)
1337 1337
1338 1338 def setup(self, dataOut, n = None, removeDC=False):
1339 1339 '''
1340 1340 n = number of input PRFs
1341 1341 '''
1342 1342 self.__initime = None
1343 1343 self.__lastdatatime = 0
1344 1344 self.__dataReady = False
1345 1345 self.__buffer = 0
1346 1346 self.__profIndex = 0
1347 1347 self.noise = None
1348 1348 self.__nch = dataOut.nChannels
1349 1349 self.__nHeis = dataOut.nHeights
1350 1350 self.removeDC = removeDC
1351 1351 self.lambda_ = 3.0e8/(9345.0e6)
1352 1352 self.ippSec = dataOut.ippSeconds
1353 1353 self.nCohInt = dataOut.nCohInt
1354 1354
1355 1355 if n == None:
1356 1356 raise ValueError("n should be specified.")
1357 1357
1358 1358 if n != None:
1359 1359 if n<2:
1360 1360 raise ValueError("n should be greater than 2")
1361 1361
1362 1362 self.n = n
1363 1363 self.__nProf = n
1364 1364
1365 1365 self.__buffer = numpy.zeros((dataOut.nChannels,
1366 1366 n,
1367 1367 dataOut.nHeights),
1368 1368 dtype='complex')
1369 1369
1370 1370 def putData(self,data):
1371 1371 '''
1372 1372 Add a profile to the __buffer and increase the __profileIndex by one
1373 1373 '''
1374 1374 self.__buffer[:,self.__profIndex,:]= data
1375 1375 self.__profIndex += 1
1376 1376 return
1377 1377
1378 1378 def pushData(self,dataOut):
1379 1379 '''
1380 1380 Return the PULSEPAIR and the profiles used in the operation
1381 1381 Affected : self.__profileIndex
1382 1382 '''
1383 1383 #----------------- Remove DC-----------------------------------
1384 1384 if self.removeDC==True:
1385 1385 mean = numpy.mean(self.__buffer,1)
1386 1386 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1387 1387 dc= numpy.tile(tmp,[1,self.__nProf,1])
1388 1388 self.__buffer = self.__buffer - dc
1389 1389 #------------------ Power computation ------------------------
1390 1390 pair0 = self.__buffer*numpy.conj(self.__buffer)
1391 1391 pair0 = pair0.real
1392 1392 lag_0 = numpy.sum(pair0,1)
1393 1393 #------------------ Noise computation per channel --------------------
1394 1394 self.noise = numpy.zeros(self.__nch)
1395 1395 for i in range(self.__nch):
1396 1396 daux = numpy.sort(pair0[i,:,:],axis= None)
1397 1397 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1398 1398
1399 1399 self.noise = self.noise.reshape(self.__nch,1)
1400 1400 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1401 1401 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1402 1402 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1403 1403 #------------------ Received power = P , Signal power = S , Noise = N --
1404 1404 #------------------ P = S + N , P = lag_0/N ---------------------------------
1405 1405 #-------------------- Power --------------------------------------------------
1406 1406 data_power = lag_0/(self.n*self.nCohInt)
1407 1407 #------------------ Signal ---------------------------------------------------
1408 1408 data_intensity = pair0 - noise_buffer
1409 1409 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1410 1410 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1411 1411 for i in range(self.__nch):
1412 1412 for j in range(self.__nHeis):
1413 1413 if data_intensity[i][j] < 0:
1414 1414 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1415 1415
1416 1416 #----------------- Doppler frequency and velocity computation --------
1417 1417 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1418 1418 lag_1 = numpy.sum(pair1,1)
1419 1419 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1420 1420 data_velocity = (self.lambda_/2.0)*data_freq
1421 1421
1422 1422 #---------------- Estimated mean signal power -----------
1423 1423 lag_0 = lag_0/self.n
1424 1424 S = lag_0-self.noise
1425 1425
1426 1426 #---------------- Mean Doppler frequency ---------------------
1427 1427 lag_1 = lag_1/(self.n-1)
1428 1428 R1 = numpy.abs(lag_1)
1429 1429
1430 1430 #---------------- SNR computation ----------------------------------
1431 1431 data_snrPP = S/self.noise
1432 1432 for i in range(self.__nch):
1433 1433 for j in range(self.__nHeis):
1434 1434 if data_snrPP[i][j] < 1.e-20:
1435 1435 data_snrPP[i][j] = 1.e-20
1436 1436
1437 1437 #----------------- Spectral width computation ----------------------
1438 1438 L = S/R1
1439 1439 L = numpy.where(L<0,1,L)
1440 1440 L = numpy.log(L)
1441 1441 tmp = numpy.sqrt(numpy.absolute(L))
1442 1442 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1443 1443 n = self.__profIndex
1444 1444
1445 1445 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1446 1446 self.__profIndex = 0
1447 1447 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1448 1448
1449 1449
1450 1450 def pulsePairbyProfiles(self,dataOut):
1451 1451
1452 1452 self.__dataReady = False
1453 1453 data_power = None
1454 1454 data_intensity = None
1455 1455 data_velocity = None
1456 1456 data_specwidth = None
1457 1457 data_snrPP = None
1458 1458 self.putData(data=dataOut.data)
1459 1459 if self.__profIndex == self.n:
1460 1460 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1461 1461 self.__dataReady = True
1462 1462
1463 1463 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1464 1464
1465 1465
1466 1466 def pulsePairOp(self, dataOut, datatime= None):
1467 1467
1468 1468 if self.__initime == None:
1469 1469 self.__initime = datatime
1470 1470 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1471 1471 self.__lastdatatime = datatime
1472 1472
1473 1473 if data_power is None:
1474 1474 return None, None, None,None,None,None
1475 1475
1476 1476 avgdatatime = self.__initime
1477 1477 deltatime = datatime - self.__lastdatatime
1478 1478 self.__initime = datatime
1479 1479
1480 1480 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1481 1481
1482 1482 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1483 1483
1484 1484 if not self.isConfig:
1485 1485 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1486 1486 self.isConfig = True
1487 1487 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1488 1488 dataOut.flagNoData = True
1489 1489
1490 1490 if self.__dataReady:
1491 1491 dataOut.nCohInt *= self.n
1492 1492 dataOut.dataPP_POW = data_intensity # S
1493 1493 dataOut.dataPP_POWER = data_power # P
1494 1494 dataOut.dataPP_DOP = data_velocity
1495 1495 dataOut.dataPP_SNR = data_snrPP
1496 1496 dataOut.dataPP_WIDTH = data_specwidth
1497 1497 dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time interval.
1498 1498 dataOut.utctime = avgdatatime
1499 1499 dataOut.flagNoData = False
1500 1500 return dataOut
1501 1501
1502 1502
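# Simplified sketch (single channel, no DC removal and no noise correction, so only an
# approximation of the estimators computed above): lag-0 gives power, the phase of lag-1
# gives Doppler velocity, and the lag-0/lag-1 ratio gives the spectral width.
# ipp_s and wavelength_m are assumed inputs, not attributes of dataOut.
def _pulse_pair_sketch(voltages, ipp_s, wavelength_m):
    """voltages: complex array (n_pulses, n_heights) of coherently sampled profiles."""
    import math
    import numpy
    lag0 = numpy.mean(numpy.abs(voltages) ** 2, axis=0)                   # power per height
    lag1 = numpy.mean(voltages[:-1] * numpy.conj(voltages[1:]), axis=0)
    doppler_freq = -numpy.angle(lag1) / (2.0 * math.pi * ipp_s)           # Hz
    velocity = (wavelength_m / 2.0) * doppler_freq                        # m/s
    ratio = lag0 / numpy.abs(lag1)
    width = (wavelength_m / (2.0 * math.sqrt(2.0) * math.pi * ipp_s)) * \
            numpy.sqrt(numpy.abs(numpy.log(numpy.abs(ratio))))
    return lag0, velocity, width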
1503 1503
1504 1504 # import collections
1505 1505 # from scipy.stats import mode
1506 1506 #
1507 1507 # class Synchronize(Operation):
1508 1508 #
1509 1509 # isConfig = False
1510 1510 # __profIndex = 0
1511 1511 #
1512 1512 # def __init__(self, **kwargs):
1513 1513 #
1514 1514 # Operation.__init__(self, **kwargs)
1515 1515 # # self.isConfig = False
1516 1516 # self.__powBuffer = None
1517 1517 # self.__startIndex = 0
1518 1518 # self.__pulseFound = False
1519 1519 #
1520 1520 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1521 1521 #
1522 1522 # #Read data
1523 1523 #
1524 1524 # powerdB = dataOut.getPower(channel = channel)
1525 1525 # noisedB = dataOut.getNoise(channel = channel)[0]
1526 1526 #
1527 1527 # self.__powBuffer.extend(powerdB.flatten())
1528 1528 #
1529 1529 # dataArray = numpy.array(self.__powBuffer)
1530 1530 #
1531 1531 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1532 1532 #
1533 1533 # maxValue = numpy.nanmax(filteredPower)
1534 1534 #
1535 1535 # if maxValue < noisedB + 10:
1536 1536 # #No se encuentra ningun pulso de transmision
1537 1537 # return None
1538 1538 #
1539 1539 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1540 1540 #
1541 1541 # if len(maxValuesIndex) < 2:
1542 1542 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1543 1543 # return None
1544 1544 #
1545 1545 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1546 1546 #
1547 1547 # #Seleccionar solo valores con un espaciamiento de nSamples
1548 1548 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1549 1549 #
1550 1550 # if len(pulseIndex) < 2:
1551 1551 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1552 1552 # return None
1553 1553 #
1554 1554 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1555 1555 #
1556 1556 # #remover senales que se distancien menos de 10 unidades o muestras
1557 1557 # #(No deberian existir IPP menor a 10 unidades)
1558 1558 #
1559 1559 # realIndex = numpy.where(spacing > 10 )[0]
1560 1560 #
1561 1561 # if len(realIndex) < 2:
1562 1562 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1563 1563 # return None
1564 1564 #
1565 1565 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1566 1566 # realPulseIndex = pulseIndex[realIndex]
1567 1567 #
1568 1568 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1569 1569 #
1570 1570 # print "IPP = %d samples" %period
1571 1571 #
1572 1572 # self.__newNSamples = dataOut.nHeights #int(period)
1573 1573 # self.__startIndex = int(realPulseIndex[0])
1574 1574 #
1575 1575 # return 1
1576 1576 #
1577 1577 #
1578 1578 # def setup(self, nSamples, nChannels, buffer_size = 4):
1579 1579 #
1580 1580 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1581 1581 # maxlen = buffer_size*nSamples)
1582 1582 #
1583 1583 # bufferList = []
1584 1584 #
1585 1585 # for i in range(nChannels):
1586 1586 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1587 1587 # maxlen = buffer_size*nSamples)
1588 1588 #
1589 1589 # bufferList.append(bufferByChannel)
1590 1590 #
1591 1591 # self.__nSamples = nSamples
1592 1592 # self.__nChannels = nChannels
1593 1593 # self.__bufferList = bufferList
1594 1594 #
1595 1595 # def run(self, dataOut, channel = 0):
1596 1596 #
1597 1597 # if not self.isConfig:
1598 1598 # nSamples = dataOut.nHeights
1599 1599 # nChannels = dataOut.nChannels
1600 1600 # self.setup(nSamples, nChannels)
1601 1601 # self.isConfig = True
1602 1602 #
1603 1603 # #Append new data to internal buffer
1604 1604 # for thisChannel in range(self.__nChannels):
1605 1605 # bufferByChannel = self.__bufferList[thisChannel]
1606 1606 # bufferByChannel.extend(dataOut.data[thisChannel])
1607 1607 #
1608 1608 # if self.__pulseFound:
1609 1609 # self.__startIndex -= self.__nSamples
1610 1610 #
1611 1611 # #Finding Tx Pulse
1612 1612 # if not self.__pulseFound:
1613 1613 # indexFound = self.__findTxPulse(dataOut, channel)
1614 1614 #
1615 1615 # if indexFound == None:
1616 1616 # dataOut.flagNoData = True
1617 1617 # return
1618 1618 #
1619 1619 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1620 1620 # self.__pulseFound = True
1621 1621 # self.__startIndex = indexFound
1622 1622 #
1623 1623 # #If pulse was found ...
1624 1624 # for thisChannel in range(self.__nChannels):
1625 1625 # bufferByChannel = self.__bufferList[thisChannel]
1626 1626 # #print self.__startIndex
1627 1627 # x = numpy.array(bufferByChannel)
1628 1628 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1629 1629 #
1630 1630 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1631 1631 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1632 1632 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1633 1633 #
1634 1634 # dataOut.data = self.__arrayBuffer
1635 1635 #
1636 1636 # self.__startIndex += self.__newNSamples
1637 1637 #
1638 1638 # return
1639 class SSheightProfiles(Operation):
1640
1641 step = None
1642 nsamples = None
1643 bufferShape = None
1644 profileShape = None
1645 sshProfiles = None
1646 profileIndex = None
1647
1648 def __init__(self, **kwargs):
1649
1650 Operation.__init__(self, **kwargs)
1651 self.isConfig = False
1652
1653 def setup(self,dataOut ,step = None , nsamples = None):
1654
1655 if step == None and nsamples == None:
1656 raise ValueError("step or nsamples should be specified ...")
1657
1658 self.step = step
1659 self.nsamples = nsamples
1660 self.__nChannels = dataOut.nChannels
1661 self.__nProfiles = dataOut.nProfiles
1662 self.__nHeis = dataOut.nHeights
1663 shape = dataOut.data.shape #nchannels, nprofiles, nsamples
1664
1665 residue = (shape[1] - self.nsamples) % self.step
1666 if residue != 0:
1667 print("The residue is %d, step=%d should be multiple of %d to avoid loss of %d samples"%(residue,step,shape[1] - self.nsamples,residue))
1668
1669 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1670 numberProfile = self.nsamples
1671 numberSamples = (shape[1] - self.nsamples)/self.step
1672
1673 self.bufferShape = int(shape[0]), int(numberSamples), int(numberProfile) # nchannels, nsamples , nprofiles
1674 self.profileShape = int(shape[0]), int(numberProfile), int(numberSamples) # nchannels, nprofiles, nsamples
1675
1676 self.buffer = numpy.zeros(self.bufferShape , dtype=numpy.complex)
1677 self.sshProfiles = numpy.zeros(self.profileShape, dtype=numpy.complex)
1678
1679 def run(self, dataOut, step, nsamples, code = None, repeat = None):
1680 dataOut.flagNoData = True
1681
1682 profileIndex = None
1683 #print(dataOut.getFreqRange(1)/1000.)
1684 #exit(1)
1685 if dataOut.flagDataAsBlock:
1686 dataOut.data = numpy.average(dataOut.data,axis=1)
1687 #print("jee")
1688 dataOut.flagDataAsBlock = False
1689 if not self.isConfig:
1690 self.setup(dataOut, step=step , nsamples=nsamples)
1691 #print("Setup done")
1692 self.isConfig = True
1693
1694 #DC_Hae = numpy.array([0.398+0.588j, -0.926+0.306j, -0.536-0.682j, -0.072+0.53j, 0.368-0.356j, 0.996+0.362j])
1695 #DC_Hae = numpy.array([ 0.001025 +0.0516375j, 0.03485 +0.20923125j, -0.168 -0.02720625j,
1696 #-0.1105375 +0.0707125j, -0.20309375-0.09670625j, 0.189775 +0.02716875j])*(-3.5)
1697
1698 #DC_Hae = numpy.array([ -32.26 +8.66j, -32.26 +8.66j])
1699
1700 #DC_Hae = numpy.array([-2.78500000e-01 -1.39175j, -6.63237294e+02+210.4268625j])
1701
1702 #print(dataOut.data[0,13:15])
1703 #dataOut.data = dataOut.data - DC_Hae[:,None]
1704 #print(dataOut.data[0,13:15])
1705 #exit(1)
1706 if code is not None:
1707 code = numpy.array(code)
1708 code_block = code
1709 '''
1710 roll = 0
1711 code = numpy.roll(code,roll,axis=0)
1712 code = numpy.reshape(code,(5,100,64))
1713 block = dataOut.CurrentBlock%5
1714
1715 day_dif = 0 #day_19_Oct_2021: 3
1716 code_block = code[block-1-day_dif,:,:]
1717 '''
1718 if repeat is not None:
1719 code_block = numpy.repeat(code_block, repeats=repeat, axis=1)
1720 #print(code_block.shape)
1721 for i in range(self.buffer.shape[1]):
1722
1723 if code is not None:
1724 self.buffer[:,i] = dataOut.data[:,i*self.step:i*self.step + self.nsamples]*code_block
1725
1726 else:
1727
1728 self.buffer[:,i] = dataOut.data[:,i*self.step:i*self.step + self.nsamples]#*code[dataOut.profileIndex,:]
1729
1730 #self.buffer[:,j,self.__nHeis-j*self.step - self.nheights:self.__nHeis-j*self.step] = numpy.flip(dataOut.data[:,j*self.step:j*self.step + self.nheights])
1731
1732 for j in range(self.buffer.shape[0]):
1733 self.sshProfiles[j] = numpy.transpose(self.buffer[j])
1734
1735 profileIndex = self.nsamples
1736 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1737 ippSeconds = (deltaHeight*1.0e-6)/(0.15)
1738 #print("ippSeconds, dH: ",ippSeconds,deltaHeight)
1739 try:
1740 if dataOut.concat_m is not None:
1741 ippSeconds= ippSeconds/float(dataOut.concat_m)
1742 #print "Profile concat %d"%dataOut.concat_m
1743 except:
1744 pass
1745
1746 dataOut.data = self.sshProfiles
1747 dataOut.flagNoData = False
1748 dataOut.heightList = numpy.arange(self.buffer.shape[1]) *self.step*deltaHeight + dataOut.heightList[0]
1749 dataOut.nProfiles = int(dataOut.nProfiles*self.nsamples)
1750
1751 dataOut.profileIndex = profileIndex
1752 dataOut.flagDataAsBlock = True
1753 dataOut.ippSeconds = ippSeconds
1754 dataOut.step = self.step
1755 #print(numpy.shape(dataOut.data))
1756 #exit(1)
1757
1758 return dataOut