clean Rayleigh working; execution time is high when using all pairs
joabAM -
r1391:fba03565a781
@@ -1,693 +1,695
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute, but
103 103 will sleep until it can be called:
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
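# A minimal usage sketch (not part of the original module): apply_throttle(N) returns a
# function that executes the callable passed to it at most once every N seconds, silently
# dropping earlier calls. Plot.__setup below wires this up as
# self.__throttle_plot = apply_throttle(self.throttle), and run() then calls
# self.__throttle_plot(self.__plot).
#
#   throttled_run = apply_throttle(5)              # at most one real call every 5 seconds
#   for _ in range(3):
#       throttled_run(lambda: print('refresh'))    # only the first call in the window runs
#       time.sleep(1)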
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new operation.
154 154 Children classes must be defined as follows:
155 155
156 156 ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 self.pf_axes = []
205 206 self.localtime = kwargs.pop('localtime', True)
206 207 self.show = kwargs.get('show', True)
207 208 self.save = kwargs.get('save', False)
208 209 self.save_period = kwargs.get('save_period', 0)
209 210 self.colormap = kwargs.get('colormap', self.colormap)
210 211 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
211 212 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
212 213 self.colormaps = kwargs.get('colormaps', None)
213 214 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
214 215 self.showprofile = kwargs.get('showprofile', False)
215 216 self.title = kwargs.get('wintitle', self.CODE.upper())
216 217 self.cb_label = kwargs.get('cb_label', None)
217 218 self.cb_labels = kwargs.get('cb_labels', None)
218 219 self.labels = kwargs.get('labels', None)
219 220 self.xaxis = kwargs.get('xaxis', 'frequency')
220 221 self.zmin = kwargs.get('zmin', None)
221 222 self.zmax = kwargs.get('zmax', None)
222 223 self.zlimits = kwargs.get('zlimits', None)
223 224 self.xmin = kwargs.get('xmin', None)
224 225 self.xmax = kwargs.get('xmax', None)
225 226 self.xrange = kwargs.get('xrange', 12)
226 227 self.xscale = kwargs.get('xscale', None)
227 228 self.ymin = kwargs.get('ymin', None)
228 229 self.ymax = kwargs.get('ymax', None)
229 230 self.yscale = kwargs.get('yscale', None)
230 231 self.xlabel = kwargs.get('xlabel', None)
231 232 self.attr_time = kwargs.get('attr_time', 'utctime')
232 233 self.attr_data = kwargs.get('attr_data', 'data_param')
233 234 self.decimation = kwargs.get('decimation', None)
234 235 self.oneFigure = kwargs.get('oneFigure', True)
235 236 self.width = kwargs.get('width', None)
236 237 self.height = kwargs.get('height', None)
237 238 self.colorbar = kwargs.get('colorbar', True)
238 239 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
239 240 self.channels = kwargs.get('channels', None)
240 241 self.titles = kwargs.get('titles', [])
241 242 self.polar = False
242 243 self.type = kwargs.get('type', 'iq')
243 244 self.grid = kwargs.get('grid', False)
244 245 self.pause = kwargs.get('pause', False)
245 246 self.save_code = kwargs.get('save_code', self.CODE)
246 247 self.throttle = kwargs.get('throttle', 0)
247 248 self.exp_code = kwargs.get('exp_code', None)
248 249 self.server = kwargs.get('server', False)
249 250 self.sender_period = kwargs.get('sender_period', 60)
250 251 self.tag = kwargs.get('tag', '')
251 252 self.height_index = kwargs.get('height_index', None)
252 253 self.__throttle_plot = apply_throttle(self.throttle)
253 254 code = self.attr_data if self.attr_data else self.CODE
254 255 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
256 self.tmin = kwargs.get('tmin', None)
255 257
256 258 if self.server:
257 259 if not self.server.startswith('tcp://'):
258 260 self.server = 'tcp://{}'.format(self.server)
259 261 log.success(
260 262 'Sending to server: {}'.format(self.server),
261 263 self.name
262 264 )
263 265
264 266 if isinstance(self.attr_data, str):
265 267 self.attr_data = [self.attr_data]
266 268
267 269 def __setup_plot(self):
268 270 '''
269 271 Common setup for all figures, here figures and axes are created
270 272 '''
271 273
272 274 self.setup()
273 275
274 276 self.time_label = 'LT' if self.localtime else 'UTC'
275 277
276 278 if self.width is None:
277 279 self.width = 8
278 280
279 281 self.figures = []
280 282 self.axes = []
281 283 self.cb_axes = []
282 284 self.pf_axes = []
283 285 self.cmaps = []
284 286
285 287 size = '15%' if self.ncols == 1 else '30%'
286 288 pad = '4%' if self.ncols == 1 else '8%'
287 289
288 290 if self.oneFigure:
289 291 if self.height is None:
290 292 self.height = 1.4 * self.nrows + 1
291 293 fig = plt.figure(figsize=(self.width, self.height),
292 294 edgecolor='k',
293 295 facecolor='w')
294 296 self.figures.append(fig)
295 297 for n in range(self.nplots):
296 298 ax = fig.add_subplot(self.nrows, self.ncols,
297 299 n + 1, polar=self.polar)
298 300 ax.tick_params(labelsize=8)
299 301 ax.firsttime = True
300 302 ax.index = 0
301 303 ax.press = None
302 304 self.axes.append(ax)
303 305 if self.showprofile:
304 306 cax = self.__add_axes(ax, size=size, pad=pad)
305 307 cax.tick_params(labelsize=8)
306 308 self.pf_axes.append(cax)
307 309 else:
308 310 if self.height is None:
309 311 self.height = 3
310 312 for n in range(self.nplots):
311 313 fig = plt.figure(figsize=(self.width, self.height),
312 314 edgecolor='k',
313 315 facecolor='w')
314 316 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
315 317 ax.tick_params(labelsize=8)
316 318 ax.firsttime = True
317 319 ax.index = 0
318 320 ax.press = None
319 321 self.figures.append(fig)
320 322 self.axes.append(ax)
321 323 if self.showprofile:
322 324 cax = self.__add_axes(ax, size=size, pad=pad)
323 325 cax.tick_params(labelsize=8)
324 326 self.pf_axes.append(cax)
325 327
326 328 for n in range(self.nrows):
327 329 if self.colormaps is not None:
328 330 cmap = plt.get_cmap(self.colormaps[n])
329 331 else:
330 332 cmap = plt.get_cmap(self.colormap)
331 333 cmap.set_bad(self.bgcolor, 1.)
332 334 self.cmaps.append(cmap)
333 335
334 336 def __add_axes(self, ax, size='30%', pad='8%'):
335 337 '''
336 338 Add new axes to the given figure
337 339 '''
338 340 divider = make_axes_locatable(ax)
339 341 nax = divider.new_horizontal(size=size, pad=pad)
340 342 ax.figure.add_axes(nax)
341 343 return nax
342 344
343 345 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
344 346 '''
345 347 Create a masked array for missing data
346 348 '''
347 349 if x_buffer.shape[0] < 2:
348 350 return x_buffer, y_buffer, z_buffer
349 351
350 352 deltas = x_buffer[1:] - x_buffer[0:-1]
351 353 x_median = numpy.median(deltas)
352 354
353 355 index = numpy.where(deltas > 5 * x_median)
354 356
355 357 if len(index[0]) != 0:
356 358 z_buffer[::, index[0], ::] = self.__missing
357 359 z_buffer = numpy.ma.masked_inside(z_buffer,
358 360 0.99 * self.__missing,
359 361 1.01 * self.__missing)
360 362
361 363 return x_buffer, y_buffer, z_buffer
362 364
363 365 def decimate(self):
364 366
365 367 # dx = int(len(self.x)/self.__MAXNUMX) + 1
366 368 dy = int(len(self.y) / self.decimation) + 1
367 369
368 370 # x = self.x[::dx]
369 371 x = self.x
370 372 y = self.y[::dy]
371 373 z = self.z[::, ::, ::dy]
372 374
373 375 return x, y, z
374 376
375 377 def format(self):
376 378 '''
377 379 Set min and max values, labels, ticks and titles
378 380 '''
379 381
380 382 for n, ax in enumerate(self.axes):
381 383 if ax.firsttime:
382 384 if self.xaxis != 'time':
383 385 xmin = self.xmin
384 386 xmax = self.xmax
385 387 else:
386 388 xmin = self.tmin
387 389 xmax = self.tmin + self.xrange*60*60
388 390 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
389 391 ax.xaxis.set_major_locator(LinearLocator(9))
390 392 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
391 393 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
392 394 ax.set_facecolor(self.bgcolor)
393 395 if self.xscale:
394 396 ax.xaxis.set_major_formatter(FuncFormatter(
395 397 lambda x, pos: '{0:g}'.format(x*self.xscale)))
396 398 if self.yscale:
397 399 ax.yaxis.set_major_formatter(FuncFormatter(
398 400 lambda x, pos: '{0:g}'.format(x*self.yscale)))
399 401 if self.xlabel is not None:
400 402 ax.set_xlabel(self.xlabel)
401 403 if self.ylabel is not None:
402 404 ax.set_ylabel(self.ylabel)
403 405 if self.showprofile:
404 406 self.pf_axes[n].set_ylim(ymin, ymax)
405 407 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
406 408 self.pf_axes[n].set_xlabel('dB')
407 409 self.pf_axes[n].grid(b=True, axis='x')
408 410 [tick.set_visible(False)
409 411 for tick in self.pf_axes[n].get_yticklabels()]
410 412 if self.colorbar:
411 413 ax.cbar = plt.colorbar(
412 414 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
413 415 ax.cbar.ax.tick_params(labelsize=8)
414 416 ax.cbar.ax.press = None
415 417 if self.cb_label:
416 418 ax.cbar.set_label(self.cb_label, size=8)
417 419 elif self.cb_labels:
418 420 ax.cbar.set_label(self.cb_labels[n], size=8)
419 421 else:
420 422 ax.cbar = None
421 423 ax.set_xlim(xmin, xmax)
422 424 ax.set_ylim(ymin, ymax)
423 425 ax.firsttime = False
424 426 if self.grid:
425 427 ax.grid(True)
426 428 if not self.polar:
427 429 ax.set_title('{} {} {}'.format(
428 430 self.titles[n],
429 431 self.getDateTime(self.data.max_time).strftime(
430 432 '%Y-%m-%d %H:%M:%S'),
431 433 self.time_label),
432 434 size=8)
433 435 else:
434 436 ax.set_title('{}'.format(self.titles[n]), size=8)
435 437 ax.set_ylim(0, 90)
436 438 ax.set_yticks(numpy.arange(0, 90, 20))
437 439 ax.yaxis.labelpad = 40
438 440
439 441 if self.firsttime:
440 442 for n, fig in enumerate(self.figures):
441 443 fig.subplots_adjust(**self.plots_adjust)
442 444 self.firsttime = False
443 445
444 446 def clear_figures(self):
445 447 '''
446 448 Reset axes for redraw plots
447 449 '''
448 450
449 451 for ax in self.axes+self.pf_axes+self.cb_axes:
450 452 ax.clear()
451 453 ax.firsttime = True
452 454 if hasattr(ax, 'cbar') and ax.cbar:
453 455 ax.cbar.remove()
454 456
455 457 def __plot(self):
456 458 '''
457 459 Main function to plot, format and save figures
458 460 '''
459 461
460 462 self.plot()
461 463 self.format()
462 464
463 465 for n, fig in enumerate(self.figures):
464 466 if self.nrows == 0 or self.nplots == 0:
465 467 log.warning('No data', self.name)
466 468 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
467 469 fig.canvas.manager.set_window_title(self.CODE)
468 470 continue
469 471
470 472 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
471 473 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
472 474 fig.canvas.draw()
473 475 if self.show:
474 476 fig.show()
475 477 figpause(0.01)
476 478
477 479 if self.save:
478 480 self.save_figure(n)
479 481
480 482 if self.server:
481 483 self.send_to_server()
482 484
483 485 def __update(self, dataOut, timestamp):
484 486 '''
485 487 '''
486 488
487 489 metadata = {
488 490 'yrange': dataOut.heightList,
489 491 'interval': dataOut.timeInterval,
490 492 'channels': dataOut.channelList
491 493 }
492 494
493 495 data, meta = self.update(dataOut)
494 496 metadata.update(meta)
495 497 self.data.update(data, timestamp, metadata)
496 498
497 499 def save_figure(self, n):
498 500 '''
499 501 '''
500 502
501 503 if (self.data.max_time - self.save_time) <= self.save_period:
502 504 return
503 505
504 506 self.save_time = self.data.max_time
505 507
506 508 fig = self.figures[n]
507 509
508 510 if self.throttle == 0:
509 511 figname = os.path.join(
510 512 self.save,
511 513 self.save_code,
512 514 '{}_{}.png'.format(
513 515 self.save_code,
514 516 self.getDateTime(self.data.max_time).strftime(
515 517 '%Y%m%d_%H%M%S'
516 518 ),
517 519 )
518 520 )
519 521 log.log('Saving figure: {}'.format(figname), self.name)
520 522 if not os.path.isdir(os.path.dirname(figname)):
521 523 os.makedirs(os.path.dirname(figname))
522 524 fig.savefig(figname)
523 525
524 526 figname = os.path.join(
525 527 self.save,
526 528 '{}_{}.png'.format(
527 529 self.save_code,
528 530 self.getDateTime(self.data.min_time).strftime(
529 531 '%Y%m%d'
530 532 ),
531 533 )
532 534 )
533 535
534 536 log.log('Saving figure: {}'.format(figname), self.name)
535 537 if not os.path.isdir(os.path.dirname(figname)):
536 538 os.makedirs(os.path.dirname(figname))
537 539 fig.savefig(figname)
538 540
539 541 def send_to_server(self):
540 542 '''
541 543 '''
542 544
543 545 if self.exp_code == None:
544 546 log.warning('Missing `exp_code`, skipping send to server...')
545 547
546 548 last_time = self.data.max_time
547 549 interval = last_time - self.sender_time
548 550 if interval < self.sender_period:
549 551 return
550 552
551 553 self.sender_time = last_time
552 554
553 555 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
554 556 for attr in attrs:
555 557 value = getattr(self, attr)
556 558 if value:
557 559 if isinstance(value, (numpy.float32, numpy.float64)):
558 560 value = round(float(value), 2)
559 561 self.data.meta[attr] = value
560 562 if self.colormap == 'jet':
561 563 self.data.meta['colormap'] = 'Jet'
562 564 elif 'RdBu' in self.colormap:
563 565 self.data.meta['colormap'] = 'RdBu'
564 566 else:
565 567 self.data.meta['colormap'] = 'Viridis'
566 568 self.data.meta['interval'] = int(interval)
567 569
568 570 self.sender_queue.append(last_time)
569 571
570 572 while True:
571 573 try:
572 574 tm = self.sender_queue.popleft()
573 575 except IndexError:
574 576 break
575 577 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
576 578 self.socket.send_string(msg)
577 579 socks = dict(self.poll.poll(2000))
578 580 if socks.get(self.socket) == zmq.POLLIN:
579 581 reply = self.socket.recv_string()
580 582 if reply == 'ok':
581 583 log.log("Response from server ok", self.name)
582 584 time.sleep(0.1)
583 585 continue
584 586 else:
585 587 log.warning(
586 588 "Malformed reply from server: {}".format(reply), self.name)
587 589 else:
588 590 log.warning(
589 591 "No response from server, retrying...", self.name)
590 592 self.sender_queue.appendleft(tm)
591 593 self.socket.setsockopt(zmq.LINGER, 0)
592 594 self.socket.close()
593 595 self.poll.unregister(self.socket)
594 596 self.socket = self.context.socket(zmq.REQ)
595 597 self.socket.connect(self.server)
596 598 self.poll.register(self.socket, zmq.POLLIN)
597 599 break
598 600
599 601 def setup(self):
600 602 '''
601 603 This method should be implemented in the child class, the following
602 604 attributes should be set:
603 605
604 606 self.nrows: number of rows
605 607 self.ncols: number of cols
606 608 self.nplots: number of plots (channels or pairs)
607 609 self.ylabel: label for Y axes
608 610 self.titles: list of axes title
609 611
610 612 '''
611 613 raise NotImplementedError
612 614
613 615 def plot(self):
614 616 '''
615 617 Must be defined in the child class, the actual plotting method
616 618 '''
617 619 raise NotImplementedError
618 620
619 621 def update(self, dataOut):
620 622 '''
621 623 Must be defined in the child class, update self.data with new data
622 624 '''
623 625
624 626 data = {
625 627 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
626 628 }
627 629 meta = {}
628 630
629 631 return data, meta
630 632
631 633 def run(self, dataOut, **kwargs):
632 634 '''
633 635 Main plotting routine
634 636 '''
635 637
636 638 if self.isConfig is False:
637 639 self.__setup(**kwargs)
638 640
639 641 if self.localtime:
640 642 self.getDateTime = datetime.datetime.fromtimestamp
641 643 else:
642 644 self.getDateTime = datetime.datetime.utcfromtimestamp
643 645
644 646 self.data.setup()
645 647 self.isConfig = True
646 648 if self.server:
647 649 self.context = zmq.Context()
648 650 self.socket = self.context.socket(zmq.REQ)
649 651 self.socket.connect(self.server)
650 652 self.poll = zmq.Poller()
651 653 self.poll.register(self.socket, zmq.POLLIN)
652 654
653 655 tm = getattr(dataOut, self.attr_time)
654 656
655 657 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
656 658 self.save_time = tm
657 659 self.__plot()
658 660 self.tmin += self.xrange*60*60
659 661 self.data.setup()
660 662 self.clear_figures()
661 663
662 664 self.__update(dataOut, tm)
663 665
664 666 if self.isPlotConfig is False:
665 667 self.__setup_plot()
666 668 self.isPlotConfig = True
667 669 if self.xaxis == 'time':
668 670 dt = self.getDateTime(tm)
669 671 if self.xmin is None:
670 672 self.tmin = tm
671 673 self.xmin = dt.hour
672 674 minutes = (self.xmin-int(self.xmin)) * 60
673 675 seconds = (minutes - int(minutes)) * 60
674 676 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
675 677 datetime.datetime(1970, 1, 1)).total_seconds()
676 678 if self.localtime:
677 679 self.tmin += time.timezone
678 680
679 681 if self.xmin is not None and self.xmax is not None:
680 682 self.xrange = self.xmax - self.xmin
681 683
682 684 if self.throttle == 0:
683 685 self.__plot()
684 686 else:
685 687 self.__throttle_plot(self.__plot)#, coerce=coerce)
686 688
687 689 def close(self):
688 690
689 691 if self.data and not self.data.flagNoData:
690 692 self.save_time = 0
691 693 self.__plot()
692 694 if self.data and not self.data.flagNoData and self.pause:
693 695 figpause(10)
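
The Plot base class in this file documents its subclassing contract: set CODE, colormap and plot_type, implement setup() (nrows, ncols, nplots, ylabel, titles), update() returning a (data, meta) pair, and plot(). The following is a minimal sketch of such a subclass, not part of the commit; the dataOut attribute data_example is assumed, and the drawing loop mirrors the RTIPlot pattern used elsewhere in this changeset.

class ExamplePlot(Plot):
    '''Hypothetical RTI-style plot; `data_example` is an assumed dataOut attribute.'''

    CODE = 'example'
    colormap = 'jet'
    plot_type = 'pcolorbuffer'

    def setup(self):
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = len(self.data.channels)
        self.nplots = self.nrows
        self.ylabel = 'Range [km]'
        self.titles = ['Example Ch{}'.format(ch) for ch in self.data.channels]

    def update(self, dataOut):
        # Return (data, meta); the base class adds yrange/interval/channels metadata.
        return {self.CODE: 10 * numpy.log10(dataOut.data_example)}, {}

    def plot(self):
        x, y = self.data.times, self.data.yrange
        z = numpy.ma.masked_invalid(self.data[self.CODE])
        for n, ax in enumerate(self.axes):
            if not ax.firsttime:
                ax.collections.remove(ax.collections[0])
            ax.plt = ax.pcolormesh(x, y, z[n].T, vmin=self.zmin, vmax=self.zmax,
                                   cmap=self.cmaps[n])

The base class run() drives the update/plot cycle, so nothing else is required from the subclass.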
@@ -1,357 +1,356
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 50 def update(self, dataOut):
51 51
52 52 data = {
53 53 'snr': 10*numpy.log10(dataOut.data_snr)
54 54 }
55 55
56 56 return data, {}
57 57
58 58 class DopplerPlot(RTIPlot):
59 59 '''
60 60 Plot for DOPPLER Data (1st moment)
61 61 '''
62 62
63 63 CODE = 'dop'
64 64 colormap = 'jet'
65 65
66 66 def update(self, dataOut):
67 67
68 68 data = {
69 69 'dop': 10*numpy.log10(dataOut.data_dop)
70 70 }
71 71
72 72 return data, {}
73 73
74 74 class PowerPlot(RTIPlot):
75 75 '''
76 76 Plot for Power Data (0 moment)
77 77 '''
78 78
79 79 CODE = 'pow'
80 80 colormap = 'jet'
81 81
82 82 def update(self, dataOut):
83 83
84 84 data = {
85 85 'pow': 10*numpy.log10(dataOut.data_pow)
86 86 }
87
87 print("data",data)
88 88 return data, {}
89 89
90 90 class SpectralWidthPlot(RTIPlot):
91 91 '''
92 92 Plot for Spectral Width Data (2nd moment)
93 93 '''
94 94
95 95 CODE = 'width'
96 96 colormap = 'jet'
97 97
98 98 def update(self, dataOut):
99 99
100 100 data = {
101 101 'width': dataOut.data_width
102 102 }
103 103
104 104 return data, {}
105 105
106 106 class SkyMapPlot(Plot):
107 107 '''
108 108 Plot for meteors detection data
109 109 '''
110 110
111 111 CODE = 'param'
112 112
113 113 def setup(self):
114 114
115 115 self.ncols = 1
116 116 self.nrows = 1
117 117 self.width = 7.2
118 118 self.height = 7.2
119 119 self.nplots = 1
120 120 self.xlabel = 'Zonal Zenith Angle (deg)'
121 121 self.ylabel = 'Meridional Zenith Angle (deg)'
122 122 self.polar = True
123 123 self.ymin = -180
124 124 self.ymax = 180
125 125 self.colorbar = False
126 126
127 127 def plot(self):
128 128
129 129 arrayParameters = numpy.concatenate(self.data['param'])
130 130 error = arrayParameters[:, -1]
131 131 indValid = numpy.where(error == 0)[0]
132 132 finalMeteor = arrayParameters[indValid, :]
133 133 finalAzimuth = finalMeteor[:, 3]
134 134 finalZenith = finalMeteor[:, 4]
135 135
136 136 x = finalAzimuth * numpy.pi / 180
137 137 y = finalZenith
138 138
139 139 ax = self.axes[0]
140 140
141 141 if ax.firsttime:
142 142 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
143 143 else:
144 144 ax.plot.set_data(x, y)
145 145
146 146 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
147 147 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
148 148 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
149 149 dt2,
150 150 len(x))
151 151 self.titles[0] = title
152 152
153 153
154 154 class GenericRTIPlot(Plot):
155 155 '''
156 156 Plot for data_xxxx object
157 157 '''
158 158
159 159 CODE = 'param'
160 160 colormap = 'viridis'
161 161 plot_type = 'pcolorbuffer'
162 162
163 163 def setup(self):
164 164 self.xaxis = 'time'
165 165 self.ncols = 1
166 166 self.nrows = self.data.shape('param')[0]
167 167 self.nplots = self.nrows
168 168 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
169 169
170 170 if not self.xlabel:
171 171 self.xlabel = 'Time'
172 172
173 173 self.ylabel = 'Height [km]'
174 174 if not self.titles:
175 175 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
176 176
177 177 def update(self, dataOut):
178 178
179 179 data = {
180 180 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
181 181 }
182 182
183 183 meta = {}
184 184
185 185 return data, meta
186 186
187 187 def plot(self):
188 188 # self.data.normalize_heights()
189 189 self.x = self.data.times
190 190 self.y = self.data.yrange
191 191 self.z = self.data['param']
192 192
193 193 self.z = numpy.ma.masked_invalid(self.z)
194 194
195 195 if self.decimation is None:
196 196 x, y, z = self.fill_gaps(self.x, self.y, self.z)
197 197 else:
198 198 x, y, z = self.fill_gaps(*self.decimate())
199 199
200 200 for n, ax in enumerate(self.axes):
201 201
202 202 self.zmax = self.zmax if self.zmax is not None else numpy.max(
203 203 self.z[n])
204 204 self.zmin = self.zmin if self.zmin is not None else numpy.min(
205 205 self.z[n])
206 206
207 207 if ax.firsttime:
208 208 if self.zlimits is not None:
209 209 self.zmin, self.zmax = self.zlimits[n]
210 210
211 211 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
212 212 vmin=self.zmin,
213 213 vmax=self.zmax,
214 214 cmap=self.cmaps[n]
215 215 )
216 216 else:
217 217 if self.zlimits is not None:
218 218 self.zmin, self.zmax = self.zlimits[n]
219 219 ax.collections.remove(ax.collections[0])
220 220 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
221 221 vmin=self.zmin,
222 222 vmax=self.zmax,
223 223 cmap=self.cmaps[n]
224 224 )
225 225
226 226
227 227 class PolarMapPlot(Plot):
228 228 '''
229 229 Plot for weather radar
230 230 '''
231 231
232 232 CODE = 'param'
233 233 colormap = 'seismic'
234 234
235 235 def setup(self):
236 236 self.ncols = 1
237 237 self.nrows = 1
238 238 self.width = 9
239 239 self.height = 8
240 240 self.mode = self.data.meta['mode']
241 241 if self.channels is not None:
242 242 self.nplots = len(self.channels)
243 243 self.nrows = len(self.channels)
244 244 else:
245 245 self.nplots = self.data.shape(self.CODE)[0]
246 246 self.nrows = self.nplots
247 247 self.channels = list(range(self.nplots))
248 248 if self.mode == 'E':
249 249 self.xlabel = 'Longitude'
250 250 self.ylabel = 'Latitude'
251 251 else:
252 252 self.xlabel = 'Range (km)'
253 253 self.ylabel = 'Height (km)'
254 254 self.bgcolor = 'white'
255 255 self.cb_labels = self.data.meta['units']
256 256 self.lat = self.data.meta['latitude']
257 257 self.lon = self.data.meta['longitude']
258 258 self.xmin, self.xmax = float(
259 259 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
260 260 self.ymin, self.ymax = float(
261 261 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
262 262 # self.polar = True
263 263
264 264 def plot(self):
265 265
266 266 for n, ax in enumerate(self.axes):
267 267 data = self.data['param'][self.channels[n]]
268 268
269 269 zeniths = numpy.linspace(
270 270 0, self.data.meta['max_range'], data.shape[1])
271 271 if self.mode == 'E':
272 272 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
273 273 r, theta = numpy.meshgrid(zeniths, azimuths)
274 274 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 275 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
276 276 x = km2deg(x) + self.lon
277 277 y = km2deg(y) + self.lat
278 278 else:
279 279 azimuths = numpy.radians(self.data.yrange)
280 280 r, theta = numpy.meshgrid(zeniths, azimuths)
281 281 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
282 282 self.y = zeniths
283 283
284 284 if ax.firsttime:
285 285 if self.zlimits is not None:
286 286 self.zmin, self.zmax = self.zlimits[n]
287 287 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 288 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 289 vmin=self.zmin,
290 290 vmax=self.zmax,
291 291 cmap=self.cmaps[n])
292 292 else:
293 293 if self.zlimits is not None:
294 294 self.zmin, self.zmax = self.zlimits[n]
295 295 ax.collections.remove(ax.collections[0])
296 296 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 297 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 298 vmin=self.zmin,
299 299 vmax=self.zmax,
300 300 cmap=self.cmaps[n])
301 301
302 302 if self.mode == 'A':
303 303 continue
304 304
305 305 # plot district names
306 306 f = open('/data/workspace/schain_scripts/distrito.csv')
307 307 for line in f:
308 308 label, lon, lat = [s.strip() for s in line.split(',') if s]
309 309 lat = float(lat)
310 310 lon = float(lon)
311 311 # ax.plot(lon, lat, '.b', ms=2)
312 312 ax.text(lon, lat, label.decode('utf8'), ha='center',
313 313 va='bottom', size='8', color='black')
314 314
315 315 # plot boundaries (limites)
316 316 limites = []
317 317 tmp = []
318 318 for line in open('/data/workspace/schain_scripts/lima.csv'):
319 319 if '#' in line:
320 320 if tmp:
321 321 limites.append(tmp)
322 322 tmp = []
323 323 continue
324 324 values = line.strip().split(',')
325 325 tmp.append((float(values[0]), float(values[1])))
326 326 for points in limites:
327 327 ax.add_patch(
328 328 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
329 329
330 330 # plot river basins (cuencas)
331 331 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
332 332 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
333 333 values = [line.strip().split(',') for line in f]
334 334 points = [(float(s[0]), float(s[1])) for s in values]
335 335 ax.add_patch(Polygon(points, ec='b', fc='none'))
336 336
337 337 # plot grid
338 338 for r in (15, 30, 45, 60):
339 339 ax.add_artist(plt.Circle((self.lon, self.lat),
340 340 km2deg(r), color='0.6', fill=False, lw=0.2))
341 341 ax.text(
342 342 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 343 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
344 344 '{}km'.format(r),
345 345 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 346
347 347 if self.mode == 'E':
348 348 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
349 349 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
350 350 else:
351 351 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
352 352 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
353 353
354 354 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
355 355 self.titles = ['{} {}'.format(
356 356 self.data.parameters[x], title) for x in self.channels]
357
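
A short worked sketch (values are illustrative, not from the source) of the geographic helpers defined at the top of this file and in jroplot_base above: km2deg converts a ground distance in km to degrees of arc using EARTH_RADIUS, and ll2xy returns the zonal/meridional offset in km between two lat/lon points via a haversine distance and bearing.

# Illustrative use of km2deg and ll2xy; the coordinates are arbitrary example values.
site_lat, site_lon = -11.95, -76.87        # reference point (assumed for the example)
pt_lat, pt_lon = -12.05, -76.95

print(km2deg(15.0))                        # ~0.135 deg of arc for 15 km on the ground
x_km, y_km = ll2xy(site_lat, site_lon, pt_lat, pt_lon)
print(x_km, y_km)                          # eastward and northward offsets in km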
@@ -1,711 +1,712
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13 13
14 14
15 15 class SpectraPlot(Plot):
16 16 '''
17 17 Plot for Spectra data
18 18 '''
19 19
20 20 CODE = 'spc'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
24 channelList = None
24 channelList = []
25 25
26 26 def setup(self):
27 27 self.nplots = len(self.data.channels)
28 28 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
29 29 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
30 30 self.height = 2.6 * self.nrows
31 31
32 32 self.cb_label = 'dB'
33 33 if self.showprofile:
34 34 self.width = 4 * self.ncols
35 35 else:
36 36 self.width = 3.5 * self.ncols
37 37 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
38 38 self.ylabel = 'Range [km]'
39 39
40 40 def update(self, dataOut):
41 41 if len(self.channelList) == 0:
42 42 self.channelList = dataOut.channelList
43 43 data = {}
44 44 meta = {}
45 45 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
46 46 data['spc'] = spc
47 47 data['rti'] = dataOut.getPower()
48 48 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
49 49 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
50 50 if self.CODE == 'spc_moments':
51 51 data['moments'] = dataOut.moments
52 52
53 53 return data, meta
54 54
55 55 def plot(self):
56 56 if self.xaxis == "frequency":
57 57 x = self.data.xrange[0]
58 58 self.xlabel = "Frequency (kHz)"
59 59 elif self.xaxis == "time":
60 60 x = self.data.xrange[1]
61 61 self.xlabel = "Time (ms)"
62 62 else:
63 63 x = self.data.xrange[2]
64 64 self.xlabel = "Velocity (m/s)"
65 65
66 66 if self.CODE == 'spc_moments':
67 67 x = self.data.xrange[2]
68 68 self.xlabel = "Velocity (m/s)"
69 69
70 70 self.titles = []
71 71
72 72 y = self.data.yrange
73 73 self.y = y
74 74
75 75 data = self.data[-1]
76 76 z = data['spc']
77 77
78 78 for n, ax in enumerate(self.axes):
79 79 noise = data['noise'][n]
80 80 if self.CODE == 'spc_moments':
81 81 mean = data['moments'][n, 1]
82 82 if ax.firsttime:
83 83 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
84 84 self.xmin = self.xmin if self.xmin else -self.xmax
85 85 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
86 86 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
87 87 ax.plt = ax.pcolormesh(x, y, z[n].T,
88 88 vmin=self.zmin,
89 89 vmax=self.zmax,
90 90 cmap=plt.get_cmap(self.colormap)
91 91 )
92 92
93 93 if self.showprofile:
94 94 ax.plt_profile = self.pf_axes[n].plot(
95 95 data['rti'][n], y)[0]
96 96 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
97 97 color="k", linestyle="dashed", lw=1)[0]
98 98 if self.CODE == 'spc_moments':
99 99 ax.plt_mean = ax.plot(mean, y, color='k')[0]
100 100 else:
101 101 ax.plt.set_array(z[n].T.ravel())
102 102 if self.showprofile:
103 103 ax.plt_profile.set_data(data['rti'][n], y)
104 104 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
105 105 if self.CODE == 'spc_moments':
106 106 ax.plt_mean.set_data(mean, y)
107 107 self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
108 108
109 109
110 110 class CrossSpectraPlot(Plot):
111 111
112 112 CODE = 'cspc'
113 113 colormap = 'jet'
114 114 plot_type = 'pcolor'
115 115 zmin_coh = None
116 116 zmax_coh = None
117 117 zmin_phase = None
118 118 zmax_phase = None
119 119
120 120 def setup(self):
121 121
122 122 self.ncols = 4
123 123 self.nplots = len(self.data.pairs) * 2
124 124 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
125 125 self.width = 3.1 * self.ncols
126 126 self.height = 2.6 * self.nrows
127 127 self.ylabel = 'Range [km]'
128 128 self.showprofile = False
129 129 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
130 130
131 131 def update(self, dataOut):
132 132
133 133 data = {}
134 134 meta = {}
135 135
136 136 spc = dataOut.data_spc
137 137 cspc = dataOut.data_cspc
138 138 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
139 139 meta['pairs'] = dataOut.pairsList
140 140
141 141 tmp = []
142 142
143 143 for n, pair in enumerate(meta['pairs']):
144 144 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
145 145 coh = numpy.abs(out)
146 146 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
147 147 tmp.append(coh)
148 148 tmp.append(phase)
149 149
150 150 data['cspc'] = numpy.array(tmp)
151 151
152 152 return data, meta
153 153
154 154 def plot(self):
155 155
156 156 if self.xaxis == "frequency":
157 157 x = self.data.xrange[0]
158 158 self.xlabel = "Frequency (kHz)"
159 159 elif self.xaxis == "time":
160 160 x = self.data.xrange[1]
161 161 self.xlabel = "Time (ms)"
162 162 else:
163 163 x = self.data.xrange[2]
164 164 self.xlabel = "Velocity (m/s)"
165 165
166 166 self.titles = []
167 167
168 168 y = self.data.yrange
169 169 self.y = y
170 170
171 171 data = self.data[-1]
172 172 cspc = data['cspc']
173 173
174 174 for n in range(len(self.data.pairs)):
175 175 pair = self.data.pairs[n]
176 176 coh = cspc[n*2]
177 177 phase = cspc[n*2+1]
178 178 ax = self.axes[2 * n]
179 179 if ax.firsttime:
180 180 ax.plt = ax.pcolormesh(x, y, coh.T,
181 181 vmin=0,
182 182 vmax=1,
183 183 cmap=plt.get_cmap(self.colormap_coh)
184 184 )
185 185 else:
186 186 ax.plt.set_array(coh.T.ravel())
187 187 self.titles.append(
188 188 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
189 189
190 190 ax = self.axes[2 * n + 1]
191 191 if ax.firsttime:
192 192 ax.plt = ax.pcolormesh(x, y, phase.T,
193 193 vmin=-180,
194 194 vmax=180,
195 195 cmap=plt.get_cmap(self.colormap_phase)
196 196 )
197 197 else:
198 198 ax.plt.set_array(phase.T.ravel())
199 199 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
200 200
201 201
202 202 class RTIPlot(Plot):
203 203 '''
204 204 Plot for RTI data
205 205 '''
206 206
207 207 CODE = 'rti'
208 208 colormap = 'jet'
209 209 plot_type = 'pcolorbuffer'
210 210 titles = None
211 channelList = None
211 channelList = []
212 212
213 213 def setup(self):
214 214 self.xaxis = 'time'
215 215 self.ncols = 1
216 print("dataChannels ",self.data.channels)
216 217 self.nrows = len(self.data.channels)
217 218 self.nplots = len(self.data.channels)
218 219 self.ylabel = 'Range [km]'
219 220 self.xlabel = 'Time'
220 221 self.cb_label = 'dB'
221 222 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
222 223 self.titles = ['{} Channel {}'.format(
223 224 self.CODE.upper(), x) for x in range(self.nplots)]
224
225 print("SETUP")
225 226 def update(self, dataOut):
226 if self.channelList == None:
227 if len(self.channelList) == 0:
227 228 self.channelList = dataOut.channelList
228 229 data = {}
229 230 meta = {}
230 231 data['rti'] = dataOut.getPower()
231 232 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
232 233
233 234 return data, meta
234 235
235 236 def plot(self):
236 237 self.x = self.data.times
237 238 self.y = self.data.yrange
238 239 self.z = self.data[self.CODE]
239 240 self.z = numpy.ma.masked_invalid(self.z)
240 241 if self.channelList != None:
241 242 self.titles = ['{} Channel {}'.format(
242 243 self.CODE.upper(), x) for x in self.channelList]
243 244
244 245 if self.decimation is None:
245 246 x, y, z = self.fill_gaps(self.x, self.y, self.z)
246 247 else:
247 248 x, y, z = self.fill_gaps(*self.decimate())
248 249
249 250 for n, ax in enumerate(self.axes):
250 251 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
251 252 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
252 253 data = self.data[-1]
253 254 if ax.firsttime:
254 255 ax.plt = ax.pcolormesh(x, y, z[n].T,
255 256 vmin=self.zmin,
256 257 vmax=self.zmax,
257 258 cmap=plt.get_cmap(self.colormap)
258 259 )
259 260 if self.showprofile:
260 261 ax.plot_profile = self.pf_axes[n].plot(
261 262 data['rti'][n], self.y)[0]
262 263 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
263 264 color="k", linestyle="dashed", lw=1)[0]
264 265 else:
265 266 ax.collections.remove(ax.collections[0])
266 267 ax.plt = ax.pcolormesh(x, y, z[n].T,
267 268 vmin=self.zmin,
268 269 vmax=self.zmax,
269 270 cmap=plt.get_cmap(self.colormap)
270 271 )
271 272 if self.showprofile:
272 273 ax.plot_profile.set_data(data['rti'][n], self.y)
273 274 ax.plot_noise.set_data(numpy.repeat(
274 275 data['noise'][n], len(self.y)), self.y)
275 276
276 277
277 278 class CoherencePlot(RTIPlot):
278 279 '''
279 280 Plot for Coherence data
280 281 '''
281 282
282 283 CODE = 'coh'
283 284
284 285 def setup(self):
285 286 self.xaxis = 'time'
286 287 self.ncols = 1
287 288 self.nrows = len(self.data.pairs)
288 289 self.nplots = len(self.data.pairs)
289 290 self.ylabel = 'Range [km]'
290 291 self.xlabel = 'Time'
291 292 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
292 293 if self.CODE == 'coh':
293 294 self.cb_label = ''
294 295 self.titles = [
295 296 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
296 297 else:
297 298 self.cb_label = 'Degrees'
298 299 self.titles = [
299 300 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
300 301
301 302 def update(self, dataOut):
302 303
303 304 data = {}
304 305 meta = {}
305 306 data['coh'] = dataOut.getCoherence()
306 307 meta['pairs'] = dataOut.pairsList
307 308
308 309 return data, meta
309 310
310 311 class PhasePlot(CoherencePlot):
311 312 '''
312 313 Plot for Phase map data
313 314 '''
314 315
315 316 CODE = 'phase'
316 317 colormap = 'seismic'
317 318
318 319 def update(self, dataOut):
319 320
320 321 data = {}
321 322 meta = {}
322 323 data['phase'] = dataOut.getCoherence(phase=True)
323 324 meta['pairs'] = dataOut.pairsList
324 325
325 326 return data, meta
326 327
327 328 class NoisePlot(Plot):
328 329 '''
329 330 Plot for noise
330 331 '''
331 332
332 333 CODE = 'noise'
333 334 plot_type = 'scatterbuffer'
334 335
335 336 def setup(self):
336 337 self.xaxis = 'time'
337 338 self.ncols = 1
338 339 self.nrows = 1
339 340 self.nplots = 1
340 341 self.ylabel = 'Intensity [dB]'
341 342 self.xlabel = 'Time'
342 343 self.titles = ['Noise']
343 344 self.colorbar = False
344 345 self.plots_adjust.update({'right': 0.85 })
345 346
346 347 def update(self, dataOut):
347 348
348 349 data = {}
349 350 meta = {}
350 351 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
351 352 meta['yrange'] = numpy.array([])
352 353
353 354 return data, meta
354 355
355 356 def plot(self):
356 357
357 358 x = self.data.times
358 359 xmin = self.data.min_time
359 360 xmax = xmin + self.xrange * 60 * 60
360 361 Y = self.data['noise']
361 362
362 363 if self.axes[0].firsttime:
363 364 self.ymin = numpy.nanmin(Y) - 5
364 365 self.ymax = numpy.nanmax(Y) + 5
365 366 for ch in self.data.channels:
366 367 y = Y[ch]
367 368 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
368 369 plt.legend(bbox_to_anchor=(1.18, 1.0))
369 370 else:
370 371 for ch in self.data.channels:
371 372 y = Y[ch]
372 373 self.axes[0].lines[ch].set_data(x, y)
373 374
374 375
375 376 class PowerProfilePlot(Plot):
376 377
377 378 CODE = 'pow_profile'
378 379 plot_type = 'scatter'
379 380
380 381 def setup(self):
381 382
382 383 self.ncols = 1
383 384 self.nrows = 1
384 385 self.nplots = 1
385 386 self.height = 4
386 387 self.width = 3
387 388 self.ylabel = 'Range [km]'
388 389 self.xlabel = 'Intensity [dB]'
389 390 self.titles = ['Power Profile']
390 391 self.colorbar = False
391 392
392 393 def update(self, dataOut):
393 394
394 395 data = {}
395 396 meta = {}
396 397 data[self.CODE] = dataOut.getPower()
397 398
398 399 return data, meta
399 400
400 401 def plot(self):
401 402
402 403 y = self.data.yrange
403 404 self.y = y
404 405
405 406 x = self.data[-1][self.CODE]
406 407
407 408 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
408 409 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
409 410
410 411 if self.axes[0].firsttime:
411 412 for ch in self.data.channels:
412 413 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
413 414 plt.legend()
414 415 else:
415 416 for ch in self.data.channels:
416 417 self.axes[0].lines[ch].set_data(x[ch], y)
417 418
418 419
419 420 class SpectraCutPlot(Plot):
420 421
421 422 CODE = 'spc_cut'
422 423 plot_type = 'scatter'
423 424 buffering = False
424 425
425 426 def setup(self):
426 427
427 428 self.nplots = len(self.data.channels)
428 429 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
429 430 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
430 431 self.width = 3.4 * self.ncols + 1.5
431 432 self.height = 3 * self.nrows
432 433 self.ylabel = 'Power [dB]'
433 434 self.colorbar = False
434 435 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
435 436
436 437 def update(self, dataOut):
437 438
438 439 data = {}
439 440 meta = {}
440 441 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
441 442 data['spc'] = spc
442 443 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
443 444
444 445 return data, meta
445 446
446 447 def plot(self):
447 448 if self.xaxis == "frequency":
448 449 x = self.data.xrange[0][1:]
449 450 self.xlabel = "Frequency (kHz)"
450 451 elif self.xaxis == "time":
451 452 x = self.data.xrange[1]
452 453 self.xlabel = "Time (ms)"
453 454 else:
454 455 x = self.data.xrange[2]
455 456 self.xlabel = "Velocity (m/s)"
456 457
457 458 self.titles = []
458 459
459 460 y = self.data.yrange
460 461 z = self.data[-1]['spc']
461 462
462 463 if self.height_index:
463 464 index = numpy.array(self.height_index)
464 465 else:
465 466 index = numpy.arange(0, len(y), int((len(y))/9))
466 467
467 468 for n, ax in enumerate(self.axes):
468 469 if ax.firsttime:
469 470 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
470 471 self.xmin = self.xmin if self.xmin else -self.xmax
471 472 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
472 473 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
473 474 ax.plt = ax.plot(x, z[n, :, index].T)
474 475 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
475 476 self.figures[0].legend(ax.plt, labels, loc='center right')
476 477 else:
477 478 for i, line in enumerate(ax.plt):
478 479 line.set_data(x, z[n, :, index[i]])
479 480 self.titles.append('CH {}'.format(n))
480 481
481 482
482 483 class BeaconPhase(Plot):
483 484
484 485 __isConfig = None
485 486 __nsubplots = None
486 487
487 488 PREFIX = 'beacon_phase'
488 489
489 490 def __init__(self):
490 491 Plot.__init__(self)
491 492 self.timerange = 24*60*60
492 493 self.isConfig = False
493 494 self.__nsubplots = 1
494 495 self.counter_imagwr = 0
495 496 self.WIDTH = 800
496 497 self.HEIGHT = 400
497 498 self.WIDTHPROF = 120
498 499 self.HEIGHTPROF = 0
499 500 self.xdata = None
500 501 self.ydata = None
501 502
502 503 self.PLOT_CODE = BEACON_CODE
503 504
504 505 self.FTP_WEI = None
505 506 self.EXP_CODE = None
506 507 self.SUB_EXP_CODE = None
507 508 self.PLOT_POS = None
508 509
509 510 self.filename_phase = None
510 511
511 512 self.figfile = None
512 513
513 514 self.xmin = None
514 515 self.xmax = None
515 516
516 517 def getSubplots(self):
517 518
518 519 ncol = 1
519 520 nrow = 1
520 521
521 522 return nrow, ncol
522 523
523 524 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
524 525
525 526 self.__showprofile = showprofile
526 527 self.nplots = nplots
527 528
528 529 ncolspan = 7
529 530 colspan = 6
530 531 self.__nsubplots = 2
531 532
532 533 self.createFigure(id = id,
533 534 wintitle = wintitle,
534 535 widthplot = self.WIDTH+self.WIDTHPROF,
535 536 heightplot = self.HEIGHT+self.HEIGHTPROF,
536 537 show=show)
537 538
538 539 nrow, ncol = self.getSubplots()
539 540
540 541 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
541 542
542 543 def save_phase(self, filename_phase):
543 544 f = open(filename_phase,'w+')
544 545 f.write('\n\n')
545 546 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
546 547 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
547 548 f.close()
548 549
549 550 def save_data(self, filename_phase, data, data_datetime):
550 551 f=open(filename_phase,'a')
551 552 timetuple_data = data_datetime.timetuple()
552 553 day = str(timetuple_data.tm_mday)
553 554 month = str(timetuple_data.tm_mon)
554 555 year = str(timetuple_data.tm_year)
555 556 hour = str(timetuple_data.tm_hour)
556 557 minute = str(timetuple_data.tm_min)
557 558 second = str(timetuple_data.tm_sec)
558 559 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
559 560 f.close()
560 561
561 562 def plot(self):
562 563 log.warning('TODO: Not yet implemented...')
563 564
564 565 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
565 566 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
566 567 timerange=None,
567 568 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
568 569 server=None, folder=None, username=None, password=None,
569 570 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
570 571
571 572 if dataOut.flagNoData:
572 573 return dataOut
573 574
574 575 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
575 576 return
576 577
577 578 if pairsList == None:
578 579 pairsIndexList = dataOut.pairsIndexList[:10]
579 580 else:
580 581 pairsIndexList = []
581 582 for pair in pairsList:
582 583 if pair not in dataOut.pairsList:
583 584 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
584 585 pairsIndexList.append(dataOut.pairsList.index(pair))
585 586
586 587 if pairsIndexList == []:
587 588 return
588 589
589 590 # if len(pairsIndexList) > 4:
590 591 # pairsIndexList = pairsIndexList[0:4]
591 592
592 593 hmin_index = None
593 594 hmax_index = None
594 595
595 596 if hmin != None and hmax != None:
596 597 indexes = numpy.arange(dataOut.nHeights)
597 598 hmin_list = indexes[dataOut.heightList >= hmin]
598 599 hmax_list = indexes[dataOut.heightList <= hmax]
599 600
600 601 if hmin_list.any():
601 602 hmin_index = hmin_list[0]
602 603
603 604 if hmax_list.any():
604 605 hmax_index = hmax_list[-1]+1
605 606
606 607 x = dataOut.getTimeRange()
607 608
608 609 thisDatetime = dataOut.datatime
609 610
610 611 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
611 612 xlabel = "Local Time"
612 613 ylabel = "Phase (degrees)"
613 614
614 615 update_figfile = False
615 616
616 617 nplots = len(pairsIndexList)
617 618 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
618 619 phase_beacon = numpy.zeros(len(pairsIndexList))
619 620 for i in range(nplots):
620 621 pair = dataOut.pairsList[pairsIndexList[i]]
621 622 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
622 623 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
623 624 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
624 625 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
625 626 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
626 627
627 628 if dataOut.beacon_heiIndexList:
628 629 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
629 630 else:
630 631 phase_beacon[i] = numpy.average(phase)
631 632
632 633 if not self.isConfig:
633 634
634 635 nplots = len(pairsIndexList)
635 636
636 637 self.setup(id=id,
637 638 nplots=nplots,
638 639 wintitle=wintitle,
639 640 showprofile=showprofile,
640 641 show=show)
641 642
642 643 if timerange != None:
643 644 self.timerange = timerange
644 645
645 646 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
646 647
647 648 if ymin == None: ymin = 0
648 649 if ymax == None: ymax = 360
649 650
650 651 self.FTP_WEI = ftp_wei
651 652 self.EXP_CODE = exp_code
652 653 self.SUB_EXP_CODE = sub_exp_code
653 654 self.PLOT_POS = plot_pos
654 655
655 656 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
656 657 self.isConfig = True
657 658 self.figfile = figfile
658 659 self.xdata = numpy.array([])
659 660 self.ydata = numpy.array([])
660 661
661 662 update_figfile = True
662 663
663 664 #open file beacon phase
664 665 path = '%s%03d' %(self.PREFIX, self.id)
665 666 beacon_file = os.path.join(path,'%s.txt'%self.name)
666 667 self.filename_phase = os.path.join(figpath,beacon_file)
667 668 #self.save_phase(self.filename_phase)
668 669
669 670
670 671 #store data beacon phase
671 672 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
672 673
673 674 self.setWinTitle(title)
674 675
675 676
676 677 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
677 678
678 679 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
679 680
680 681 axes = self.axesList[0]
681 682
682 683 self.xdata = numpy.hstack((self.xdata, x[0:1]))
683 684
684 685 if len(self.ydata)==0:
685 686 self.ydata = phase_beacon.reshape(-1,1)
686 687 else:
687 688 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
688 689
689 690
690 691 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
691 692 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
692 693 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
693 694 XAxisAsTime=True, grid='both'
694 695 )
695 696
696 697 self.draw()
697 698
698 699 if dataOut.ltctime >= self.xmax:
699 700 self.counter_imagwr = wr_period
700 701 self.isConfig = False
701 702 update_figfile = True
702 703
703 704 self.save(figpath=figpath,
704 705 figfile=figfile,
705 706 save=save,
706 707 ftp=ftp,
707 708 wr_period=wr_period,
708 709 thisDatetime=thisDatetime,
709 710 update_figfile=update_figfile)
710 711
711 712 return dataOut
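
The commit message mentions the elevated run time when every channel pair is processed; CrossSpectraPlot.update above is the per-pair step, normalizing each cross-spectrum by the two auto-spectra to obtain coherence and phase. A self-contained sketch of that computation with synthetic arrays (shapes, pair list and variable names are assumptions, not taken from the source):

import numpy

# Assumed shapes: 4 channels, all 6 pairs, 64 spectral bins, 100 heights.
nchan, nfft, nheis = 4, 64, 100
pairs = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
spc = numpy.random.rand(nchan, nfft, nheis) + 1.0                # auto-spectra (real, > 0)
cspc = (numpy.random.rand(len(pairs), nfft, nheis)
        + 1j * numpy.random.rand(len(pairs), nfft, nheis))       # cross-spectra (complex)

out = numpy.empty((2 * len(pairs), nfft, nheis))
for n, (a, b) in enumerate(pairs):
    norm = cspc[n] / numpy.sqrt(spc[a] * spc[b])                 # normalized cross-spectrum
    out[2 * n] = numpy.abs(norm)                                 # coherence
    out[2 * n + 1] = numpy.degrees(numpy.angle(norm))            # phase in degrees

The cost grows with the number of pairs (up to nchan*(nchan-1)/2 of them), which is consistent with the slowdown noted in the commit message when all pairs are enabled.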
1 NO CONTENT: modified file
@@ -1,663 +1,661
1 1 '''
2 2 Created on Sep 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28
29 29
30 30 class AMISRReader(ProcessingUnit):
31 31 '''
32 32 classdocs
33 33 '''
34 34
35 35 def __init__(self):
36 36 '''
37 37 Constructor
38 38 '''
39 39
40 40 ProcessingUnit.__init__(self)
41 41
42 42 self.set = None
43 43 self.subset = None
44 44 self.extension_file = '.h5'
45 45 self.dtc_str = 'dtc'
46 46 self.dtc_id = 0
47 47 self.status = True
48 48 self.isConfig = False
49 49 self.dirnameList = []
50 50 self.filenameList = []
51 51 self.fileIndex = None
52 52 self.flagNoMoreFiles = False
53 53 self.flagIsNewFile = 0
54 54 self.filename = ''
55 55 self.amisrFilePointer = None
56 56 self.realBeamCode = []
57 57 self.beamCodeMap = None
58 58 self.azimuthList = []
59 59 self.elevationList = []
60 60 self.dataShape = None
61 61
62 62
63 63
64 64 self.profileIndex = 0
65 65
66 66
67 67 self.beamCodeByFrame = None
68 68 self.radacTimeByFrame = None
69 69
70 70 self.dataset = None
71 71
72 72 self.__firstFile = True
73 73
74 74 self.buffer = None
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is really necessary create the output object in the initializer
81 81 self.dataOut = Voltage()
82 82 self.dataOut.error=False
83 83
84 84
85 85 def setup(self,path=None,
86 86 startDate=None,
87 87 endDate=None,
88 88 startTime=None,
89 89 endTime=None,
90 90 walk=True,
91 91 timezone='ut',
92 92 all=0,
93 93 code = None,
94 94 nCode = 0,
95 95 nBaud = 0,
96 96 online=False):
97 97
98 98
99 99
100 100 self.timezone = timezone
101 101 self.all = all
102 102 self.online = online
103 103
104 104 self.code = code
105 105 self.nCode = int(nCode)
106 106 self.nBaud = int(nBaud)
107 107
108 108
109 109
110 110 #self.findFiles()
111 111 if not(online):
112 112 #Offline file search
113 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
114 114 else:
115 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
116 116
117 117 if not(self.filenameList):
118 118 print("There is no files into the folder: %s"%(path))
119 119 sys.exit()
120 120
121 121 self.fileIndex = 0
122 122
123 123 self.readNextFile(online)
124 124
125 125 '''
126 126 Add code
127 127 '''
128 128 self.isConfig = True
129 129 # print("Setup Done")
130 130 pass
131 131
132 132
133 133 def readAMISRHeader(self,fp):
134 134
135 135 if self.isConfig and (not self.flagNoMoreFiles):
136 136 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
137 137 if self.dataShape != newShape and newShape != None:
138 138 print("\nNEW FILE HAS A DIFFERENT SHAPE")
139 139 print(self.dataShape,newShape,"\n")
140 140 return 0
141 141 else:
142 142 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
143 143
144 144
145 145 header = 'Raw11/Data/RadacHeader'
146 146 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
147 147 if (self.startDate> datetime.date(2021, 7, 15)): #The beam pointing (Apuntes) extraction format changed on the 17th
148 148 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
149 149 self.trueBeams = self.beamcodeFile.split("\n")
150 150 self.trueBeams.pop()#remove last
151 151 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
152 152 self.beamCode = [int(x, 16) for x in self.realBeamCode]
153 153 else:
154 154 _beamCode= fp.get('Raw11/Data/Beamcodes') #use the method prior to the beam pointing change
155 155 self.beamCode = _beamCode[0,:]
156 156
157 157 if self.beamCodeMap == None:
158 158 self.beamCodeMap = fp['Setup/BeamcodeMap']
159 159 for beam in self.beamCode:
160 160 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
161 161 beamAziElev = beamAziElev[0].squeeze()
162 162 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
163 163 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
164 164 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
165 165 #print(self.beamCode)
166 166 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
167 167 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
168 168 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
169 169 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
170 170 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
171 171 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
172 172 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
173 173 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
174 174 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
175 175 self.frequency = fp.get('Rx/Frequency')
176 176 txAus = fp.get('Raw11/Data/Pulsewidth')
177 177
178 178
179 179 self.nblocks = self.pulseCount.shape[0] #nblocks
180 180
181 181 self.nprofiles = self.pulseCount.shape[1] #nprofile
182 182 self.nsa = self.nsamplesPulse[0,0] #ngates
183 183 self.nchannels = len(self.beamCode)
184 184 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
185 185 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
186 186 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
187 187
188 188 #filling radar controller header parameters
189 189 self.__ippKm = self.ippSeconds *.15*1e6 # in km
190 190 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
191 191 self.__txB = 0
192 192 nWindows=1
193 193 self.__nSamples = self.nsa
194 194 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
195 195 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
196 196
197 197 #for now, until we understand why the saved code is different (code is included even though it is not in the tuf file)
198 198 #self.__codeType = 0
199 199 # self.__nCode = None
200 200 # self.__nBaud = None
201 201 self.__code = self.code
202 202 self.__codeType = 0
203 203 if self.code != None:
204 204 self.__codeType = 1
205 205 self.__nCode = self.nCode
206 206 self.__nBaud = self.nBaud
207 207 #self.__code = 0
208 208
209 209 #filling system header parameters
210 210 self.__nSamples = self.nsa
211 211 self.newProfiles = self.nprofiles/self.nchannels
212 212 self.__channelList = list(range(self.nchannels))
213 213
214 214 self.__frequency = self.frequency[0][0]
215 215
216 216
217 217 return 1
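For files recorded after 2021-07-15, readAMISRHeader rebuilds the beam list from Setup/Beamcodefile, where each line is a beam code written in hexadecimal, and then looks up azimuth and elevation in Setup/BeamcodeMap. A small sketch of that decoding, with made-up codes and map values:

    import numpy

    beamcode_file = "64157\n64158\n64157\n"          # hypothetical hex codes, one per line
    true_beams = beamcode_file.split("\n")[:-1]      # drop the trailing empty entry
    real_beam_code = []
    [real_beam_code.append(x) for x in true_beams if x not in real_beam_code]
    beam_code = [int(x, 16) for x in real_beam_code] # hex string -> integer beam code

    # BeamcodeMap rows: [code, azimuth, elevation] (values here are invented)
    beamcode_map = numpy.array([[0x64157, 180.0, 77.5], [0x64158, 180.0, 70.0]])
    for beam in beam_code:
        row = numpy.where(beamcode_map[:, 0] == beam)[0].squeeze()
        print(beam, beamcode_map[row, 1], beamcode_map[row, 2])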
218 218
219 219
220 220 def createBuffers(self):
221 221
222 222 pass
223 223
224 224 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
225 225 self.path = path
226 226 self.startDate = startDate
227 227 self.endDate = endDate
228 228 self.startTime = startTime
229 229 self.endTime = endTime
230 230 self.walk = walk
231 231
232 232 def __checkPath(self):
233 233 if os.path.exists(self.path):
234 234 self.status = 1
235 235 else:
236 236 self.status = 0
237 237 print('Path:%s does not exist'%self.path)
238 238
239 239 return
240 240
241 241
242 242 def __selDates(self, amisr_dirname_format):
243 243 try:
244 244 year = int(amisr_dirname_format[0:4])
245 245 month = int(amisr_dirname_format[4:6])
246 246 dom = int(amisr_dirname_format[6:8])
247 247 thisDate = datetime.date(year,month,dom)
248 248
249 249 if (thisDate>=self.startDate and thisDate <= self.endDate):
250 250 return amisr_dirname_format
251 251 except:
252 252 return None
253 253
254 254
255 255 def __findDataForDates(self,online=False):
256 256
257 257 if not(self.status):
258 258 return None
259 259
260 260 pat = r'\d+.\d+'
261 261 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
262 262 dirnameList = [x for x in dirnameList if x!=None]
263 263 dirnameList = [x.string for x in dirnameList]
264 264 if not(online):
265 265 dirnameList = [self.__selDates(x) for x in dirnameList]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 if len(dirnameList)>0:
268 268 self.status = 1
269 269 self.dirnameList = dirnameList
270 270 self.dirnameList.sort()
271 271 else:
272 272 self.status = 0
273 273 return None
274 274
275 275 def __getTimeFromData(self):
276 276 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
277 277 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
278 278
279 279 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
280 280 print('........................................')
281 281 filter_filenameList = []
282 282 self.filenameList.sort()
283 283 #for i in range(len(self.filenameList)-1):
284 284 for i in range(len(self.filenameList)):
285 285 filename = self.filenameList[i]
286 286 fp = h5py.File(filename,'r')
287 287 time_str = fp.get('Time/RadacTimeString')
288 288
289 289 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
290 290 #startDateTimeStr_File = "2019-12-16 09:21:11"
291 291 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
292 292 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
293 293
294 294 #endDateTimeStr_File = "2019-12-16 11:10:11"
295 295 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
296 296 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 297 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 298
299 299 fp.close()
300 300
301 301 #print("check time", startDateTime_File)
302 302 if self.timezone == 'lt':
303 303 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
304 304 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
305 305 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
306 306 filter_filenameList.append(filename)
307 307
308 308 if (endDateTime_File>endDateTime_Reader):
309 309 break
310 310
311 311
312 312 filter_filenameList.sort()
313 313 self.filenameList = filter_filenameList
314 314 return 1
315 315
316 316 def __filterByGlob1(self, dirName):
317 317 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
318 318 filter_files.sort()
319 319 filterDict = {}
320 320 filterDict.setdefault(dirName)
321 321 filterDict[dirName] = filter_files
322 322 return filterDict
323 323
324 324 def __getFilenameList(self, fileListInKeys, dirList):
325 325 for value in fileListInKeys:
326 326 dirName = list(value.keys())[0]
327 327 for file in value[dirName]:
328 328 filename = os.path.join(dirName, file)
329 329 self.filenameList.append(filename)
330 330
331 331
332 332 def __selectDataForTimes(self, online=False):
333 333 #the time filter is not implemented here yet
334 334 if not(self.status):
335 335 return None
336 336
337 337 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
338 338
339 339 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
340 340
341 341 self.__getFilenameList(fileListInKeys, dirList)
342 342 if not(online):
343 343 #filter by time
344 344 if not(self.all):
345 345 self.__getTimeFromData()
346 346
347 347 if len(self.filenameList)>0:
348 348 self.status = 1
349 349 self.filenameList.sort()
350 350 else:
351 351 self.status = 0
352 352 return None
353 353
354 354 else:
355 355 #get the last file - 1
356 356 self.filenameList = [self.filenameList[-2]]
357 357 new_dirnameList = []
358 358 for dirname in self.dirnameList:
359 359 junk = numpy.array([dirname in x for x in self.filenameList])
360 360 junk_sum = junk.sum()
361 361 if junk_sum > 0:
362 362 new_dirnameList.append(dirname)
363 363 self.dirnameList = new_dirnameList
364 364 return 1
365 365
366 366 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
367 367 endTime=datetime.time(23,59,59),walk=True):
368 368
369 369 if endDate ==None:
370 370 startDate = datetime.datetime.utcnow().date()
371 371 endDate = datetime.datetime.utcnow().date()
372 372
373 373 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
374 374
375 375 self.__checkPath()
376 376
377 377 self.__findDataForDates(online=True)
378 378
379 379 self.dirnameList = [self.dirnameList[-1]]
380 380
381 381 self.__selectDataForTimes(online=True)
382 382
383 383 return
384 384
385 385
386 386 def searchFilesOffLine(self,
387 387 path,
388 388 startDate,
389 389 endDate,
390 390 startTime=datetime.time(0,0,0),
391 391 endTime=datetime.time(23,59,59),
392 392 walk=True):
393 393
394 394 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
395 395
396 396 self.__checkPath()
397 397
398 398 self.__findDataForDates()
399 399
400 400 self.__selectDataForTimes()
401 401
402 402 for i in range(len(self.filenameList)):
403 403 print("%s" %(self.filenameList[i]))
404 404
405 405 return
406 406
407 407 def __setNextFileOffline(self):
408 408
409 409 try:
410 410 self.filename = self.filenameList[self.fileIndex]
411 411 self.amisrFilePointer = h5py.File(self.filename,'r')
412 412 self.fileIndex += 1
413 413 except:
414 414 self.flagNoMoreFiles = 1
415 415 print("No more Files")
416 416 return 0
417 417
418 418 self.flagIsNewFile = 1
419 419 print("Setting the file: %s"%self.filename)
420 420
421 421 return 1
422 422
423 423
424 424 def __setNextFileOnline(self):
425 425 filename = self.filenameList[0]
426 426 if self.__filename_online != None:
427 427 self.__selectDataForTimes(online=True)
428 428 filename = self.filenameList[0]
429 429 wait = 0
430 430 self.__waitForNewFile=300 ## DEBUG:
431 431 while self.__filename_online == filename:
432 432 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
433 433 if wait == 5:
434 434 self.flagNoMoreFiles = 1
435 435 return 0
436 436 sleep(self.__waitForNewFile)
437 437 self.__selectDataForTimes(online=True)
438 438 filename = self.filenameList[0]
439 439 wait += 1
440 440
441 441 self.__filename_online = filename
442 442
443 443 self.amisrFilePointer = h5py.File(filename,'r')
444 444 self.flagIsNewFile = 1
445 445 self.filename = filename
446 446 print("Setting the file: %s"%self.filename)
447 447 return 1
448 448
449 449
450 450 def readData(self):
451 451 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
452 452 re = buffer[:,:,:,0]
453 453 im = buffer[:,:,:,1]
454 454 dataset = re + im*1j
455 455
456 456 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
457 457 timeset = self.radacTime[:,0]
458 458
459 459 return dataset,timeset
460 460
461 461 def reshapeData(self):
462 462 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
463 463 channels = self.beamCodeByPulse[0,:]
464 464 nchan = self.nchannels
465 465 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
466 466 nblocks = self.nblocks
467 467 nsamples = self.nsa
468 468
469 469 #Dimensions : nChannels, nProfiles, nSamples
470 470 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
471 471 ############################################
472 472
473 473 for thisChannel in range(nchan):
474 474 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
475 475
476 476
477 477 new_block = numpy.transpose(new_block, (1,0,2,3))
478 478 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
479 479
480 480 return new_block
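reshapeData regroups the raw (nBlocks, nProfiles, nSamples) buffer by beam: for each output channel it keeps only the profiles whose entry in BeamCode matches that channel's beam code, then flattens blocks and per-beam profiles into a single time axis of shape (nChannels, nBlocks*newProfiles, nSamples). A toy example with invented sizes:

    import numpy

    nblocks, nprofiles, nsamples = 2, 4, 3
    beam_code = [101, 202]                          # two beams -> two output channels
    channels = numpy.array([101, 202, 101, 202])    # beam code of each profile in a block
    dataset = numpy.arange(nblocks * nprofiles * nsamples).reshape(
        nblocks, nprofiles, nsamples).astype("complex64")

    new_profiles = nprofiles // len(beam_code)      # profiles per beam per block
    new_block = numpy.empty((nblocks, len(beam_code), new_profiles, nsamples), dtype="complex64")
    for ch, code in enumerate(beam_code):
        new_block[:, ch, :, :] = dataset[:, numpy.where(channels == code)[0], :]

    new_block = numpy.transpose(new_block, (1, 0, 2, 3)).reshape(len(beam_code), -1, nsamples)
    print(new_block.shape)   # (2, 4, 3): channels, blocks*profiles_per_beam, samples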
481 481
482 482 def updateIndexes(self):
483 483
484 484 pass
485 485
486 486 def fillJROHeader(self):
487 487
488 488 #fill radar controller header
489 489 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
490 490 txA=self.__txA,
491 491 txB=0,
492 492 nWindows=1,
493 493 nHeights=self.__nSamples,
494 494 firstHeight=self.__firstHeight,
495 495 deltaHeight=self.__deltaHeight,
496 496 codeType=self.__codeType,
497 497 nCode=self.__nCode, nBaud=self.__nBaud,
498 498 code = self.__code,
499 499 fClock=1)
500 500
501 501 #fill system header
502 502 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
503 503 nProfiles=self.newProfiles,
504 504 nChannels=len(self.__channelList),
505 505 adcResolution=14,
506 506 pciDioBusWidth=32)
507 507
508 508 self.dataOut.type = "Voltage"
509 509 self.dataOut.data = None
510 510 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
511 511 # self.dataOut.nChannels = 0
512 512
513 513 # self.dataOut.nHeights = 0
514 514
515 515 self.dataOut.nProfiles = self.newProfiles*self.nblocks
516 516 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
517 517 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
518 518 self.dataOut.heightList = ranges/1000.0 #km
519 519 self.dataOut.channelList = self.__channelList
520 520 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
521 521
522 522 # self.dataOut.channelIndexList = None
523 523
524 524
525 525 self.dataOut.azimuthList = numpy.array(self.azimuthList)
526 526 self.dataOut.elevationList = numpy.array(self.elevationList)
527 527 self.dataOut.codeList = numpy.array(self.beamCode)
528 528 #print(self.dataOut.elevationList)
529 529 self.dataOut.flagNoData = True
530 530
531 531 #Set to TRUE if the data is discontinuous
532 532 self.dataOut.flagDiscontinuousBlock = False
533 533
534 534 self.dataOut.utctime = None
535 535
536 536 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
537 537 if self.timezone == 'lt':
538 538 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
539 539 else:
540 540 self.dataOut.timeZone = 0 #by default time is UTC
541 541
542 542 self.dataOut.dstFlag = 0
543 543 self.dataOut.errorCount = 0
544 544 self.dataOut.nCohInt = 1
545 545 self.dataOut.flagDecodeData = False #assume the data is already decoded
546 546 self.dataOut.flagDeflipData = False #assume the data has no flip applied
547 547 self.dataOut.flagShiftFFT = False
548 548 self.dataOut.ippSeconds = self.ippSeconds
549 549
550 550 #Time interval between profiles
551 551 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
552 552
553 553 self.dataOut.frequency = self.__frequency
554 554 self.dataOut.realtime = self.online
555 555 pass
556 556
557 557 def readNextFile(self,online=False):
558 558
559 559 if not(online):
560 560 newFile = self.__setNextFileOffline()
561 561 else:
562 562 newFile = self.__setNextFileOnline()
563 563
564 564 if not(newFile):
565 565 self.dataOut.error = True
566 566 return 0
567 567
568 568 if not self.readAMISRHeader(self.amisrFilePointer):
569 569 self.dataOut.error = True
570 570 return 0
571 571
572 572 self.createBuffers()
573 573 self.fillJROHeader()
574 574
575 575 #self.__firstFile = False
576 576
577 577
578 578
579 579 self.dataset,self.timeset = self.readData()
580 580
581 581 if self.endDate!=None:
582 582 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
583 583 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
584 584 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
585 585 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
586 586 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
587 587 if self.timezone == 'lt':
588 588 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
589 589 if (startDateTime_File>endDateTime_Reader):
590 590 return 0
591 591
592 592 self.jrodataset = self.reshapeData()
593 593 #----self.updateIndexes()
594 594 self.profileIndex = 0
595 595
596 596 return 1
597 597
598 598
599 599 def __hasNotDataInBuffer(self):
600 600 if self.profileIndex >= (self.newProfiles*self.nblocks):
601 601 return 1
602 602 return 0
603 603
604 604
605 605 def getData(self):
606 606
607 607 if self.flagNoMoreFiles:
608 608 self.dataOut.flagNoData = True
609 609 return 0
610 610
611 611 if self.__hasNotDataInBuffer():
612 612 if not (self.readNextFile(self.online)):
613 613 return 0
614 614
615 615
616 616 if self.dataset is None: # setear esta condicion cuando no hayan datos por leer
617 617 self.dataOut.flagNoData = True
618 618 return 0
619 619
620 620 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
621 621
622 622 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
623 623
624 624 #print("R_t",self.timeset)
625 625
626 626 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
627 627 #check the JRO data basic header and verify it is compatible with this value
628 628 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
629 629 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
630 630 indexblock = self.profileIndex/self.newProfiles
631 631 #print (indexblock, indexprof)
632 632 diffUTC = 1.8e4 #UTC difference from Peru in seconds --Joab
633 633 diffUTC = 0
634 634 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
635 635
636 636 #print("utc :",indexblock," __ ",t_comp)
637 637 #print(numpy.shape(self.timeset))
638 638 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
639 639 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
640 640 #print(self.dataOut.utctime)
641 641 self.dataOut.profileIndex = self.profileIndex
642 642 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
643 643 self.dataOut.flagNoData = False
644 644 # if indexprof == 0:
645 645 # print self.dataOut.utctime
646 646
647 647 self.profileIndex += 1
648 648
649 #return self.dataOut.data
649 return self.dataOut.data
650 650
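In getData the timestamp of each delivered profile is derived from the block start time plus the elapsed IPPs: indexprof = profileIndex mod newProfiles gives the position inside the block, indexblock = profileIndex / newProfiles selects the block, and utctime = timeset[indexblock] + indexprof * ippSeconds * nchannels (diffUTC is currently overridden to 0). A numeric example with assumed values:

    # assumed values for illustration only
    newProfiles, nchannels, ippSeconds = 100, 4, 0.0025
    profileIndex = 257
    indexprof = profileIndex % newProfiles           # 57
    indexblock = profileIndex // newProfiles         # 2
    t_comp = indexprof * ippSeconds * nchannels      # 57 * 0.0025 * 4 = 0.57 s
    # utctime = timeset[indexblock] + t_comp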
651 651
652 652 def run(self, **kwargs):
653 653 '''
654 654 This method will be called many times so here you should put all your code
655 655 '''
656 656 #print("running kamisr")
657 657 if not self.isConfig:
658 658 self.setup(**kwargs)
659 659 self.isConfig = True
660 660
661 661 self.getData()
662 #return(self.dataOut.data)
663 return(self.dataOut)
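A minimal sketch of how AMISRReader could be attached as a read unit, assuming the same project.addReadUnit() pattern shown in the HDFReader docstring later in this changeset; the path and date values are placeholders and the keyword names mirror AMISRReader.setup():

    # hedged sketch, not a verified configuration
    reader = project.addReadUnit(
        name='AMISRReader',
        path='/path/to/amisr/data',
        startDate='2021/07/20',
        endDate='2021/07/20',
        startTime='00:00:00',
        endTime='23:59:59',
        walk=1,
        timezone='lt',     # 'ut' (default) or 'lt'
        online=0,
        )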
@@ -1,626 +1,651
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default
19 19 the files contain two groups, Data and Metadata, and every variable is loaded as a
20 20 `dataOut` attribute.
21 21 It is also possible to read any other HDF5 file by giving its structure in the `description`
22 22 parameter, and extra values can be added to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 #pathname, filename = os.path.split(fullpath) # fullpath is still a generator at this point; split it after next(fullpath)
112 #print(pathname,filename)
111 113 try:
112 114 fullpath = next(fullpath)
115
113 116 except:
114 117 fullpath = None
115 118
116 119 if fullpath:
117 120 break
118 121
119 122 log.warning(
120 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
121 124 self.delay, self.path, nTries + 1),
122 125 self.name)
123 126 time.sleep(self.delay)
124 127
125 128 if not(fullpath):
126 129 raise schainpy.admin.SchainError(
127 130 'There isn\'t any valid file in {}'.format(self.path))
128 131
129 132 pathname, filename = os.path.split(fullpath)
130 133 self.year = int(filename[1:5])
131 134 self.doy = int(filename[5:8])
132 135 self.set = int(filename[8:11]) - 1
133 136 else:
134 137 log.log("Searching files in {}".format(self.path), self.name)
135 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
136 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
137 140
138 141 self.setNextFile()
139 142
140 143 return
141 144
145
142 146 def readFirstHeader(self):
143 147 '''Read metadata and data'''
144 148
145 149 self.__readMetadata()
146 150 self.__readData()
147 151 self.__setBlockList()
148 152
149 153 if 'type' in self.meta:
150 154 self.dataOut = eval(self.meta['type'])()
151 155
152 156 for attr in self.meta:
157 print("attr: ", attr)
153 158 setattr(self.dataOut, attr, self.meta[attr])
154 159
160
155 161 self.blockIndex = 0
156 162
157 163 return
158 164
159 165 def __setBlockList(self):
160 166 '''
161 167 Selects the data within the times defined
162 168
163 169 self.fp
164 170 self.startTime
165 171 self.endTime
166 172 self.blockList
167 173 self.blocksPerFile
168 174
169 175 '''
170 176
171 177 startTime = self.startTime
172 178 endTime = self.endTime
173 179 thisUtcTime = self.data['utctime'] + self.utcoffset
174 180 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 181 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
176
182 self.startFileDatetime = thisDatetime
183 print("datee ",self.startFileDatetime)
177 184 thisDate = thisDatetime.date()
178 185 thisTime = thisDatetime.time()
179 186
180 187 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 188 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 189
183 190 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
184 191
185 192 self.blockList = ind
186 193 self.blocksPerFile = len(ind)
194 self.blocksPerFile = len(thisUtcTime)
187 195 return
188 196
189 197 def __readMetadata(self):
190 198 '''
191 199 Reads Metadata
192 200 '''
193 201
194 202 meta = {}
195 203
196 204 if self.description:
197 205 for key, value in self.description['Metadata'].items():
198 206 meta[key] = self.fp[value][()]
199 207 else:
200 208 grp = self.fp['Metadata']
201 209 for name in grp:
202 210 meta[name] = grp[name][()]
203 211
204 212 if self.extras:
205 213 for key, value in self.extras.items():
206 214 meta[key] = value
207 215 self.meta = meta
208 216
209 217 return
210 218
219
220
221 def checkForRealPath(self, nextFile, nextDay):
222
223 # print("check FRP")
224 # dt = self.startFileDatetime + datetime.timedelta(1)
225 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
226 # fullfilename = os.path.join(self.path, filename)
227 # print("check Path ",fullfilename,filename)
228 # if os.path.exists(fullfilename):
229 # return fullfilename, filename
230 # return None, filename
231 return None,None
232
211 233 def __readData(self):
212 234
213 235 data = {}
214 236
215 237 if self.description:
216 238 for key, value in self.description['Data'].items():
217 239 if isinstance(value, str):
218 240 if isinstance(self.fp[value], h5py.Dataset):
219 241 data[key] = self.fp[value][()]
220 242 elif isinstance(self.fp[value], h5py.Group):
221 243 array = []
222 244 for ch in self.fp[value]:
223 245 array.append(self.fp[value][ch][()])
224 246 data[key] = numpy.array(array)
225 247 elif isinstance(value, list):
226 248 array = []
227 249 for ch in value:
228 250 array.append(self.fp[ch][()])
229 251 data[key] = numpy.array(array)
230 252 else:
231 253 grp = self.fp['Data']
232 254 for name in grp:
233 255 if isinstance(grp[name], h5py.Dataset):
234 256 array = grp[name][()]
235 257 elif isinstance(grp[name], h5py.Group):
236 258 array = []
237 259 for ch in grp[name]:
238 260 array.append(grp[name][ch][()])
239 261 array = numpy.array(array)
240 262 else:
241 263 log.warning('Unknown type: {}'.format(name))
242 264
243 265 if name in self.description:
244 266 key = self.description[name]
245 267 else:
246 268 key = name
247 269 data[key] = array
248 270
249 271 self.data = data
250 272 return
251 273
252 274 def getData(self):
253
275 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
276 self.dataOut.flagNoData = True
277 self.dataOut.error = True
278 return
254 279 for attr in self.data:
255 280 if self.data[attr].ndim == 1:
256 281 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
257 282 else:
258 283 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
259 284
260 285 self.dataOut.flagNoData = False
261 286 self.blockIndex += 1
262 287
263 288 log.log("Block No. {}/{} -> {}".format(
264 289 self.blockIndex,
265 290 self.blocksPerFile,
266 291 self.dataOut.datatime.ctime()), self.name)
267 292
268 293 return
269 294
270 295 def run(self, **kwargs):
271 296
272 297 if not(self.isConfig):
273 298 self.setup(**kwargs)
274 299 self.isConfig = True
275 300
276 301 if self.blockIndex == self.blocksPerFile:
277 302 self.setNextFile()
278 303
279 304 self.getData()
280 305
281 306 return
282 307
283 308 @MPDecorator
284 309 class HDFWriter(Operation):
285 310 """Operation to write HDF5 files.
286 311
287 312 The HDF5 file contains by default two groups Data and Metadata where
288 313 you can save any `dataOut` attribute specified by `dataList` and `metadataList`
289 314 parameters, data attributes are normaly time dependent where the metadata
290 315 are not.
291 316 It is possible to customize the structure of the HDF5 file with the
292 317 optional description parameter see the examples.
293 318
294 319 Parameters:
295 320 -----------
296 321 path : str
297 322 Path where files will be saved.
298 323 blocksPerFile : int
299 324 Number of blocks per file
300 325 metadataList : list
301 326 List of the dataOut attributes that will be saved as metadata
302 327 dataList : list
303 328 List of the dataOut attributes that will be saved as data
304 329 setType : bool
305 330 If True the name of the files corresponds to the timestamp of the data
306 331 description : dict, optional
307 332 Dictionary with the desired description of the HDF5 file
308 333
309 334 Examples
310 335 --------
311 336
312 337 desc = {
313 338 'data_output': {'winds': ['z', 'w', 'v']},
314 339 'utctime': 'timestamps',
315 340 'heightList': 'heights'
316 341 }
317 342 desc = {
318 343 'data_output': ['z', 'w', 'v'],
319 344 'utctime': 'timestamps',
320 345 'heightList': 'heights'
321 346 }
322 347 desc = {
323 348 'Data': {
324 349 'data_output': 'winds',
325 350 'utctime': 'timestamps'
326 351 },
327 352 'Metadata': {
328 353 'heightList': 'heights'
329 354 }
330 355 }
331 356
332 357 writer = proc_unit.addOperation(name='HDFWriter')
333 358 writer.addParameter(name='path', value='/path/to/file')
334 359 writer.addParameter(name='blocksPerFile', value='32')
335 360 writer.addParameter(name='metadataList', value='heightList,timeZone')
336 361 writer.addParameter(name='dataList',value='data_output,utctime')
337 362 # writer.addParameter(name='description',value=json.dumps(desc))
338 363
339 364 """
340 365
341 366 ext = ".hdf5"
342 367 optchar = "D"
343 368 filename = None
344 369 path = None
345 370 setFile = None
346 371 fp = None
347 372 firsttime = True
348 373 #Configurations
349 374 blocksPerFile = None
350 375 blockIndex = None
351 376 dataOut = None
352 377 #Data Arrays
353 378 dataList = None
354 379 metadataList = None
355 380 currentDay = None
356 381 lastTime = None
357 382
358 383 def __init__(self):
359 384
360 385 Operation.__init__(self)
361 386 return
362 387
363 388 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
364 389 self.path = path
365 390 self.blocksPerFile = blocksPerFile
366 391 self.metadataList = metadataList
367 392 self.dataList = [s.strip() for s in dataList]
368 393 self.setType = setType
369 394 self.description = description
370 395
371 396 if self.metadataList is None:
372 397 self.metadataList = self.dataOut.metadata_list
373 398
374 399 tableList = []
375 400 dsList = []
376 401
377 402 for i in range(len(self.dataList)):
378 403 dsDict = {}
379 404 if hasattr(self.dataOut, self.dataList[i]):
380 405 dataAux = getattr(self.dataOut, self.dataList[i])
381 406 dsDict['variable'] = self.dataList[i]
382 407 else:
383 408 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
384 409 continue
385 410
386 411 if dataAux is None:
387 412 continue
388 413 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
389 414 dsDict['nDim'] = 0
390 415 else:
391 416 dsDict['nDim'] = len(dataAux.shape)
392 417 dsDict['shape'] = dataAux.shape
393 418 dsDict['dsNumber'] = dataAux.shape[0]
394 419 dsDict['dtype'] = dataAux.dtype
395 420
396 421 dsList.append(dsDict)
397 422
398 423 self.dsList = dsList
399 424 self.currentDay = self.dataOut.datatime.date()
400 425
401 426 def timeFlag(self):
402 427 currentTime = self.dataOut.utctime
403 428 timeTuple = time.localtime(currentTime)
404 429 dataDay = timeTuple.tm_yday
405 430
406 431 if self.lastTime is None:
407 432 self.lastTime = currentTime
408 433 self.currentDay = dataDay
409 434 return False
410 435
411 436 timeDiff = currentTime - self.lastTime
412 437
413 438 #If the day changed or the time gap between samples exceeds the limit
414 439 if dataDay != self.currentDay:
415 440 self.currentDay = dataDay
416 441 return True
417 442 elif timeDiff > 3*60*60:
418 443 self.lastTime = currentTime
419 444 return True
420 445 else:
421 446 self.lastTime = currentTime
422 447 return False
423 448
424 449 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
425 450 dataList=[], setType=None, description={}):
426 451
427 452 self.dataOut = dataOut
428 453 if not(self.isConfig):
429 454 self.setup(path=path, blocksPerFile=blocksPerFile,
430 455 metadataList=metadataList, dataList=dataList,
431 456 setType=setType, description=description)
432 457
433 458 self.isConfig = True
434 459 self.setNextFile()
435 460
436 461 self.putData()
437 462 return
438 463
439 464 def setNextFile(self):
440 465
441 466 ext = self.ext
442 467 path = self.path
443 468 setFile = self.setFile
444 469
445 470 timeTuple = time.localtime(self.dataOut.utctime)
446 471 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
447 472 fullpath = os.path.join(path, subfolder)
448 473
449 474 if os.path.exists(fullpath):
450 475 filesList = os.listdir(fullpath)
451 476 filesList = [k for k in filesList if k.startswith(self.optchar)]
452 477 if len( filesList ) > 0:
453 478 filesList = sorted(filesList, key=str.lower)
454 479 filen = filesList[-1]
455 480 # the filename must have the following format
456 481 # 0 1234 567 89A BCDE (hex)
457 482 # x YYYY DDD SSS .ext
458 483 if isNumber(filen[8:11]):
459 484 setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file
460 485 else:
461 486 setFile = -1
462 487 else:
463 488 setFile = -1 #initialize the set counter
464 489 else:
465 490 os.makedirs(fullpath)
466 491 setFile = -1 #initialize the set counter
467 492
468 493 if self.setType is None:
469 494 setFile += 1
470 495 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
471 496 timeTuple.tm_year,
472 497 timeTuple.tm_yday,
473 498 setFile,
474 499 ext )
475 500 else:
476 501 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
477 502 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
478 503 timeTuple.tm_year,
479 504 timeTuple.tm_yday,
480 505 setFile,
481 506 ext )
482 507
483 508 self.filename = os.path.join( path, subfolder, file )
484 509
485 510 #Setting HDF5 File
486 511 self.fp = h5py.File(self.filename, 'w')
487 512 #write metadata
488 513 self.writeMetadata(self.fp)
489 514 #Write data
490 515 self.writeData(self.fp)
491 516
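With setType left as None, setNextFile builds names as optchar + YYYY + DDD + SSS + ext inside a dYYYYDDD subfolder; with setType set, the three-digit set number is replaced by a four-digit hour*60+minute stamp. A worked example, taken directly from the format strings above:

    # example: year 2021, day-of-year 090, fourth set written (sets start at 000)
    subfolder = 'd%4.4d%3.3d' % (2021, 90)                          # 'd2021090'
    filename  = '%s%4.4d%3.3d%03d%s' % ('D', 2021, 90, 3, '.hdf5')  # 'D2021090003.hdf5'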
492 517 def getLabel(self, name, x=None):
493 518
494 519 if x is None:
495 520 if 'Data' in self.description:
496 521 data = self.description['Data']
497 522 if 'Metadata' in self.description:
498 523 data.update(self.description['Metadata'])
499 524 else:
500 525 data = self.description
501 526 if name in data:
502 527 if isinstance(data[name], str):
503 528 return data[name]
504 529 elif isinstance(data[name], list):
505 530 return None
506 531 elif isinstance(data[name], dict):
507 532 for key, value in data[name].items():
508 533 return key
509 534 return name
510 535 else:
511 536 if 'Metadata' in self.description:
512 537 meta = self.description['Metadata']
513 538 else:
514 539 meta = self.description
515 540 if name in meta:
516 541 if isinstance(meta[name], list):
517 542 return meta[name][x]
518 543 elif isinstance(meta[name], dict):
519 544 for key, value in meta[name].items():
520 545 return value[x]
521 546 if 'cspc' in name:
522 547 return 'pair{:02d}'.format(x)
523 548 else:
524 549 return 'channel{:02d}'.format(x)
525 550
526 551 def writeMetadata(self, fp):
527 552
528 553 if self.description:
529 554 if 'Metadata' in self.description:
530 555 grp = fp.create_group('Metadata')
531 556 else:
532 557 grp = fp
533 558 else:
534 559 grp = fp.create_group('Metadata')
535 560
536 561 for i in range(len(self.metadataList)):
537 562 if not hasattr(self.dataOut, self.metadataList[i]):
538 563 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
539 564 continue
540 565 value = getattr(self.dataOut, self.metadataList[i])
541 566 if isinstance(value, bool):
542 567 if value is True:
543 568 value = 1
544 569 else:
545 570 value = 0
546 571 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
547 572 return
548 573
549 574 def writeData(self, fp):
550 575
551 576 if self.description:
552 577 if 'Data' in self.description:
553 578 grp = fp.create_group('Data')
554 579 else:
555 580 grp = fp
556 581 else:
557 582 grp = fp.create_group('Data')
558 583
559 584 dtsets = []
560 585 data = []
561 586
562 587 for dsInfo in self.dsList:
563 588 if dsInfo['nDim'] == 0:
564 589 ds = grp.create_dataset(
565 590 self.getLabel(dsInfo['variable']),
566 591 (self.blocksPerFile, ),
567 592 chunks=True,
568 593 dtype=numpy.float64)
569 594 dtsets.append(ds)
570 595 data.append((dsInfo['variable'], -1))
571 596 else:
572 597 label = self.getLabel(dsInfo['variable'])
573 598 if label is not None:
574 599 sgrp = grp.create_group(label)
575 600 else:
576 601 sgrp = grp
577 602 for i in range(dsInfo['dsNumber']):
578 603 ds = sgrp.create_dataset(
579 604 self.getLabel(dsInfo['variable'], i),
580 605 (self.blocksPerFile, ) + dsInfo['shape'][1:],
581 606 chunks=True,
582 607 dtype=dsInfo['dtype'])
583 608 dtsets.append(ds)
584 609 data.append((dsInfo['variable'], i))
585 610 fp.flush()
586 611
587 612 log.log('Creating file: {}'.format(fp.filename), self.name)
588 613
589 614 self.ds = dtsets
590 615 self.data = data
591 616 self.firsttime = True
592 617 self.blockIndex = 0
593 618 return
594 619
595 620 def putData(self):
596 621
597 622 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
598 623 self.closeFile()
599 624 self.setNextFile()
600 625
601 626 for i, ds in enumerate(self.ds):
602 627 attr, ch = self.data[i]
603 628 if ch == -1:
604 629 ds[self.blockIndex] = getattr(self.dataOut, attr)
605 630 else:
606 631 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
607 632
608 633 self.fp.flush()
609 634 self.blockIndex += 1
610 635 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
611 636
612 637 return
613 638
614 639 def closeFile(self):
615 640
616 641 if self.blockIndex != self.blocksPerFile:
617 642 for ds in self.ds:
618 643 ds.resize(self.blockIndex, axis=0)
619 644
620 645 if self.fp:
621 646 self.fp.flush()
622 647 self.fp.close()
623 648
624 649 def close(self):
625 650
626 651 self.closeFile()
@@ -1,3890 +1,3889
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
16 16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 26 import matplotlib.pyplot as plt
27 27
28 28 SPEED_OF_LIGHT = 299792458
29 29
30 30 '''solving pickling issue'''
31 31
32 32 def _pickle_method(method):
33 33 func_name = method.__func__.__name__
34 34 obj = method.__self__
35 35 cls = method.__self__.__class__
36 36 return _unpickle_method, (func_name, obj, cls)
37 37
38 38 def _unpickle_method(func_name, obj, cls):
39 39 for cls in cls.mro():
40 40 try:
41 41 func = cls.__dict__[func_name]
42 42 except KeyError:
43 43 pass
44 44 else:
45 45 break
46 46 return func.__get__(obj, cls)
47 47
48 48
49 49 class ParametersProc(ProcessingUnit):
50 50
51 51 METHODS = {}
52 52 nSeconds = None
53 53
54 54 def __init__(self):
55 55 ProcessingUnit.__init__(self)
56 56
57 57 # self.objectDict = {}
58 58 self.buffer = None
59 59 self.firstdatatime = None
60 60 self.profIndex = 0
61 61 self.dataOut = Parameters()
62 62 self.setupReq = False #Add this to every processing unit
63
63 print("INIT PROC")
64 64 def __updateObjFromInput(self):
65 65
66 66 self.dataOut.inputUnit = self.dataIn.type
67 67
68 68 self.dataOut.timeZone = self.dataIn.timeZone
69 69 self.dataOut.dstFlag = self.dataIn.dstFlag
70 70 self.dataOut.errorCount = self.dataIn.errorCount
71 71 self.dataOut.useLocalTime = self.dataIn.useLocalTime
72 72
73 73 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
74 74 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
75 75 self.dataOut.channelList = self.dataIn.channelList
76 76 self.dataOut.heightList = self.dataIn.heightList
77 77 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
78 78 # self.dataOut.nHeights = self.dataIn.nHeights
79 79 # self.dataOut.nChannels = self.dataIn.nChannels
80 80 # self.dataOut.nBaud = self.dataIn.nBaud
81 81 # self.dataOut.nCode = self.dataIn.nCode
82 82 # self.dataOut.code = self.dataIn.code
83 83 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
84 84 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
85 85 # self.dataOut.utctime = self.firstdatatime
86 86 self.dataOut.utctime = self.dataIn.utctime
87 87 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
88 88 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data has no flip applied
89 89 self.dataOut.nCohInt = self.dataIn.nCohInt
90 90 # self.dataOut.nIncohInt = 1
91 91 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
92 92 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
93 93 self.dataOut.timeInterval1 = self.dataIn.timeInterval
94 94 self.dataOut.heightList = self.dataIn.heightList
95 95 self.dataOut.frequency = self.dataIn.frequency
96 96 # self.dataOut.noise = self.dataIn.noise
97 97 self.dataOut.codeList = self.dataIn.codeList
98 98 self.dataOut.azimuthList = self.dataIn.azimuthList
99 99 self.dataOut.elevationList = self.dataIn.elevationList
100 100
101 101 def run(self):
102
103
102 print("run proc param")
104 103
105 104 #---------------------- Voltage Data ---------------------------
106 105
107 106 if self.dataIn.type == "Voltage":
108 107
109 108 self.__updateObjFromInput()
110 109 self.dataOut.data_pre = self.dataIn.data.copy()
111 110 self.dataOut.flagNoData = False
112 111 self.dataOut.utctimeInit = self.dataIn.utctime
113 112 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
114 113 if hasattr(self.dataIn, 'dataPP_POW'):
115 114 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
116 115
117 116 if hasattr(self.dataIn, 'dataPP_POWER'):
118 117 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
119 118
120 119 if hasattr(self.dataIn, 'dataPP_DOP'):
121 120 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
122 121
123 122 if hasattr(self.dataIn, 'dataPP_SNR'):
124 123 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
125 124
126 125 if hasattr(self.dataIn, 'dataPP_WIDTH'):
127 126 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
128 127 return
129 128
130 129 #---------------------- Spectra Data ---------------------------
131 130
132 131 if self.dataIn.type == "Spectra":
133 132
134 133 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
135 134 self.dataOut.data_spc = self.dataIn.data_spc
136 135 self.dataOut.data_cspc = self.dataIn.data_cspc
137 136 self.dataOut.nProfiles = self.dataIn.nProfiles
138 137 self.dataOut.nIncohInt = self.dataIn.nIncohInt
139 138 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
140 139 self.dataOut.ippFactor = self.dataIn.ippFactor
141 140 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
142 141 self.dataOut.spc_noise = self.dataIn.getNoise()
143 142 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
144 143 # self.dataOut.normFactor = self.dataIn.normFactor
145 144 self.dataOut.pairsList = self.dataIn.pairsList
146 145 self.dataOut.groupList = self.dataIn.pairsList
147 146 self.dataOut.flagNoData = False
148 147
149 148 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
150 149 self.dataOut.ChanDist = self.dataIn.ChanDist
151 150 else: self.dataOut.ChanDist = None
152 151
153 152 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
154 153 # self.dataOut.VelRange = self.dataIn.VelRange
155 154 #else: self.dataOut.VelRange = None
156 155
157 156 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
158 157 self.dataOut.RadarConst = self.dataIn.RadarConst
159 158
160 159 if hasattr(self.dataIn, 'NPW'): #NPW
161 160 self.dataOut.NPW = self.dataIn.NPW
162 161
163 162 if hasattr(self.dataIn, 'COFA'): #COFA
164 163 self.dataOut.COFA = self.dataIn.COFA
165 164
166 165
167 166
168 167 #---------------------- Correlation Data ---------------------------
169 168
170 169 if self.dataIn.type == "Correlation":
171 170 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
172 171
173 172 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
174 173 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
175 174 self.dataOut.groupList = (acf_pairs, ccf_pairs)
176 175
177 176 self.dataOut.abscissaList = self.dataIn.lagRange
178 177 self.dataOut.noise = self.dataIn.noise
179 178 self.dataOut.data_snr = self.dataIn.SNR
180 179 self.dataOut.flagNoData = False
181 180 self.dataOut.nAvg = self.dataIn.nAvg
182 181
183 182 #---------------------- Parameters Data ---------------------------
184 183
185 184 if self.dataIn.type == "Parameters":
186 185 self.dataOut.copy(self.dataIn)
187 186 self.dataOut.flagNoData = False
188
187 print("Data In")
189 188 return True
190 189
191 190 self.__updateObjFromInput()
192 191 self.dataOut.utctimeInit = self.dataIn.utctime
193 192 self.dataOut.paramInterval = self.dataIn.timeInterval
194 193
195 194 return
196 195
197 196
198 197 def target(tups):
199 198
200 199 obj, args = tups
201 200
202 201 return obj.FitGau(args)
203 202
204 203 class RemoveWideGC(Operation):
205 204 ''' This class removes wide ground clutter and replaces it with simple interpolated points.
206 205 This mainly applies to CLAIRE radar
207 206
208 207 ClutterWidth : Width to look for the clutter peak
209 208
210 209 Input:
211 210
212 211 self.dataOut.data_pre : SPC and CSPC
213 212 self.dataOut.spc_range : To select wind and rainfall velocities
214 213
215 214 Affected:
216 215
217 216 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
218 217
219 218 Written by D. Scipión 25.02.2021
220 219 '''
221 220 def __init__(self):
222 221 Operation.__init__(self)
223 222 self.i = 0
224 223 self.ich = 0
225 224 self.ir = 0
226 225
227 226 def run(self, dataOut, ClutterWidth=2.5):
228 227 # print ('Entering RemoveWideGC ... ')
229 228
230 229 self.spc = dataOut.data_pre[0].copy()
231 230 self.spc_out = dataOut.data_pre[0].copy()
232 231 self.Num_Chn = self.spc.shape[0]
233 232 self.Num_Hei = self.spc.shape[2]
234 233 VelRange = dataOut.spc_range[2][:-1]
235 234 dv = VelRange[1]-VelRange[0]
236 235
237 236 # Find the velocities that corresponds to zero
238 237 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
239 238
240 239 # Removing novalid data from the spectra
241 240 for ich in range(self.Num_Chn) :
242 241 for ir in range(self.Num_Hei) :
243 242 # Estimate the noise at each range
244 243 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
245 244
246 245 # Removing the noise floor at each range
247 246 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
248 247 self.spc[ich,novalid,ir] = HSn
249 248
250 249 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
251 250 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
252 251 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
253 252 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
254 253 continue
255 254 junk3 = numpy.squeeze(numpy.diff(j1index))
256 255 junk4 = numpy.squeeze(numpy.diff(j2index))
257 256
258 257 valleyindex = j2index[numpy.where(junk4>1)]
259 258 peakindex = j1index[numpy.where(junk3>1)]
260 259
261 260 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
262 261 if numpy.size(isvalid) == 0 :
263 262 continue
264 263 if numpy.size(isvalid) >1 :
265 264 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
266 265 isvalid = isvalid[vindex]
267 266
268 267 # clutter peak
269 268 gcpeak = peakindex[isvalid]
270 269 vl = numpy.where(valleyindex < gcpeak)
271 270 if numpy.size(vl) == 0:
272 271 continue
273 272 gcvl = valleyindex[vl[0][-1]]
274 273 vr = numpy.where(valleyindex > gcpeak)
275 274 if numpy.size(vr) == 0:
276 275 continue
277 276 gcvr = valleyindex[vr[0][0]]
278 277
279 278 # Removing the clutter
280 279 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
281 280 gcindex = gc_values[gcvl+1:gcvr-1]
282 281 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
283 282
284 283 dataOut.data_pre[0] = self.spc_out
285 284 #print ('Leaving RemoveWideGC ... ')
286 285 return dataOut
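The clutter removal above replaces the bins between the two valleys that bracket the ground-clutter peak with a linear interpolation between those valleys. The core of that step, reduced to a single range gate with invented numbers:

    import numpy

    vel = numpy.array([-3., -2., -1., 0., 1., 2., 3.])
    spc = numpy.array([1.0, 1.2, 6.0, 9.0, 5.5, 1.1, 1.0])   # wide peak around 0 m/s
    gcindex = numpy.array([2, 3, 4])                         # clutter bins to replace
    interpindex = numpy.array([1, 5])                        # bracketing valleys
    spc[gcindex] = numpy.interp(vel[gcindex], vel[interpindex], spc[interpindex])
    print(spc)   # the clutter bins now lie on the line between the two valleys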
287 286
288 287 class SpectralFilters(Operation):
289 288 ''' This class replaces the novalid values with noise for each channel.
290 289 This applies to CLAIRE RADAR
291 290
292 291 PositiveLimit : RightLimit of novalid data
293 292 NegativeLimit : LeftLimit of novalid data
294 293
295 294 Input:
296 295
297 296 self.dataOut.data_pre : SPC and CSPC
298 297 self.dataOut.spc_range : To select wind and rainfall velocities
299 298
300 299 Affected:
301 300
302 301 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
303 302
304 303 Written by D. Scipión 29.01.2021
305 304 '''
306 305 def __init__(self):
307 306 Operation.__init__(self)
308 307 self.i = 0
309 308
310 309 def run(self, dataOut, NegativeLimit=-1.5, PositiveLimit=1.5): # velocity limits in m/s (default values assumed)
311 310
312 311 self.spc = dataOut.data_pre[0].copy()
313 312 self.Num_Chn = self.spc.shape[0]
314 313 VelRange = dataOut.spc_range[2]
315 314
316 315 # novalid corresponds to the velocity bins between NegativeLimit and PositiveLimit
317 316 novalid = numpy.where(numpy.logical_and(VelRange >= NegativeLimit, VelRange <= PositiveLimit))[0]
318 317
319 318 # Removing novalid data from the spectra
320 319 for i in range(self.Num_Chn):
321 320 self.spc[i,novalid,:] = dataOut.noise[i]
322 321 dataOut.data_pre[0] = self.spc
323 322 return dataOut
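The novalid selection used above keeps only the velocity bins between the two limits, whose power is then replaced by the channel noise. A standalone sketch with arbitrary example limits:

    import numpy

    vel_range = numpy.linspace(-10, 10, 9)                  # m/s, arbitrary grid
    NegativeLimit, PositiveLimit = -1.5, 1.5                # example limits, assumed
    novalid = numpy.where(numpy.logical_and(vel_range >= NegativeLimit,
                                            vel_range <= PositiveLimit))[0]
    print(novalid)   # indices of the bins to be replaced by the channel noise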
324 323
325 324 class GaussianFit(Operation):
326 325
327 326 '''
328 327 Function that fits one or two generalized Gaussians (GG) to the PSD shape,
329 328 across a "power band" identified from the cumulative sum of the
330 329 measured spectrum minus the noise.
331 330
332 331 Input:
333 332 self.dataOut.data_pre : SelfSpectra
334 333
335 334 Output:
336 335 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
337 336
338 337 '''
339 338 def __init__(self):
340 339 Operation.__init__(self)
341 340 self.i=0
342 341
343 342
344 343 # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
345 344 def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
346 345 """This routine will find a couple of generalized Gaussians to a power spectrum
347 346 methods: generalized, squared
348 347 input: spc
349 348 output:
350 349 noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
351 350 """
352 351 print ('Entering ',method,' double Gaussian fit')
353 352 self.spc = dataOut.data_pre[0].copy()
354 353 self.Num_Hei = self.spc.shape[2]
355 354 self.Num_Bin = self.spc.shape[1]
356 355 self.Num_Chn = self.spc.shape[0]
357 356
358 357 start_time = time.time()
359 358
360 359 pool = Pool(processes=self.Num_Chn)
361 360 args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
362 361 objs = [self for __ in range(self.Num_Chn)]
363 362 attrs = list(zip(objs, args))
364 363 DGauFitParam = pool.map(target, attrs)
365 364 # Parameters:
366 365 # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
367 366 dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
368 367
369 368 # Double Gaussian Curves
370 369 gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
371 370 gau0[:] = numpy.NaN
372 371 gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
373 372 gau1[:] = numpy.NaN
374 373 x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
375 374 for iCh in range(self.Num_Chn):
376 375 N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
377 376 N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
378 377 A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
379 378 A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
380 379 v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
381 380 v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
382 381 s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
383 382 s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
384 383 if method == 'generalized':
385 384 p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
386 385 p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
387 386 elif method == 'squared':
388 387 p0 = 2.
389 388 p1 = 2.
390 389 gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
391 390 gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
392 391 dataOut.GaussFit0 = gau0
393 392 dataOut.GaussFit1 = gau1
394 393
395 394 print('Leaving ',method ,' double Gaussian fit')
396 395 return dataOut
397 396
398 397 def FitGau(self, X):
399 398 # print('Entering FitGau')
400 399 # Assigning the variables
401 400 Vrange, ch, wnoise, num_intg, SNRlimit = X
402 401 # Noise Limits
403 402 noisebl = wnoise * 0.9
404 403 noisebh = wnoise * 1.1
405 404 # Radar Velocity
406 405 Va = max(Vrange)
407 406 deltav = Vrange[1] - Vrange[0]
408 407 x = numpy.arange(self.Num_Bin)
409 408
410 409 # print ('stop 0')
411 410
412 411 # 5 parameters, 2 Gaussians
413 412 DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
414 413 DGauFitParam[:] = numpy.NaN
415 414
416 415 # SPCparam = []
417 416 # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
418 417 # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
419 418 # SPC_ch1[:] = 0 #numpy.NaN
420 419 # SPC_ch2[:] = 0 #numpy.NaN
421 420 # print ('stop 1')
422 421 for ht in range(self.Num_Hei):
423 422 # print (ht)
424 423 # print ('stop 2')
425 424 # Spectra at each range
426 425 spc = numpy.asarray(self.spc)[ch,:,ht]
427 426 snr = ( spc.mean() - wnoise ) / wnoise
428 427 snrdB = 10.*numpy.log10(snr)
429 428
430 429 #print ('stop 3')
431 430 if snrdB < SNRlimit :
432 431 # snr = numpy.NaN
433 432 # SPC_ch1[:,ht] = 0#numpy.NaN
434 433 # SPC_ch1[:,ht] = 0#numpy.NaN
435 434 # SPCparam = (SPC_ch1,SPC_ch2)
436 435 # print ('SNR less than SNRth')
437 436 continue
438 437 # wnoise = hildebrand_sekhon(spc,num_intg)
439 438 # print ('stop 2.01')
440 439 #############################################
441 440 # normalizing spc and noise
442 441 # This part differs from gg1
443 442 # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
444 443 #spc = spc / spc_norm_max
445 444 # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
446 445 #############################################
447 446
448 447 # print ('stop 2.1')
449 448 fatspectra=1.0
450 449 # noise per channel.... we might want to use the noise at each range
451 450
452 451 # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
453 452 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
454 453 #if wnoise>1.1*pnoise: # to be tested later
455 454 # wnoise=pnoise
456 455 # noisebl = wnoise*0.9
457 456 # noisebh = wnoise*1.1
458 457 spc = spc - wnoise # signal
459 458
460 459 # print ('stop 2.2')
461 460 minx = numpy.argmin(spc)
462 461 #spcs=spc.copy()
463 462 spcs = numpy.roll(spc,-minx)
464 463 cum = numpy.cumsum(spcs)
465 464 # tot_noise = wnoise * self.Num_Bin #64;
466 465
467 466 # print ('stop 2.3')
468 467 # snr = sum(spcs) / tot_noise
469 468 # snrdB = 10.*numpy.log10(snr)
470 469 #print ('stop 3')
471 470 # if snrdB < SNRlimit :
472 471 # snr = numpy.NaN
473 472 # SPC_ch1[:,ht] = 0#numpy.NaN
474 473 # SPC_ch1[:,ht] = 0#numpy.NaN
475 474 # SPCparam = (SPC_ch1,SPC_ch2)
476 475 # print ('SNR less than SNRth')
477 476 # continue
478 477
479 478
480 479 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
481 480 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
482 481 # print ('stop 4')
483 482 cummax = max(cum)
484 483 epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
485 484 cumlo = cummax * epsi
486 485 cumhi = cummax * (1-epsi)
487 486 powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
488 487
489 488 # print ('stop 5')
490 489 if len(powerindex) < 1:# case for powerindex 0
491 490 # print ('powerindex < 1')
492 491 continue
493 492 powerlo = powerindex[0]
494 493 powerhi = powerindex[-1]
495 494 powerwidth = powerhi-powerlo
496 495 if powerwidth <= 1:
497 496 # print('powerwidth <= 1')
498 497 continue
499 498
500 499 # print ('stop 6')
501 500 firstpeak = powerlo + powerwidth/10.# first gaussian energy location
502 501 secondpeak = powerhi - powerwidth/10. #second gaussian energy location
503 502 midpeak = (firstpeak + secondpeak)/2.
504 503 firstamp = spcs[int(firstpeak)]
505 504 secondamp = spcs[int(secondpeak)]
506 505 midamp = spcs[int(midpeak)]
507 506
508 507 y_data = spc + wnoise
509 508
510 509 ''' single Gaussian '''
511 510 shift0 = numpy.mod(midpeak+minx, self.Num_Bin )
512 511 width0 = powerwidth/4.#Initialization entire power of spectrum divided by 4
513 512 power0 = 2.
514 513 amplitude0 = midamp
515 514 state0 = [shift0,width0,amplitude0,power0,wnoise]
516 515 bnds = ((0,self.Num_Bin-1),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
517 516 lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data,x,num_intg), bounds=bnds, approx_grad=True)
518 517 # print ('stop 7.1')
519 518 # print (bnds)
520 519
521 520 chiSq1=lsq1[1]
522 521
523 522 # print ('stop 8')
524 523 if fatspectra<1.0 and powerwidth<4:
525 524 choice=0
526 525 Amplitude0=lsq1[0][2]
527 526 shift0=lsq1[0][0]
528 527 width0=lsq1[0][1]
529 528 p0=lsq1[0][3]
530 529 Amplitude1=0.
531 530 shift1=0.
532 531 width1=0.
533 532 p1=0.
534 533 noise=lsq1[0][4]
535 534 #return (numpy.array([shift0,width0,Amplitude0,p0]),
536 535 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
537 536
538 537 # print ('stop 9')
539 538 ''' two Gaussians '''
540 539 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
541 540 shift0 = numpy.mod(firstpeak+minx, self.Num_Bin )
542 541 shift1 = numpy.mod(secondpeak+minx, self.Num_Bin )
543 542 width0 = powerwidth/6.
544 543 width1 = width0
545 544 power0 = 2.
546 545 power1 = power0
547 546 amplitude0 = firstamp
548 547 amplitude1 = secondamp
549 548 state0 = [shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
550 549 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
551 550 bnds=((0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
552 551 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
553 552
554 553 # print ('stop 10')
555 554 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
556 555
557 556 # print ('stop 11')
558 557 chiSq2 = lsq2[1]
559 558
560 559 # print ('stop 12')
561 560
562 561 oneG = (chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
563 562
564 563 # print ('stop 13')
565 564 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
566 565 if oneG:
567 566 choice = 0
568 567 else:
569 568 w1 = lsq2[0][1]; w2 = lsq2[0][5]
570 569 a1 = lsq2[0][2]; a2 = lsq2[0][6]
571 570 p1 = lsq2[0][3]; p2 = lsq2[0][7]
572 571 s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
573 572 s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
574 573 gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each generalized Gaussian with proper p scaling
575 574
576 575 if gp1>gp2:
577 576 if a1>0.7*a2:
578 577 choice = 1
579 578 else:
580 579 choice = 2
581 580 elif gp2>gp1:
582 581 if a2>0.7*a1:
583 582 choice = 2
584 583 else:
585 584 choice = 1
586 585 else:
587 586 choice = numpy.argmax([a1,a2])+1
588 587 #else:
589 588 #choice=argmin([std2a,std2b])+1
590 589
591 590 else: # with low SNR go to the most energetic peak
592 591 choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
593 592
594 593 # print ('stop 14')
595 594 shift0 = lsq2[0][0]
596 595 vel0 = Vrange[0] + shift0 * deltav
597 596 shift1 = lsq2[0][4]
598 597 # vel1=Vrange[0] + shift1 * deltav
599 598
600 599 # max_vel = 1.0
601 600 # Va = max(Vrange)
602 601 # deltav = Vrange[1]-Vrange[0]
603 602 # print ('stop 15')
604 603 #first peak will be 0, second peak will be 1
605 604 # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D.Scipión 19.03.2021
606 605 if vel0 > -Va and vel0 < Va : #first peak is in the correct range
607 606 shift0 = lsq2[0][0]
608 607 width0 = lsq2[0][1]
609 608 Amplitude0 = lsq2[0][2]
610 609 p0 = lsq2[0][3]
611 610
612 611 shift1 = lsq2[0][4]
613 612 width1 = lsq2[0][5]
614 613 Amplitude1 = lsq2[0][6]
615 614 p1 = lsq2[0][7]
616 615 noise = lsq2[0][8]
617 616 else:
618 617 shift1 = lsq2[0][0]
619 618 width1 = lsq2[0][1]
620 619 Amplitude1 = lsq2[0][2]
621 620 p1 = lsq2[0][3]
622 621
623 622 shift0 = lsq2[0][4]
624 623 width0 = lsq2[0][5]
625 624 Amplitude0 = lsq2[0][6]
626 625 p0 = lsq2[0][7]
627 626 noise = lsq2[0][8]
628 627
629 628 if Amplitude0<0.05: # in case the peak is noise
630 629 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
631 630 if Amplitude1<0.05:
632 631 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
633 632
634 633 # print ('stop 16 ')
635 634 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
636 635 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
637 636 # SPCparam = (SPC_ch1,SPC_ch2)
638 637
639 638 DGauFitParam[0,ht,0] = noise
640 639 DGauFitParam[0,ht,1] = noise
641 640 DGauFitParam[1,ht,0] = Amplitude0
642 641 DGauFitParam[1,ht,1] = Amplitude1
643 642 DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
644 643 DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
645 644 DGauFitParam[3,ht,0] = width0 * deltav
646 645 DGauFitParam[3,ht,1] = width1 * deltav
647 646 DGauFitParam[4,ht,0] = p0
648 647 DGauFitParam[4,ht,1] = p1
649 648
650 649 # print (DGauFitParam.shape)
651 650 # print ('Leaving FitGau')
652 651 return DGauFitParam
653 652 # return SPCparam
654 653 # return GauSPC
655 654
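# ----------------------------------------------------------------------------
# Illustrative sketch (not part of FitGau): the cumulative-sum "power band"
# selection used above, on a synthetic spectrum. The noise-subtracted spectrum
# is rolled so its minimum is the first bin, and the bins holding the central
# 84 % of the cumulative power (epsi = 0.08 on each side) define
# powerlo/powerhi. Names and values are examples only.
def _sketch_power_band():
    import numpy
    x = numpy.arange(64)
    wnoise = 0.2
    spc = wnoise + numpy.exp(-0.5*((x - 40.)/4.)**2)   # synthetic spectrum with a noise floor
    signal = spc - wnoise                              # noise-subtracted signal
    minx = numpy.argmin(signal)
    spcs = numpy.roll(signal, -minx)                   # minimum bin goes first
    cum = numpy.cumsum(spcs)
    epsi = 0.08
    cumlo, cumhi = cum.max()*epsi, cum.max()*(1. - epsi)
    powerindex = numpy.where((cum > cumlo) & (cum < cumhi))[0]
    powerlo, powerhi = powerindex[0], powerindex[-1]   # edges of the power band
    return powerlo, powerhi, powerhi - powerlo         # powerwidth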
656 655 def y_model1(self,x,state):
657 656 shift0, width0, amplitude0, power0, noise = state
658 657 model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
659 658 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
660 659 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
661 660 return model0 + model0u + model0d + noise
662 661
663 662 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
664 663 shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
665 664 model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
666 665 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
667 666 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
668 667
669 668 model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
670 669 model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
671 670 model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
672 671 return model0 + model0u + model0d + model1 + model1u + model1d + noise
673 672
674 673 def misfit1(self,state,y_data,x,num_intg): # This function measures how close the real data is to the model data; the closer it is, the better the fit.
675 674
676 675 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
677 676
678 677 def misfit2(self,state,y_data,x,num_intg):
679 678 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
680 679
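# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the class): a minimal single-Gaussian fit in
# the same log-domain least-squares sense as misfit1/y_model1, using
# scipy.optimize.fmin_l_bfgs_b with numerical gradients on synthetic data.
# All names, bounds and values below are examples only.
def _sketch_single_gaussian_fit():
    import numpy
    from scipy.optimize import fmin_l_bfgs_b

    nbin = 64
    x = numpy.arange(nbin)
    shift, width, amp, power, noise = 20., 4., 3., 2., 0.5   # "true" parameters
    y = noise + amp*numpy.exp(-0.5*numpy.abs((x - shift)/width)**power)

    def model(state):
        sh, wd, am, pw, ns = state
        return ns + am*numpy.exp(-0.5*numpy.abs((x - sh)/wd)**pw)

    def misfit(state, num_intg=7):
        # same chi-square-like measure as misfit1: log-spectrum residuals
        return num_intg*numpy.sum((numpy.log(y) - numpy.log(model(state)))**2)

    state0 = [25., 6., 1., 2., 0.4]                          # initial guess
    bnds = ((0, nbin-1), (1, nbin/2.), (0, None), (0.5, 3.), (0.1, 1.0))
    best, chi2, _ = fmin_l_bfgs_b(misfit, state0, bounds=bnds, approx_grad=True)
    return best, chi2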
681 680
682 681
683 682 class PrecipitationProc(Operation):
684 683
685 684 '''
686 685 Operator that estimates the reflectivity factor (Z) and the rainfall rate (R)
687 686
688 687 Input:
689 688 self.dataOut.data_pre : SelfSpectra
690 689
691 690 Output:
692 691
693 692 self.dataOut.data_output : Reflectivity factor, rainfall Rate
694 693
695 694
696 695 Parameters affected:
697 696 '''
698 697
699 698 def __init__(self):
700 699 Operation.__init__(self)
701 700 self.i=0
702 701
703 702 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
704 703 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
705 704
706 705 # print ('Entering PrecepitationProc ... ')
707 706
708 707 if radar == "MIRA35C" :
709 708
710 709 self.spc = dataOut.data_pre[0].copy()
711 710 self.Num_Hei = self.spc.shape[2]
712 711 self.Num_Bin = self.spc.shape[1]
713 712 self.Num_Chn = self.spc.shape[0]
714 713 Ze = self.dBZeMODE2(dataOut)
715 714
716 715 else:
717 716
718 717 self.spc = dataOut.data_pre[0].copy()
719 718
720 719 # NOTE: the range gates covered by the Tx pulse must be removed
721 720 self.spc[:,:,0:7]= numpy.NaN
722 721
723 722 self.Num_Hei = self.spc.shape[2]
724 723 self.Num_Bin = self.spc.shape[1]
725 724 self.Num_Chn = self.spc.shape[0]
726 725
727 726 VelRange = dataOut.spc_range[2]
728 727
729 728 ''' Compute the radar constant '''
730 729
731 730 self.Pt = Pt
732 731 self.Gt = Gt
733 732 self.Gr = Gr
734 733 self.Lambda = Lambda
735 734 self.aL = aL
736 735 self.tauW = tauW
737 736 self.ThetaT = ThetaT
738 737 self.ThetaR = ThetaR
739 738 self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
740 739 self.lt = 10**(1.67/10) # Tx cable loss 1.67 dB
741 740 self.lr = 10**(5.73/10) # Rx cable loss 5.73 dB
742 741
743 742 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
744 743 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
745 744 RadarConstant = 10e-26 * Numerator / Denominator #
746 745 ExpConstant = 10**(40/10) #Experimental constant
747 746
748 747 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
749 748 for i in range(self.Num_Chn):
750 749 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
751 750 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
752 751
753 752 SPCmean = numpy.mean(SignalPower, 0)
754 753 Pr = SPCmean[:,:]/dataOut.normFactor
755 754
756 755 # Declaring auxiliary variables
757 756 Range = dataOut.heightList*1000. #Range in m
758 757 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
759 758 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
760 759 zMtrx = rMtrx+Altitude
761 760 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
762 761 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
763 762
764 763 # height dependence to air density Foote and Du Toit (1969)
765 764 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
766 765 VMtrx = VelMtrx / delv_z #Normalized velocity
767 766 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
768 767 # Diameter is related to the fall speed of falling drops
769 768 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
770 769 # Only valid for D>= 0.16 mm
771 770 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
772 771
773 772 #Calculate Radar Reflectivity ETAn
774 773 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
775 774 ETAd = ETAn * 6.18 * numpy.exp( -0.6 * D_Vz ) * delv_z
776 775 # Radar Cross Section
777 776 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
778 777 # Drop Size Distribution
779 778 DSD = ETAn / sigmaD
780 779 # Equivalent reflectivity
781 780 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
782 781 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
783 782 # RainFall Rate
784 783 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
785 784
786 785 # Censoring the data
787 786 # Removing data below SNRth; the SNR should be evaluated per channel
788 787 SNRth = 10**(SNRdBlimit/10) #-30dB
789 788 novalid = numpy.where((dataOut.data_snr[0,:] <SNRth) | (dataOut.data_snr[1,:] <SNRth) | (dataOut.data_snr[2,:] <SNRth)) # AND condition. Maybe OR condition better
790 789 W = numpy.nanmean(dataOut.data_dop,0)
791 790 W[novalid] = numpy.NaN
792 791 Ze_org[novalid] = numpy.NaN
793 792 RR[novalid] = numpy.NaN
794 793
795 794 dataOut.data_output = RR[8]
796 795 dataOut.data_param = numpy.ones([3,self.Num_Hei])
797 796 dataOut.channelList = [0,1,2]
798 797
799 798 dataOut.data_param[0]=10*numpy.log10(Ze_org)
800 799 dataOut.data_param[1]=-W
801 800 dataOut.data_param[2]=RR
802 801
803 802 # print ('Leaving PrecepitationProc ... ')
804 803 return dataOut
805 804
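# ----------------------------------------------------------------------------
# Illustrative sketch (not part of PrecipitationProc): the drop-diameter and
# rain-rate relations used above, applied to hypothetical per-bin values. The
# fall speed is density-corrected with the Foote and Du Toit (1969) factor, the
# diameter follows D = -1.667*ln(0.9369 - 0.097087*v'), and the rain rate is
# RR = 0.0006*pi*sum(D^3 * DSD * v) in mm/hr, as in the code above.
def _sketch_rain_rate(vel_bins=(2., 4., 6.), dsd_bins=(100., 50., 10.), altitude_m=3350.):
    import numpy
    vel = numpy.asarray(vel_bins, dtype=float)                # fall speed per bin [m/s]
    dsd = numpy.asarray(dsd_bins, dtype=float)                # drop concentration per bin
    delv_z = 1 + 3.68e-5*altitude_m + 1.71e-9*altitude_m**2   # air-density correction
    vnorm = vel / delv_z                                      # normalized fall speed
    vnorm[vnorm > 9.6] = numpy.nan                            # outside the valid range
    D = -1.667 * numpy.log(0.9369 - 0.097087*vnorm)           # drop diameter [mm]
    D[D < 0.16] = numpy.nan                                   # relation valid for D >= 0.16 mm
    return 0.0006*numpy.pi * numpy.nansum(D**3 * dsd * vel)   # rain rate [mm/hr]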
806 805 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
807 806
808 807 NPW = dataOut.NPW
809 808 COFA = dataOut.COFA
810 809
811 810 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
812 811 RadarConst = dataOut.RadarConst
813 812 #frequency = 34.85*10**9
814 813
815 814 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
816 815 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
817 816
818 817 ETA = numpy.sum(SNR,1)
819 818
820 819 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
821 820
822 821 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
823 822
824 823 for r in range(self.Num_Hei):
825 824
826 825 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
827 826 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
828 827
829 828 return Ze
830 829
831 830 # def GetRadarConstant(self):
832 831 #
833 832 # """
834 833 # Constants:
835 834 #
836 835 # Pt: Transmission Power dB 5kW 5000
837 836 # Gt: Transmission Gain dB 24.7 dB 295.1209
838 837 # Gr: Reception Gain dB 18.5 dB 70.7945
839 838 # Lambda: Wavelength m 0.6741 m 0.6741
840 839 # aL: Attenuation losses dB 4dB 2.5118
841 840 # tauW: Width of transmission pulse s 4us 4e-6
842 841 # ThetaT: Transmission antenna beam angle rad 0.1656317 rad 0.1656317
843 842 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
844 843 #
845 844 # """
846 845 #
847 846 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
848 847 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
849 848 # RadarConstant = Numerator / Denominator
850 849 #
851 850 # return RadarConstant
852 851
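# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the class): the radar-constant expression
# used in PrecipitationProc.run(), evaluated with the same default parameters.
# SPEED_OF_LIGHT is defined locally here so the sketch is self-contained; in
# the module it is expected as a global constant [m/s].
def _sketch_radar_constant(Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
                           tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087):
    import numpy
    SPEED_OF_LIGHT = 299792458.0                       # [m/s]
    Numerator = (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2)
    Denominator = Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR
    return 10e-26 * Numerator / Denominator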
853 852
854 853
855 854 class FullSpectralAnalysis(Operation):
856 855
857 856 """
858 857 Function that implements Full Spectral Analysis technique.
859 858
860 859 Input:
861 860 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
862 861 self.dataOut.groupList : Pairlist of channels
863 862 self.dataOut.ChanDist : Physical distance between receivers
864 863
865 864
866 865 Output:
867 866
868 867 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
869 868
870 869
871 870 Parameters affected: Winds, height range, SNR
872 871
873 872 """
874 873 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
875 874 minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
876 875
877 876 spc = dataOut.data_pre[0].copy()
878 877 cspc = dataOut.data_pre[1]
879 878 nHeights = spc.shape[2]
880 879
881 880 # first_height = 0.75 #km (ref: data header 20170822)
882 881 # resolution_height = 0.075 #km
883 882 '''
884 883 finding height range. check this when radar parameters are changed!
885 884 '''
886 885 if maxheight is not None:
887 886 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
888 887 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
889 888 else:
890 889 range_max = nHeights
891 890 if minheight is not None:
892 891 # range_min = int((minheight - first_height) / resolution_height) # theoretical
893 892 range_min = int(13.26 * minheight - 5) # empirical, works better
894 893 if range_min < 0:
895 894 range_min = 0
896 895 else:
897 896 range_min = 0
898 897
899 898 pairsList = dataOut.groupList
900 899 if dataOut.ChanDist is not None :
901 900 ChanDist = dataOut.ChanDist
902 901 else:
903 902 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
904 903
905 904 # 4 variables: zonal, meridional, vertical, and average SNR
906 905 data_param = numpy.zeros([4,nHeights]) * numpy.NaN
907 906 velocityX = numpy.zeros([nHeights]) * numpy.NaN
908 907 velocityY = numpy.zeros([nHeights]) * numpy.NaN
909 908 velocityZ = numpy.zeros([nHeights]) * numpy.NaN
910 909
911 910 dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
912 911
913 912 '''***********************************************WIND ESTIMATION**************************************'''
914 913 for Height in range(nHeights):
915 914
916 915 if Height >= range_min and Height < range_max:
917 916 # error_code will be useful in future analysis
918 917 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
919 918 ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
920 919
921 920 if abs(Vzon) < 100. and abs(Vmer) < 100.:
922 921 velocityX[Height] = Vzon
923 922 velocityY[Height] = -Vmer
924 923 velocityZ[Height] = Vver
925 924
926 925 # Censoring data with SNR threshold
927 926 dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
928 927
929 928 data_param[0] = velocityX
930 929 data_param[1] = velocityY
931 930 data_param[2] = velocityZ
932 931 data_param[3] = dbSNR
933 932 dataOut.data_param = data_param
934 933 return dataOut
935 934
936 935 def moving_average(self,x, N=2):
937 936 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
938 937 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
939 938
940 939 def gaus(self,xSamples,Amp,Mu,Sigma):
941 940 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
942 941
943 942 def Moments(self, ySamples, xSamples):
944 943 Power = numpy.nanmean(ySamples) # Power, 0th Moment
945 944 yNorm = ySamples / numpy.nansum(ySamples)
946 945 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
947 946 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
948 947 StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Standard deviation, spectral width
949 948 return numpy.array([Power,RadVel,StdDev])
950 949
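# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the class): Moments() applied to a synthetic
# Gaussian line recovers, approximately, its mean power, its centre (1st
# moment) and its width (square root of the 2nd central moment). Values are
# examples only.
def _sketch_moments():
    import numpy
    x = numpy.linspace(-5., 5., 256)
    y = numpy.exp(-0.5*((x - 1.0)/0.8)**2)                   # centre 1.0, sigma 0.8
    power = numpy.nanmean(y)                                 # 0th moment
    ynorm = y / numpy.nansum(y)
    radvel = numpy.nansum(x*ynorm)                           # 1st moment, ~1.0
    width = numpy.sqrt(numpy.nansum(ynorm*(x - radvel)**2))  # spectral width, ~0.8
    return power, radvel, width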
951 950 def StopWindEstimation(self, error_code):
952 951 Vzon = numpy.NaN
953 952 Vmer = numpy.NaN
954 953 Vver = numpy.NaN
955 954 return Vzon, Vmer, Vver, error_code
956 955
957 956 def AntiAliasing(self, interval, maxstep):
958 957 """
959 958 function to prevent errors from aliased values when computing the phase slope
960 959 """
961 960 antialiased = numpy.zeros(len(interval))
962 961 copyinterval = interval.copy()
963 962
964 963 antialiased[0] = copyinterval[0]
965 964
966 965 for i in range(1,len(antialiased)):
967 966 step = interval[i] - interval[i-1]
968 967 if step > maxstep:
969 968 copyinterval -= 2*numpy.pi
970 969 antialiased[i] = copyinterval[i]
971 970 elif step < maxstep*(-1):
972 971 copyinterval += 2*numpy.pi
973 972 antialiased[i] = copyinterval[i]
974 973 else:
975 974 antialiased[i] = copyinterval[i].copy()
976 975
977 976 return antialiased
978 977
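# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the class): the same 2*pi step-removal idea
# as AntiAliasing(), demonstrated with numpy.unwrap on a wrapped phase ramp.
# numpy.unwrap uses a fixed threshold of pi, whereas AntiAliasing accepts a
# custom maxstep (4.5 rad in the call below); this is only a sanity check of
# the concept.
def _sketch_phase_unwrap():
    import numpy
    true_phase = numpy.linspace(0., 6.*numpy.pi, 40)         # monotonic phase ramp
    wrapped = numpy.angle(numpy.exp(1j*true_phase))          # wrapped to (-pi, pi]
    unwrapped = numpy.unwrap(wrapped)                        # 2*pi jumps removed
    return numpy.allclose(unwrapped, true_phase)             # -> True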
979 978 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
980 979 """
981 980 Function that Calculates Zonal, Meridional and Vertical wind velocities.
982 981 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
983 982
984 983 Input:
985 984 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
986 985 pairsList : Pairlist of channels
987 986 ChanDist : array of xi_ij and eta_ij
988 987 Height : height at which data is processed
989 988 noise : noise in [channels] format for specific height
990 989 AbbsisaRange : range of the frequencies or velocities
991 990 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
992 991
993 992 Output:
994 993 Vzon, Vmer, Vver : wind velocities
995 994 error_code : int that states where code is terminated
996 995
997 996 0 : no error detected
998 997 1 : Gaussian of mean spc exceeds widthlimit
999 998 2 : no Gaussian of mean spc found
1000 999 3 : SNR too low or velocity too high -> e.g. precipitation
1001 1000 4 : at least one Gaussian of cspc exceeds widthlimit
1002 1001 5 : zero out of three cspc Gaussian fits converged
1003 1002 6 : phase slope fit could not be found
1004 1003 7 : arrays used to fit phase have different length
1005 1004 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1006 1005
1007 1006 """
1008 1007
1009 1008 error_code = 0
1010 1009
1011 1010 nChan = spc.shape[0]
1012 1011 nProf = spc.shape[1]
1013 1012 nPair = cspc.shape[0]
1014 1013
1015 1014 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1016 1015 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1017 1016 phase = numpy.zeros([nPair, nProf]) # phase between channels
1018 1017 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1019 1018 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1020 1019 xFrec = AbbsisaRange[0][:-1] # frequency range
1021 1020 xVel = AbbsisaRange[2][:-1] # velocity range
1022 1021 xSamples = xFrec # the frequency range is taken
1023 1022 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1024 1023
1025 1024 # only consider velocities within NegativeLimit and PositiveLimit
1026 1025 if (NegativeLimit is None):
1027 1026 NegativeLimit = numpy.min(xVel)
1028 1027 if (PositiveLimit is None):
1029 1028 PositiveLimit = numpy.max(xVel)
1030 1029 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1031 1030 xSamples_zoom = xSamples[xvalid]
1032 1031
1033 1032 '''Getting Eij and Nij'''
1034 1033 Xi01, Xi02, Xi12 = ChanDist[:,0]
1035 1034 Eta01, Eta02, Eta12 = ChanDist[:,1]
1036 1035
1037 1036 # spwd limit - updated by D. Scipión 30.03.2021
1038 1037 widthlimit = 10
1039 1038 '''************************* SPC is normalized ********************************'''
1040 1039 spc_norm = spc.copy()
1041 1040 # For each channel
1042 1041 for i in range(nChan):
1043 1042 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1044 1043 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1045 1044
1046 1045 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1047 1046
1048 1047 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1049 1048 you only fit the curve and don't need the absolute value of height for calculation,
1050 1049 only for estimation of width. for normalization of cross spectra, you need initial,
1051 1050 unnormalized self-spectra with noise.
1052 1051
1053 1052 Technically, you don't even need to normalize the self-spectra, as you only need the
1054 1053 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1055 1054 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1056 1055 >= 0, as it is the modulus squared of the signals (complex * its conjugate)
1057 1056 """
1058 1057 # initial conditions
1059 1058 popt = [1e-10,0,1e-10]
1060 1059 # Spectra average
1061 1060 SPCMean = numpy.average(SPC_Samples,0)
1062 1061 # Moments in frequency
1063 1062 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1064 1063
1065 1064 # Gauss Fit SPC in frequency domain
1066 1065 if dbSNR > SNRlimit: # only if SNR > SNRth
1067 1066 try:
1068 1067 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1069 1068 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1070 1069 return self.StopWindEstimation(error_code = 1)
1071 1070 FitGauss = self.gaus(xSamples_zoom,*popt)
1072 1071 except :#RuntimeError:
1073 1072 return self.StopWindEstimation(error_code = 2)
1074 1073 else:
1075 1074 return self.StopWindEstimation(error_code = 3)
1076 1075
1077 1076 '''***************************** CSPC Normalization *************************
1078 1077 The self-spectra are used to normalize the cross-spectra. Peaks from precipitation
1079 1078 influence the norm which is not desired. First, a range is identified where the
1080 1079 wind peak is estimated -> sum_wind is sum of those frequencies. Next, the area
1081 1080 around it gets cut off and values replaced by mean determined by the boundary
1082 1081 data -> sum_noise (spc is not normalized here, that's why the noise is important)
1083 1082
1084 1083 The sums are then added and multiplied by range/datapoints, because you need
1085 1084 an integral and not a sum for normalization.
1086 1085
1087 1086 A norm is found according to Briggs 92.
1088 1087 '''
1089 1088 # for each pair
1090 1089 for i in range(nPair):
1091 1090 cspc_norm = cspc[i,:].copy()
1092 1091 chan_index0 = pairsList[i][0]
1093 1092 chan_index1 = pairsList[i][1]
1094 1093 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1095 1094 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1096 1095
1097 1096 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1098 1097 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1099 1098 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1100 1099
1101 1100 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1102 1101 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1103 1102
1104 1103 '''*******************************FIT GAUSS CSPC************************************'''
1105 1104 try:
1106 1105 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1107 1106 if popt01[2] > widthlimit: # CONDITION
1108 1107 return self.StopWindEstimation(error_code = 4)
1109 1108 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1110 1109 if popt02[2] > widthlimit: # CONDITION
1111 1110 return self.StopWindEstimation(error_code = 4)
1112 1111 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1113 1112 if popt12[2] > widthlimit: # CONDITION
1114 1113 return self.StopWindEstimation(error_code = 4)
1115 1114
1116 1115 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1117 1116 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1118 1117 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1119 1118 except:
1120 1119 return self.StopWindEstimation(error_code = 5)
1121 1120
1122 1121
1123 1122 '''************* Getting Fij ***************'''
1124 1123 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1125 1124 GaussCenter = popt[1]
1126 1125 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1127 1126 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1128 1127
1129 1128 # Point where e^-1 is located in the gaussian
1130 1129 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1131 1130 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to "PeMinus1" in "FitGauss"
1132 1131 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1133 1132 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1134 1133
1135 1134 '''********** Taking frequency ranges from mean SPCs **********'''
1136 1135 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1137 1136 Range = numpy.empty(2)
1138 1137 Range[0] = GaussCenter - GauWidth
1139 1138 Range[1] = GaussCenter + GauWidth
1140 1139 # Point in x-axis where the bandwidth is located (min:max)
1141 1140 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1142 1141 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1143 1142 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1144 1143 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1145 1144 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1146 1145 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1147 1146
1148 1147 '''************************** Getting Phase Slope ***************************'''
1149 1148 for i in range(nPair):
1150 1149 if len(FrecRange) > 5:
1151 1150 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1152 1151 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1153 1152 if len(FrecRange) == len(PhaseRange):
1154 1153 try:
1155 1154 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1156 1155 PhaseSlope[i] = slope
1157 1156 PhaseInter[i] = intercept
1158 1157 except:
1159 1158 return self.StopWindEstimation(error_code = 6)
1160 1159 else:
1161 1160 return self.StopWindEstimation(error_code = 7)
1162 1161 else:
1163 1162 return self.StopWindEstimation(error_code = 8)
1164 1163
1165 1164 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
1166 1165
1167 1166 '''Getting constant C'''
1168 1167 cC=(Fij*numpy.pi)**2
1169 1168
1170 1169 '''****** Getting constants F and G ******'''
1171 1170 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1172 1171 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1173 1172 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1174 1173 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1175 1174 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1176 1175 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1177 1176 MijResults = numpy.array([MijResult1, MijResult2])
1178 1177 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1179 1178
1180 1179 '''****** Getting constants A, B and H ******'''
1181 1180 W01 = numpy.nanmax( FitGauss01 )
1182 1181 W02 = numpy.nanmax( FitGauss02 )
1183 1182 W12 = numpy.nanmax( FitGauss12 )
1184 1183
1185 1184 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1186 1185 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1187 1186 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1188 1187 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1189 1188
1190 1189 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1191 1190 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1192 1191
1193 1192 VxVy = numpy.array([[cA,cH],[cH,cB]])
1194 1193 VxVyResults = numpy.array([-cF,-cG])
1195 1194 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1196 1195 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1197 1196 error_code = 0
1198 1197
1199 1198 return Vzon, Vmer, Vver, error_code
1200 1199
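# ----------------------------------------------------------------------------
# Illustrative sketch (not part of WindEstimation): the Briggs and Vincent
# (1992) constants are obtained from two small linear systems exactly as in the
# method above. Baselines, phase slopes and Gaussian peak values below are
# hypothetical numbers chosen only to show the algebra.
def _sketch_briggs_solution():
    import numpy
    # hypothetical antenna baselines (xi, eta) for pairs 01, 02 and 12
    Xi01, Eta01 = 30., 0.
    Xi02, Eta02 = 0., 30.
    Xi12, Eta12 = -30., 30.
    Fij = 0.4                                      # e^-1 half width of the mean-spectrum fit
    PhaseSlope = numpy.array([0.02, 0.05, -0.03])  # hypothetical phase slopes per pair
    W01, W02, W12 = 0.8, 0.7, 0.6                  # peaks of the cross-spectra Gaussian fits

    cC = (Fij*numpy.pi)**2
    # constants F and G from pairs 02 and 12
    MijEijNij = numpy.array([[Xi02, Eta02], [Xi12, Eta12]])
    MijResults = (-PhaseSlope[1:]*cC) / (2*numpy.pi)
    cF, cG = numpy.linalg.solve(MijEijNij, MijResults)
    # constants A, B and H from the three Gaussian peak values
    def Wij(W, Xi, Eta):
        return ((cF*Xi + cG*Eta)**2)/cC - numpy.log(W/numpy.sqrt(numpy.pi/cC))
    WijEijNij = numpy.array([[Xi01**2, Eta01**2, 2*Xi01*Eta01],
                             [Xi02**2, Eta02**2, 2*Xi02*Eta02],
                             [Xi12**2, Eta12**2, 2*Xi12*Eta12]])
    cA, cB, cH = numpy.linalg.solve(WijEijNij, [Wij(W01, Xi01, Eta01),
                                                Wij(W02, Xi02, Eta02),
                                                Wij(W12, Xi12, Eta12)])
    # horizontal winds
    Vmer, Vzon = numpy.linalg.solve(numpy.array([[cA, cH], [cH, cB]]), [-cF, -cG])
    return Vzon, Vmer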
1201 1200 class SpectralMoments(Operation):
1202 1201
1203 1202 '''
1204 1203 Function SpectralMoments()
1205 1204
1206 1205 Calculates moments (power, mean, standard deviation) and SNR of the signal
1207 1206
1208 1207 Type of dataIn: Spectra
1209 1208
1210 1209 Configuration Parameters:
1211 1210
1212 1211 dirCosx : Cosine director in X axis
1213 1212 dirCosy : Cosine director in Y axis
1214 1213
1215 1214 elevation :
1216 1215 azimuth :
1217 1216
1218 1217 Input:
1219 1218 channelList : simple channel list to select e.g. [2,3,7]
1220 1219 self.dataOut.data_pre : Spectral data
1221 1220 self.dataOut.abscissaList : List of frequencies
1222 1221 self.dataOut.noise : Noise level per channel
1223 1222
1224 1223 Affected:
1225 1224 self.dataOut.moments : Parameters per channel
1226 1225 self.dataOut.data_snr : SNR per channel
1227 1226
1228 1227 '''
1229 1228
1230 1229 def run(self, dataOut):
1231 1230
1232 1231 data = dataOut.data_pre[0]
1233 1232 absc = dataOut.abscissaList[:-1]
1234 1233 noise = dataOut.noise
1235 1234 nChannel = data.shape[0]
1236 1235 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1237 1236
1238 1237 for ind in range(nChannel):
1239 1238 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1240 1239
1241 1240 dataOut.moments = data_param[:,1:,:]
1242 1241 dataOut.data_snr = data_param[:,0]
1243 1242 dataOut.data_pow = data_param[:,1]
1244 1243 dataOut.data_dop = data_param[:,2]
1245 1244 dataOut.data_width = data_param[:,3]
1246 1245
1247 1246 return dataOut
1248 1247
1249 1248 def __calculateMoments(self, oldspec, oldfreq, n0,
1250 1249 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1251 1250
1252 1251 if (nicoh is None): nicoh = 1
1253 1252 if (graph is None): graph = 0
1254 1253 if (smooth is None): smooth = 0
1255 1254 elif (smooth < 3): smooth = 0
1256 1255
1257 1256 if (type1 is None): type1 = 0
1258 1257 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1259 1258 if (snrth is None): snrth = -3
1260 1259 if (dc is None): dc = 0
1261 1260 if (aliasing is None): aliasing = 0
1262 1261 if (oldfd is None): oldfd = 0
1263 1262 if (wwauto is None): wwauto = 0
1264 1263
1265 1264 if (n0 < 1.e-20): n0 = 1.e-20
1266 1265
1267 1266 freq = oldfreq
1268 1267 vec_power = numpy.zeros(oldspec.shape[1])
1269 1268 vec_fd = numpy.zeros(oldspec.shape[1])
1270 1269 vec_w = numpy.zeros(oldspec.shape[1])
1271 1270 vec_snr = numpy.zeros(oldspec.shape[1])
1272 1271
1273 1272 # oldspec = numpy.ma.masked_invalid(oldspec)
1274 1273
1275 1274 for ind in range(oldspec.shape[1]):
1276 1275
1277 1276 spec = oldspec[:,ind]
1278 1277 aux = spec*fwindow
1279 1278 max_spec = aux.max()
1280 1279 m = aux.tolist().index(max_spec)
1281 1280
1282 1281 # Smooth
1283 1282 if (smooth == 0):
1284 1283 spec2 = spec
1285 1284 else:
1286 1285 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1287 1286
1288 1287 # Moments Estimation
1289 1288 bb = spec2[numpy.arange(m,spec2.size)]
1290 1289 bb = (bb<n0).nonzero()
1291 1290 bb = bb[0]
1292 1291
1293 1292 ss = spec2[numpy.arange(0,m + 1)]
1294 1293 ss = (ss<n0).nonzero()
1295 1294 ss = ss[0]
1296 1295
1297 1296 if (bb.size == 0):
1298 1297 bb0 = spec.size - 1 - m
1299 1298 else:
1300 1299 bb0 = bb[0] - 1
1301 1300 if (bb0 < 0):
1302 1301 bb0 = 0
1303 1302
1304 1303 if (ss.size == 0):
1305 1304 ss1 = 1
1306 1305 else:
1307 1306 ss1 = max(ss) + 1
1308 1307
1309 1308 if (ss1 > m):
1310 1309 ss1 = m
1311 1310
1312 1311 valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1313 1312
1314 1313 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1315 1314 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1316 1315 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1317 1316 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1318 1317 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1319 1318 snr = (spec2.mean()-n0)/n0
1320 1319 if (snr < 1.e-20) :
1321 1320 snr = 1.e-20
1322 1321
1323 1322 # vec_power[ind] = power #D. Scipión replaced with the line below
1324 1323 vec_power[ind] = total_power
1325 1324 vec_fd[ind] = fd
1326 1325 vec_w[ind] = w
1327 1326 vec_snr[ind] = snr
1328 1327
1329 1328 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1330 1329
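# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the class): the core formulas of
# __calculateMoments for a single range gate, on a synthetic spectrum. The real
# method additionally restricts the sums to the bins around the peak that stay
# above the noise level. Names and values are examples only.
def _sketch_gate_moments():
    import numpy
    freq = numpy.linspace(-100., 100., 128)                     # frequency axis [Hz]
    n0 = 0.05                                                   # noise level
    spec = n0 + numpy.exp(-0.5*((freq - 20.)/10.)**2)           # synthetic line at +20 Hz
    power = (spec - n0).sum()
    fd = ((spec - n0)*freq).sum() / power                       # Doppler shift, ~20 Hz
    w = numpy.sqrt(((spec - n0)*(freq - fd)**2).sum() / power)  # spectral width, ~10 Hz
    snr = (spec.mean() - n0) / n0                               # signal-to-noise ratio
    return power, fd, w, snr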
1331 1330 #------------------ Get SA Parameters --------------------------
1332 1331
1333 1332 def GetSAParameters(self):
1334 1333 #SA in the frequency domain
1335 1334 pairslist = self.dataOut.groupList
1336 1335 num_pairs = len(pairslist)
1337 1336
1338 1337 vel = self.dataOut.abscissaList
1339 1338 spectra = self.dataOut.data_pre
1340 1339 cspectra = self.dataIn.data_cspc
1341 1340 delta_v = vel[1] - vel[0]
1342 1341
1343 1342 #Calculating the power spectrum
1344 1343 spc_pow = numpy.sum(spectra, 3)*delta_v
1345 1344 #Normalizing Spectra
1346 1345 norm_spectra = spectra/spc_pow
1347 1346 #Calculating the norm_spectra at peak
1348 1347 max_spectra = numpy.max(norm_spectra, 3)
1349 1348
1350 1349 #Normalizing Cross Spectra
1351 1350 norm_cspectra = numpy.zeros(cspectra.shape)
1352 1351
1353 1352 for i in range(num_pairs):
1354 1353 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1355 1354
1356 1355 max_cspectra = numpy.max(norm_cspectra,2)
1357 1356 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1358 1357
1359 1358 for i in range(num_pairs):
1360 1359 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1361 1360 #------------------- Get Lags ----------------------------------
1362 1361
1363 1362 class SALags(Operation):
1364 1363 '''
1365 1364 Function SALags()
1366 1365
1367 1366 Input:
1368 1367 self.dataOut.data_pre
1369 1368 self.dataOut.abscissaList
1370 1369 self.dataOut.noise
1371 1370 self.dataOut.normFactor
1372 1371 self.dataOut.data_snr
1373 1372 self.dataOut.groupList
1374 1373 self.dataOut.nChannels
1375 1374
1376 1375 Affected:
1377 1376 self.dataOut.data_param
1378 1377
1379 1378 '''
1380 1379 def run(self, dataOut):
1381 1380 data_acf = dataOut.data_pre[0]
1382 1381 data_ccf = dataOut.data_pre[1]
1383 1382 normFactor_acf = dataOut.normFactor[0]
1384 1383 normFactor_ccf = dataOut.normFactor[1]
1385 1384 pairs_acf = dataOut.groupList[0]
1386 1385 pairs_ccf = dataOut.groupList[1]
1387 1386
1388 1387 nHeights = dataOut.nHeights
1389 1388 absc = dataOut.abscissaList
1390 1389 noise = dataOut.noise
1391 1390 SNR = dataOut.data_snr
1392 1391 nChannels = dataOut.nChannels
1393 1392 # pairsList = dataOut.groupList
1394 1393 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1395 1394
1396 1395 for l in range(len(pairs_acf)):
1397 1396 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1398 1397
1399 1398 for l in range(len(pairs_ccf)):
1400 1399 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1401 1400
1402 1401 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1403 1402 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1404 1403 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1405 1404 return
1406 1405
1407 1406 # def __getPairsAutoCorr(self, pairsList, nChannels):
1408 1407 #
1409 1408 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1410 1409 #
1411 1410 # for l in range(len(pairsList)):
1412 1411 # firstChannel = pairsList[l][0]
1413 1412 # secondChannel = pairsList[l][1]
1414 1413 #
1415 1414 # #Obteniendo pares de Autocorrelacion
1416 1415 # if firstChannel == secondChannel:
1417 1416 # pairsAutoCorr[firstChannel] = int(l)
1418 1417 #
1419 1418 # pairsAutoCorr = pairsAutoCorr.astype(int)
1420 1419 #
1421 1420 # pairsCrossCorr = range(len(pairsList))
1422 1421 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1423 1422 #
1424 1423 # return pairsAutoCorr, pairsCrossCorr
1425 1424
1426 1425 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1427 1426
1428 1427 lag0 = data_acf.shape[1]//2
1429 1428 #Autocorrelation function
1430 1429 mean_acf = stats.nanmean(data_acf, axis = 0)
1431 1430
1432 1431 #Get the TauCross index
1433 1432 ind_ccf = data_ccf.argmax(axis = 1)
1434 1433 #Get the TauAuto index
1435 1434 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1436 1435 ccf_lag0 = data_ccf[:,lag0,:]
1437 1436
1438 1437 for i in range(ccf_lag0.shape[0]):
1439 1438 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1440 1439
1441 1440 #Get TauCross and TauAuto
1442 1441 tau_ccf = lagRange[ind_ccf]
1443 1442 tau_acf = lagRange[ind_acf]
1444 1443
1445 1444 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1446 1445
1447 1446 tau_ccf[Nan1,Nan2] = numpy.nan
1448 1447 tau_acf[Nan1,Nan2] = numpy.nan
1449 1448 tau = numpy.vstack((tau_ccf,tau_acf))
1450 1449
1451 1450 return tau
1452 1451
1453 1452 def __calculateLag1Phase(self, data, lagTRange):
1454 1453 data1 = stats.nanmean(data, axis = 0)
1455 1454 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1456 1455
1457 1456 phase = numpy.angle(data1[lag1,:])
1458 1457
1459 1458 return phase
1460 1459
1461 1460 class SpectralFitting(Operation):
1462 1461 '''
1463 1462 Function SpectralFitting()
1464 1463
1465 1464 Input:
1466 1465 Output:
1467 1466 Variables modified:
1468 1467 '''
1469 1468
1470 1469 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1471 1470
1472 1471
1473 1472 if path != None:
1474 1473 sys.path.append(path)
1475 1474 self.dataOut.library = importlib.import_module(file)
1476 1475
1477 1476 #To be inserted as a parameter
1478 1477 groupArray = numpy.array(groupList)
1479 1478 # groupArray = numpy.array([[0,1],[2,3]])
1480 1479 self.dataOut.groupList = groupArray
1481 1480
1482 1481 nGroups = groupArray.shape[0]
1483 1482 nChannels = self.dataIn.nChannels
1484 1483 nHeights=self.dataIn.heightList.size
1485 1484
1486 1485 #Parameters Array
1487 1486 self.dataOut.data_param = None
1488 1487
1489 1488 #Set constants
1490 1489 constants = self.dataOut.library.setConstants(self.dataIn)
1491 1490 self.dataOut.constants = constants
1492 1491 M = self.dataIn.normFactor
1493 1492 N = self.dataIn.nFFTPoints
1494 1493 ippSeconds = self.dataIn.ippSeconds
1495 1494 K = self.dataIn.nIncohInt
1496 1495 pairsArray = numpy.array(self.dataIn.pairsList)
1497 1496
1498 1497 #List of possible combinations
1499 1498 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2))
1500 1499 indCross = numpy.zeros(len(listComb), dtype = 'int')
1501 1500
1502 1501 if getSNR:
1503 1502 listChannels = groupArray.reshape((groupArray.size))
1504 1503 listChannels.sort()
1505 1504 noise = self.dataIn.getNoise()
1506 1505 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1507 1506
1508 1507 for i in range(nGroups):
1509 1508 coord = groupArray[i,:]
1510 1509
1511 1510 #Input data array
1512 1511 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1513 1512 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1514 1513
1515 1514 #Cross Spectra data array for Covariance Matrixes
1516 1515 ind = 0
1517 1516 for pairs in listComb:
1518 1517 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1519 1518 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1520 1519 ind += 1
1521 1520 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1522 1521 dataCross = dataCross**2/K
1523 1522
1524 1523 for h in range(nHeights):
1525 1524
1526 1525 #Input
1527 1526 d = data[:,h]
1528 1527
1529 1528 #Covariance Matrix
1530 1529 D = numpy.diag(d**2/K)
1531 1530 ind = 0
1532 1531 for pairs in listComb:
1533 1532 #Coordinates in Covariance Matrix
1534 1533 x = pairs[0]
1535 1534 y = pairs[1]
1536 1535 #Channel Index
1537 1536 S12 = dataCross[ind,:,h]
1538 1537 D12 = numpy.diag(S12)
1539 1538 #Completing Covariance Matrix with Cross Spectras
1540 1539 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1541 1540 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1542 1541 ind += 1
1543 1542 Dinv=numpy.linalg.inv(D)
1544 1543 L=numpy.linalg.cholesky(Dinv)
1545 1544 LT=L.T
1546 1545
1547 1546 dp = numpy.dot(LT,d)
1548 1547
1549 1548 #Initial values
1550 1549 data_spc = self.dataIn.data_spc[coord,:,h]
1551 1550
1552 1551 if (h>0)and(error1[3]<5):
1553 1552 p0 = self.dataOut.data_param[i,:,h-1]
1554 1553 else:
1555 1554 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1556 1555
1557 1556 try:
1558 1557 #Least Squares
1559 1558 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1560 1559 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1561 1560 #Chi square error
1562 1561 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1563 1562 #Error with Jacobian
1564 1563 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1565 1564 except:
1566 1565 minp = p0*numpy.nan
1567 1566 error0 = numpy.nan
1568 1567 error1 = p0*numpy.nan
1569 1568
1570 1569 #Save
1571 1570 if self.dataOut.data_param is None:
1572 1571 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1573 1572 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1574 1573
1575 1574 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1576 1575 self.dataOut.data_param[i,:,h] = minp
1577 1576 return
1578 1577
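# ----------------------------------------------------------------------------
# Illustrative sketch (not part of SpectralFitting): the covariance-weighted fit
# above is a whitened least-squares problem. With covariance matrix D, data and
# model are multiplied by L.T, where L = cholesky(inv(D)), and the residual
# L.T*d - L.T*f(p) is handed to scipy.optimize.leastsq, as in __residFunction.
# The linear toy model and numbers below are examples only.
def _sketch_whitened_leastsq():
    import numpy
    from scipy import optimize

    x = numpy.linspace(0., 1., 20)
    d = 2.0*x + 1.0 + 0.01*numpy.sin(30*x)             # "measured" data
    D = numpy.diag(0.1 + 0.05*x)                       # diagonal covariance matrix
    L = numpy.linalg.cholesky(numpy.linalg.inv(D))
    LT = L.T
    dp = numpy.dot(LT, d)                              # whitened data

    def resid(p):
        fm = p[0]*x + p[1]                             # model evaluated at parameters p
        return dp - numpy.dot(LT, fm)                  # whitened residual

    minp, covp, infodict, mesg, ier = optimize.leastsq(resid, [1., 0.], full_output=True)
    return minp                                        # approximately [2.0, 1.0]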
1579 1578 def __residFunction(self, p, dp, LT, constants):
1580 1579
1581 1580 fm = self.dataOut.library.modelFunction(p, constants)
1582 1581 fmp=numpy.dot(LT,fm)
1583 1582
1584 1583 return dp-fmp
1585 1584
1586 1585 def __getSNR(self, z, noise):
1587 1586
1588 1587 avg = numpy.average(z, axis=1)
1589 1588 SNR = (avg.T-noise)/noise
1590 1589 SNR = SNR.T
1591 1590 return SNR
1592 1591
1593 1592 def __chisq(p,chindex,hindex):
1594 1593 #similar to Resid but calculates CHI**2
1595 1594 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1596 1595 dp=numpy.dot(LT,d)
1597 1596 fmp=numpy.dot(LT,fm)
1598 1597 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1599 1598 return chisq
1600 1599
1601 1600 class WindProfiler(Operation):
1602 1601
1603 1602 __isConfig = False
1604 1603
1605 1604 __initime = None
1606 1605 __lastdatatime = None
1607 1606 __integrationtime = None
1608 1607
1609 1608 __buffer = None
1610 1609
1611 1610 __dataReady = False
1612 1611
1613 1612 __firstdata = None
1614 1613
1615 1614 n = None
1616 1615
1617 1616 def __init__(self):
1618 1617 Operation.__init__(self)
1619 1618
1620 1619 def __calculateCosDir(self, elev, azim):
1621 1620 zen = (90 - elev)*numpy.pi/180
1622 1621 azim = azim*numpy.pi/180
1623 1622 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1624 1623 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1625 1624
1626 1625 signX = numpy.sign(numpy.cos(azim))
1627 1626 signY = numpy.sign(numpy.sin(azim))
1628 1627
1629 1628 cosDirX = numpy.copysign(cosDirX, signX)
1630 1629 cosDirY = numpy.copysign(cosDirY, signY)
1631 1630 return cosDirX, cosDirY
1632 1631
1633 1632 def __calculateAngles(self, theta_x, theta_y, azimuth):
1634 1633
1635 1634 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1636 1635 zenith_arr = numpy.arccos(dir_cosw)
1637 1636 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1638 1637
1639 1638 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1640 1639 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1641 1640
1642 1641 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1643 1642
1644 1643 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1645 1644
1646 1645 #
1647 1646 if horOnly:
1648 1647 A = numpy.c_[dir_cosu,dir_cosv]
1649 1648 else:
1650 1649 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1651 1650 A = numpy.asmatrix(A)
1652 1651 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1653 1652
1654 1653 return A1
1655 1654
1656 1655 def __correctValues(self, heiRang, phi, velRadial, SNR):
1657 1656 listPhi = phi.tolist()
1658 1657 maxid = listPhi.index(max(listPhi))
1659 1658 minid = listPhi.index(min(listPhi))
1660 1659
1661 1660 rango = list(range(len(phi)))
1662 1661 # rango = numpy.delete(rango,maxid)
1663 1662
1664 1663 heiRang1 = heiRang*math.cos(phi[maxid])
1665 1664 heiRangAux = heiRang*math.cos(phi[minid])
1666 1665 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1667 1666 heiRang1 = numpy.delete(heiRang1,indOut)
1668 1667
1669 1668 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1670 1669 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1671 1670
1672 1671 for i in rango:
1673 1672 x = heiRang*math.cos(phi[i])
1674 1673 y1 = velRadial[i,:]
1675 1674 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1676 1675
1677 1676 x1 = heiRang1
1678 1677 y11 = f1(x1)
1679 1678
1680 1679 y2 = SNR[i,:]
1681 1680 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1682 1681 y21 = f2(x1)
1683 1682
1684 1683 velRadial1[i,:] = y11
1685 1684 SNR1[i,:] = y21
1686 1685
1687 1686 return heiRang1, velRadial1, SNR1
1688 1687
1689 1688 def __calculateVelUVW(self, A, velRadial):
1690 1689
1691 1690 #Matrix operation
1692 1691 # velUVW = numpy.zeros((velRadial.shape[1],3))
1693 1692 # for ind in range(velRadial.shape[1]):
1694 1693 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1695 1694 # velUVW = velUVW.transpose()
1696 1695 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1697 1696 velUVW[:,:] = numpy.dot(A,velRadial)
1698 1697
1699 1698
1700 1699 return velUVW
1701 1700
1702 1701 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1703 1702
1704 1703 def techniqueDBS(self, kwargs):
1705 1704 """
1706 1705 Function that implements Doppler Beam Swinging (DBS) technique.
1707 1706
1708 1707 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1709 1708 Direction correction (if necessary), Ranges and SNR
1710 1709
1711 1710 Output: Winds estimation (Zonal, Meridional and Vertical)
1712 1711
1713 1712 Parameters affected: Winds, height range, SNR
1714 1713 """
1715 1714 velRadial0 = kwargs['velRadial']
1716 1715 heiRang = kwargs['heightList']
1717 1716 SNR0 = kwargs['SNR']
1718 1717
1719 1718 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1720 1719 theta_x = numpy.array(kwargs['dirCosx'])
1721 1720 theta_y = numpy.array(kwargs['dirCosy'])
1722 1721 else:
1723 1722 elev = numpy.array(kwargs['elevation'])
1724 1723 azim = numpy.array(kwargs['azimuth'])
1725 1724 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1726 1725 azimuth = kwargs['correctAzimuth']
1727 1726 if 'horizontalOnly' in kwargs:
1728 1727 horizontalOnly = kwargs['horizontalOnly']
1729 1728 else: horizontalOnly = False
1730 1729 if 'correctFactor' in kwargs:
1731 1730 correctFactor = kwargs['correctFactor']
1732 1731 else: correctFactor = 1
1733 1732 if 'channelList' in kwargs:
1734 1733 channelList = kwargs['channelList']
1735 1734 if len(channelList) == 2:
1736 1735 horizontalOnly = True
1737 1736 arrayChannel = numpy.array(channelList)
1738 1737 param = param[arrayChannel,:,:]
1739 1738 theta_x = theta_x[arrayChannel]
1740 1739 theta_y = theta_y[arrayChannel]
1741 1740
1742 1741 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1743 1742 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1744 1743 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1745 1744
1746 1745 #Compute the velocity components with DBS
1747 1746 winds = self.__calculateVelUVW(A,velRadial1)
1748 1747
1749 1748 return winds, heiRang1, SNR1
1750 1749
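# ----------------------------------------------------------------------------
# Illustrative sketch (not part of WindProfiler): the DBS inversion above solves
# v_radial = A * (u, v, w) in the least-squares sense, with one row of direction
# cosines per beam, as in __calculateMatA/__calculateVelUVW. The beam geometry
# and wind values below are hypothetical.
def _sketch_dbs_inversion():
    import numpy
    A = numpy.array([[0.26, 0.00, 0.97],      # beam tilted towards the east
                     [0.00, 0.26, 0.97],      # beam tilted towards the north
                     [0.00, 0.00, 1.00]])     # vertical beam
    true_wind = numpy.array([10., -5., 0.1])  # zonal, meridional, vertical [m/s]
    vrad = A.dot(true_wind)                   # radial velocity seen by each beam
    A1 = numpy.linalg.inv(A.T.dot(A)).dot(A.T)   # (A^T A)^-1 A^T
    return A1.dot(vrad)                       # recovers ~(10, -5, 0.1)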
1751 1750 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1752 1751
1753 1752 nPairs = len(pairs_ccf)
1754 1753 posx = numpy.asarray(posx)
1755 1754 posy = numpy.asarray(posy)
1756 1755
1757 1756 #Inverse rotation to align with the azimuth
1758 1757 if azimuth!= None:
1759 1758 azimuth = azimuth*math.pi/180
1760 1759 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1761 1760 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1762 1761 else:
1763 1762 posx1 = posx
1764 1763 posy1 = posy
1765 1764
1766 1765 #Compute distances
1767 1766 distx = numpy.zeros(nPairs)
1768 1767 disty = numpy.zeros(nPairs)
1769 1768 dist = numpy.zeros(nPairs)
1770 1769 ang = numpy.zeros(nPairs)
1771 1770
1772 1771 for i in range(nPairs):
1773 1772 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1774 1773 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1775 1774 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1776 1775 ang[i] = numpy.arctan2(disty[i],distx[i])
1777 1776
1778 1777 return distx, disty, dist, ang
1779 1778 #Matrix computation
1780 1779 # nPairs = len(pairs)
1781 1780 # ang1 = numpy.zeros((nPairs, 2, 1))
1782 1781 # dist1 = numpy.zeros((nPairs, 2, 1))
1783 1782 #
1784 1783 # for j in range(nPairs):
1785 1784 # dist1[j,0,0] = dist[pairs[j][0]]
1786 1785 # dist1[j,1,0] = dist[pairs[j][1]]
1787 1786 # ang1[j,0,0] = ang[pairs[j][0]]
1788 1787 # ang1[j,1,0] = ang[pairs[j][1]]
1789 1788 #
1790 1789 # return distx,disty, dist1,ang1
1791 1790
1792 1791
1793 1792 def __calculateVelVer(self, phase, lagTRange, _lambda):
1794 1793
1795 1794 Ts = lagTRange[1] - lagTRange[0]
1796 1795 velW = -_lambda*phase/(4*math.pi*Ts)
1797 1796
1798 1797 return velW
1799 1798
1800 1799 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1801 1800 nPairs = tau1.shape[0]
1802 1801 nHeights = tau1.shape[1]
1803 1802 vel = numpy.zeros((nPairs,3,nHeights))
1804 1803 dist1 = numpy.reshape(dist, (dist.size,1))
1805 1804
1806 1805 angCos = numpy.cos(ang)
1807 1806 angSin = numpy.sin(ang)
1808 1807
1809 1808 vel0 = dist1*tau1/(2*tau2**2)
1810 1809 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1811 1810 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1812 1811
1813 1812 ind = numpy.where(numpy.isinf(vel))
1814 1813 vel[ind] = numpy.nan
1815 1814
1816 1815 return vel
1817 1816
1818 1817 # def __getPairsAutoCorr(self, pairsList, nChannels):
1819 1818 #
1820 1819 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1821 1820 #
1822 1821 # for l in range(len(pairsList)):
1823 1822 # firstChannel = pairsList[l][0]
1824 1823 # secondChannel = pairsList[l][1]
1825 1824 #
1826 1825 # #Obteniendo pares de Autocorrelacion
1827 1826 # if firstChannel == secondChannel:
1828 1827 # pairsAutoCorr[firstChannel] = int(l)
1829 1828 #
1830 1829 # pairsAutoCorr = pairsAutoCorr.astype(int)
1831 1830 #
1832 1831 # pairsCrossCorr = range(len(pairsList))
1833 1832 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1834 1833 #
1835 1834 # return pairsAutoCorr, pairsCrossCorr
1836 1835
1837 1836 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1838 1837 def techniqueSA(self, kwargs):
1839 1838
1840 1839 """
1841 1840 Function that implements the Spaced Antenna (SA) technique.
1842 1841 
1843 1842 Input: antenna positions (positionX, positionY), antenna azimuth, cross-correlation
1844 1843 pairs (groupList), lag parameters (tau), radar wavelength (_lambda) and an optional correction factor
1845 1844
1846 1845 Output: Winds estimation (Zonal, Meridional and Vertical)
1847 1846
1848 1847 Parameters affected: Winds
1849 1848 """
1850 1849 position_x = kwargs['positionX']
1851 1850 position_y = kwargs['positionY']
1852 1851 azimuth = kwargs['azimuth']
1853 1852
1854 1853 if 'correctFactor' in kwargs:
1855 1854 correctFactor = kwargs['correctFactor']
1856 1855 else:
1857 1856 correctFactor = 1
1858 1857
1859 1858 groupList = kwargs['groupList']
1860 1859 pairs_ccf = groupList[1]
1861 1860 tau = kwargs['tau']
1862 1861 _lambda = kwargs['_lambda']
lagTRange = kwargs['lagTRange'] #lag-time axis (formerly the positional lagTRange argument); required by __calculateVelVer below
1863 1862
1864 1863 #Cross Correlation pairs obtained
1865 1864 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1866 1865 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1867 1866 # pairsSelArray = numpy.array(pairsSelected)
1868 1867 # pairs = []
1869 1868 #
1870 1869 # #Wind estimation pairs obtained
1871 1870 # for i in range(pairsSelArray.shape[0]/2):
1872 1871 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1873 1872 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1874 1873 # pairs.append((ind1,ind2))
1875 1874
1876 1875 indtau = tau.shape[0]//2 #integer division so the array slices below remain valid in Python 3
1877 1876 tau1 = tau[:indtau,:]
1878 1877 tau2 = tau[indtau:-1,:]
1879 1878 # tau1 = tau1[pairs,:]
1880 1879 # tau2 = tau2[pairs,:]
1881 1880 phase1 = tau[-1,:]
1882 1881
1883 1882 #---------------------------------------------------------------------
1884 1883 #Direct method
1885 1884 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1886 1885 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1887 1886 winds = stats.nanmean(winds, axis=0)
1888 1887 #---------------------------------------------------------------------
1889 1888 #General method
1890 1889 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1891 1890 # #Computation of the correlation-function coefficients
1892 1891 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1893 1892 # #Velocity computation
1894 1893 # winds = self.calculateVelUV(F,G,A,B,H)
1895 1894
1896 1895 #---------------------------------------------------------------------
1897 1896 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1898 1897 winds = correctFactor*winds
1899 1898 return winds
1900 1899
1901 1900 def __checkTime(self, currentTime, paramInterval, outputInterval):
1902 1901
1903 1902 dataTime = currentTime + paramInterval
1904 1903 deltaTime = dataTime - self.__initime
1905 1904
1906 1905 if deltaTime >= outputInterval or deltaTime < 0:
1907 1906 self.__dataReady = True
1908 1907 return
1909 1908
1910 1909 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1911 1910 '''
1912 1911 Function that implements the wind estimation technique based on detected meteors.
1913 1912 
1914 1913 Input: Detected meteors, minimum number of meteors per height bin required for the wind estimation
1915 1914
1916 1915 Output: Winds estimation (Zonal and Meridional)
1917 1916
1918 1917 Parameters affected: Winds
1919 1918 '''
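# The per-bin estimation below solves the linear model velRad = u*l + v*m in the
# least-squares sense, with direction cosines l = sin(zen)*sin(azim) and
# m = sin(zen)*cos(azim), via winds = (A^T A)^-1 A^T velRad.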
1920 1919 #Settings
1921 1920 nInt = (heightMax - heightMin)/2
1922 1921 nInt = int(nInt)
1923 1922 winds = numpy.zeros((2,nInt))*numpy.nan
1924 1923
1925 1924 #Filter errors
1926 1925 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1927 1926 finalMeteor = arrayMeteor[error,:]
1928 1927
1929 1928 #Meteor Histogram
1930 1929 finalHeights = finalMeteor[:,2]
1931 1930 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1932 1931 nMeteorsPerI = hist[0]
1933 1932 heightPerI = hist[1]
1934 1933
1935 1934 #Sort meteors by height
1936 1935 indSort = finalHeights.argsort()
1937 1936 finalMeteor2 = finalMeteor[indSort,:]
1938 1937
1939 1938 # Calculating winds
1940 1939 ind1 = 0
1941 1940 ind2 = 0
1942 1941
1943 1942 for i in range(nInt):
1944 1943 nMet = nMeteorsPerI[i]
1945 1944 ind1 = ind2
1946 1945 ind2 = ind1 + nMet
1947 1946
1948 1947 meteorAux = finalMeteor2[ind1:ind2,:]
1949 1948
1950 1949 if meteorAux.shape[0] >= meteorThresh:
1951 1950 vel = meteorAux[:, 6]
1952 1951 zen = meteorAux[:, 4]*numpy.pi/180
1953 1952 azim = meteorAux[:, 3]*numpy.pi/180
1954 1953
1955 1954 n = numpy.cos(zen)
1956 1955 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1957 1956 # l = m*numpy.tan(azim)
1958 1957 l = numpy.sin(zen)*numpy.sin(azim)
1959 1958 m = numpy.sin(zen)*numpy.cos(azim)
1960 1959
1961 1960 A = numpy.vstack((l, m)).transpose()
1962 1961 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1963 1962 windsAux = numpy.dot(A1, vel)
1964 1963
1965 1964 winds[0,i] = windsAux[0]
1966 1965 winds[1,i] = windsAux[1]
1967 1966
1968 1967 return winds, heightPerI[:-1]
1969 1968
1970 1969 def techniqueNSM_SA(self, **kwargs):
1971 1970 metArray = kwargs['metArray']
1972 1971 heightList = kwargs['heightList']
1973 1972 timeList = kwargs['timeList']
1974 1973
1975 1974 rx_location = kwargs['rx_location']
1976 1975 groupList = kwargs['groupList']
1977 1976 azimuth = kwargs['azimuth']
1978 1977 dfactor = kwargs['dfactor']
1979 1978 k = kwargs['k']
1980 1979
1981 1980 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1982 1981 d = dist*dfactor
1983 1982 #Phase calculation
1984 1983 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1985 1984
1986 1985 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
1987 1986
1988 1987 velEst = numpy.zeros((heightList.size,2))*numpy.nan
1989 1988 azimuth1 = azimuth1*numpy.pi/180
1990 1989
1991 1990 for i in range(heightList.size):
1992 1991 h = heightList[i]
1993 1992 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
1994 1993 metHeight = metArray1[indH,:]
1995 1994 if metHeight.shape[0] >= 2:
1996 1995 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
1997 1996 iazim = metHeight[:,1].astype(int)
1998 1997 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
1999 1998 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
2000 1999 A = numpy.asmatrix(A)
2001 2000 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2002 2001 velHor = numpy.dot(A1,velAux)
2003 2002
2004 2003 velEst[i,:] = numpy.squeeze(velHor)
2005 2004 return velEst
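# For each height the code above solves velRadial = x1*cos(az) + x2*sin(az) in the
# least-squares sense over all qualifying meteor trails (at least two are required),
# using the pseudo-inverse (A^T A)^-1 A^T of the azimuth design matrix.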
2006 2005
2007 2006 def __getPhaseSlope(self, metArray, heightList, timeList):
2008 2007 meteorList = []
2009 2008 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2010 2009 #Putting back together the meteor matrix
2011 2010 utctime = metArray[:,0]
2012 2011 uniqueTime = numpy.unique(utctime)
2013 2012
2014 2013 phaseDerThresh = 0.5
2015 2014 ippSeconds = timeList[1] - timeList[0]
2016 2015 sec = numpy.where(timeList>1)[0][0]
2017 2016 nPairs = metArray.shape[1] - 6
2018 2017 nHeights = len(heightList)
2019 2018
2020 2019 for t in uniqueTime:
2021 2020 metArray1 = metArray[utctime==t,:]
2022 2021 # phaseDerThresh = numpy.pi/4 #reduce phase threshold
2023 2022 tmet = metArray1[:,1].astype(int)
2024 2023 hmet = metArray1[:,2].astype(int)
2025 2024
2026 2025 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2027 2026 metPhase[:,:] = numpy.nan
2028 2027 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2029 2028
2030 2029 #Delete short trails
2031 2030 metBool = ~numpy.isnan(metPhase[0,:,:])
2032 2031 heightVect = numpy.sum(metBool, axis = 1)
2033 2032 metBool[heightVect<sec,:] = False
2034 2033 metPhase[:,heightVect<sec,:] = numpy.nan
2035 2034
2036 2035 #Derivative
2037 2036 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2038 2037 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2039 2038 metPhase[phDerAux] = numpy.nan
2040 2039
2041 2040 #--------------------------METEOR DETECTION -----------------------------------------
2042 2041 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2043 2042
2044 2043 for p in numpy.arange(nPairs):
2045 2044 phase = metPhase[p,:,:]
2046 2045 phDer = metDer[p,:,:]
2047 2046
2048 2047 for h in indMet:
2049 2048 height = heightList[h]
2050 2049 phase1 = phase[h,:] #82
2051 2050 phDer1 = phDer[h,:]
2052 2051
2053 2052 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2054 2053
2055 2054 indValid = numpy.where(~numpy.isnan(phase1))[0]
2056 2055 initMet = indValid[0]
2057 2056 endMet = 0
2058 2057
2059 2058 for i in range(len(indValid)-1):
2060 2059
2061 2060 #Time difference
2062 2061 inow = indValid[i]
2063 2062 inext = indValid[i+1]
2064 2063 idiff = inext - inow
2065 2064 #Phase difference
2066 2065 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2067 2066
2068 2067 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2069 2068 sizeTrail = inow - initMet + 1
2070 2069 if sizeTrail>3*sec: #keep only trails longer than 3*sec; shorter meteors are discarded
2071 2070 x = numpy.arange(initMet,inow+1)*ippSeconds
2072 2071 y = phase1[initMet:inow+1]
2073 2072 ynnan = ~numpy.isnan(y)
2074 2073 x = x[ynnan]
2075 2074 y = y[ynnan]
2076 2075 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2077 2076 ylin = x*slope + intercept
2078 2077 rsq = r_value**2
2079 2078 if rsq > 0.5:
2080 2079 vel = slope#*height*1000/(k*d)
2081 2080 estAux = numpy.array([t, p, height, vel, rsq]) #use the current trail time t (the full utctime array would corrupt the row)
2082 2081 meteorList.append(estAux)
2083 2082 initMet = inext
2084 2083 metArray2 = numpy.array(meteorList)
2085 2084
2086 2085 return metArray2
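# Each row of metArray2 is [time, pair index, height, phase slope (rad/s), r^2]; the slope
# is converted into a radial velocity by the caller (techniqueNSM_SA).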
2087 2086
2088 2087 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2089 2088
2090 2089 azimuth1 = numpy.zeros(len(pairslist))
2091 2090 dist = numpy.zeros(len(pairslist))
2092 2091
2093 2092 for i in range(len(pairslist)): #iterate over receiver pairs (one azimuth/distance per pair)
2094 2093 ch0 = pairslist[i][0]
2095 2094 ch1 = pairslist[i][1]
2096 2095
2097 2096 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2098 2097 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2099 2098 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2100 2099 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2101 2100
2102 2101 azimuth1 -= azimuth0
2103 2102 return azimuth1, dist
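# Illustrative example (hypothetical receiver layout): for rx_location[ch0] = (1, 1) and
# rx_location[ch1] = (0, 0), arctan2(1, 1) gives 45 deg and the baseline length is
# sqrt(2) ~ 1.41; azimuth0 is then subtracted to refer the angle to the antenna frame.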
2104 2103
2105 2104 def techniqueNSM_DBS(self, **kwargs):
2106 2105 metArray = kwargs['metArray']
2107 2106 heightList = kwargs['heightList']
2108 2107 timeList = kwargs['timeList']
2109 2108 azimuth = kwargs['azimuth']
2110 2109 theta_x = numpy.array(kwargs['theta_x'])
2111 2110 theta_y = numpy.array(kwargs['theta_y'])
2112 2111
2113 2112 utctime = metArray[:,0]
2114 2113 cmet = metArray[:,1].astype(int)
2115 2114 hmet = metArray[:,3].astype(int)
2116 2115 SNRmet = metArray[:,4]
2117 2116 vmet = metArray[:,5]
2118 2117 spcmet = metArray[:,6]
2119 2118
2120 2119 nChan = numpy.max(cmet) + 1
2121 2120 nHeights = len(heightList)
2122 2121
2123 2122 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2124 2123 hmet = heightList[hmet]
2125 2124 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2126 2125
2127 2126 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2128 2127
2129 2128 for i in range(nHeights - 1):
2130 2129 hmin = heightList[i]
2131 2130 hmax = heightList[i + 1]
2132 2131
2133 2132 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2134 2133 indthisH = numpy.where(thisH)
2135 2134
2136 2135 if numpy.size(indthisH) > 3:
2137 2136
2138 2137 vel_aux = vmet[thisH]
2139 2138 chan_aux = cmet[thisH]
2140 2139 cosu_aux = dir_cosu[chan_aux]
2141 2140 cosv_aux = dir_cosv[chan_aux]
2142 2141 cosw_aux = dir_cosw[chan_aux]
2143 2142
2144 2143 nch = numpy.size(numpy.unique(chan_aux))
2145 2144 if nch > 1:
2146 2145 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2147 2146 velEst[i,:] = numpy.dot(A,vel_aux)
2148 2147
2149 2148 return velEst
2150 2149
2151 2150 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2152 2151
2153 2152 param = dataOut.data_param
2154 2153 if dataOut.abscissaList.any():
2155 2154 #if dataOut.abscissaList != None:
2156 2155 absc = dataOut.abscissaList[:-1]
2157 2156 # noise = dataOut.noise
2158 2157 heightList = dataOut.heightList
2159 2158 SNR = dataOut.data_snr
2160 2159
2161 2160 if technique == 'DBS':
2162 2161
2163 2162 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2164 2163 kwargs['heightList'] = heightList
2165 2164 kwargs['SNR'] = SNR
2166 2165
2167 2166 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
2168 2167 dataOut.utctimeInit = dataOut.utctime
2169 2168 dataOut.outputInterval = dataOut.paramInterval
2170 2169
2171 2170 elif technique == 'SA':
2172 2171
2173 2172 #Parameters
2174 2173 # position_x = kwargs['positionX']
2175 2174 # position_y = kwargs['positionY']
2176 2175 # azimuth = kwargs['azimuth']
2177 2176 #
2178 2177 # if kwargs.has_key('crosspairsList'):
2179 2178 # pairs = kwargs['crosspairsList']
2180 2179 # else:
2181 2180 # pairs = None
2182 2181 #
2183 2182 # if kwargs.has_key('correctFactor'):
2184 2183 # correctFactor = kwargs['correctFactor']
2185 2184 # else:
2186 2185 # correctFactor = 1
2187 2186
2188 2187 # tau = dataOut.data_param
2189 2188 # _lambda = dataOut.C/dataOut.frequency
2190 2189 # pairsList = dataOut.groupList
2191 2190 # nChannels = dataOut.nChannels
2192 2191
2193 2192 kwargs['groupList'] = dataOut.groupList
2194 2193 kwargs['tau'] = dataOut.data_param
2195 2194 kwargs['_lambda'] = dataOut.C/dataOut.frequency
kwargs['lagTRange'] = absc #lag-time axis; previously passed positionally (see the commented call below)
2196 2195 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2197 2196 dataOut.data_output = self.techniqueSA(kwargs)
2198 2197 dataOut.utctimeInit = dataOut.utctime
2199 2198 dataOut.outputInterval = dataOut.timeInterval
2200 2199
2201 2200 elif technique == 'Meteors':
2202 2201 dataOut.flagNoData = True
2203 2202 self.__dataReady = False
2204 2203
2205 2204 if 'nHours' in kwargs:
2206 2205 nHours = kwargs['nHours']
2207 2206 else:
2208 2207 nHours = 1
2209 2208
2210 2209 if 'meteorsPerBin' in kwargs:
2211 2210 meteorThresh = kwargs['meteorsPerBin']
2212 2211 else:
2213 2212 meteorThresh = 6
2214 2213
2215 2214 if 'hmin' in kwargs:
2216 2215 hmin = kwargs['hmin']
2217 2216 else: hmin = 70
2218 2217 if 'hmax' in kwargs:
2219 2218 hmax = kwargs['hmax']
2220 2219 else: hmax = 110
2221 2220
2222 2221 dataOut.outputInterval = nHours*3600
2223 2222
2224 2223 if self.__isConfig == False:
2225 2224 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2226 2225 #Get Initial LTC time
2227 2226 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2228 2227 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2229 2228
2230 2229 self.__isConfig = True
2231 2230
2232 2231 if self.__buffer is None:
2233 2232 self.__buffer = dataOut.data_param
2234 2233 self.__firstdata = copy.copy(dataOut)
2235 2234
2236 2235 else:
2237 2236 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2238 2237
2239 2238 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2240 2239
2241 2240 if self.__dataReady:
2242 2241 dataOut.utctimeInit = self.__initime
2243 2242
2244 2243 self.__initime += dataOut.outputInterval #to erase time offset
2245 2244
2246 2245 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2247 2246 dataOut.flagNoData = False
2248 2247 self.__buffer = None
2249 2248
2250 2249 elif technique == 'Meteors1':
2251 2250 dataOut.flagNoData = True
2252 2251 self.__dataReady = False
2253 2252
2254 2253 if 'nMins' in kwargs:
2255 2254 nMins = kwargs['nMins']
2256 2255 else: nMins = 20
2257 2256 if 'rx_location' in kwargs:
2258 2257 rx_location = kwargs['rx_location']
2259 2258 else: rx_location = [(0,1),(1,1),(1,0)]
2260 2259 if 'azimuth' in kwargs:
2261 2260 azimuth = kwargs['azimuth']
2262 2261 else: azimuth = 51.06
2263 2262 if 'dfactor' in kwargs:
2264 2263 dfactor = kwargs['dfactor']
2265 2264 if 'mode' in kwargs:
2266 2265 mode = kwargs['mode']
2267 2266 if 'theta_x' in kwargs:
2268 2267 theta_x = kwargs['theta_x']
2269 2268 if 'theta_y' in kwargs:
2270 2269 theta_y = kwargs['theta_y']
2271 2270 else: mode = 'SA'
2272 2271
2273 2272 #TODO: remove this later (temporary default for groupList)
2274 2273 if dataOut.groupList is None:
2275 2274 dataOut.groupList = [(0,1),(0,2),(1,2)]
2276 2275 groupList = dataOut.groupList
2277 2276 C = 3e8
2278 2277 freq = 50e6
2279 2278 lamb = C/freq
2280 2279 k = 2*numpy.pi/lamb
2281 2280
2282 2281 timeList = dataOut.abscissaList
2283 2282 heightList = dataOut.heightList
2284 2283
2285 2284 if self.__isConfig == False:
2286 2285 dataOut.outputInterval = nMins*60
2287 2286 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2288 2287 #Get Initial LTC time
2289 2288 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2290 2289 minuteAux = initime.minute
2291 2290 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2292 2291 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2293 2292
2294 2293 self.__isConfig = True
2295 2294
2296 2295 if self.__buffer is None:
2297 2296 self.__buffer = dataOut.data_param
2298 2297 self.__firstdata = copy.copy(dataOut)
2299 2298
2300 2299 else:
2301 2300 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2302 2301
2303 2302 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2304 2303
2305 2304 if self.__dataReady:
2306 2305 dataOut.utctimeInit = self.__initime
2307 2306 self.__initime += dataOut.outputInterval #to erase time offset
2308 2307
2309 2308 metArray = self.__buffer
2310 2309 if mode == 'SA':
2311 2310 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2312 2311 elif mode == 'DBS':
2313 2312 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2314 2313 dataOut.data_output = dataOut.data_output.T
2315 2314 dataOut.flagNoData = False
2316 2315 self.__buffer = None
2317 2316
2318 2317 return
2319 2318
2320 2319 class EWDriftsEstimation(Operation):
2321 2320
2322 2321 def __init__(self):
2323 2322 Operation.__init__(self)
2324 2323
2325 2324 def __correctValues(self, heiRang, phi, velRadial, SNR):
2326 2325 listPhi = phi.tolist()
2327 2326 maxid = listPhi.index(max(listPhi))
2328 2327 minid = listPhi.index(min(listPhi))
2329 2328
2330 2329 rango = list(range(len(phi)))
2331 2330 # rango = numpy.delete(rango,maxid)
2332 2331
2333 2332 heiRang1 = heiRang*math.cos(phi[maxid])
2334 2333 heiRangAux = heiRang*math.cos(phi[minid])
2335 2334 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2336 2335 heiRang1 = numpy.delete(heiRang1,indOut)
2337 2336
2338 2337 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2339 2338 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2340 2339
2341 2340 for i in rango:
2342 2341 x = heiRang*math.cos(phi[i])
2343 2342 y1 = velRadial[i,:]
2344 2343 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2345 2344
2346 2345 x1 = heiRang1
2347 2346 y11 = f1(x1)
2348 2347
2349 2348 y2 = SNR[i,:]
2350 2349 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2351 2350 y21 = f2(x1)
2352 2351
2353 2352 velRadial1[i,:] = y11
2354 2353 SNR1[i,:] = y21
2355 2354
2356 2355 return heiRang1, velRadial1, SNR1
2357 2356
2358 2357 def run(self, dataOut, zenith, zenithCorrection):
2359 2358 heiRang = dataOut.heightList
2360 2359 velRadial = dataOut.data_param[:,3,:]
2361 2360 SNR = dataOut.data_snr
2362 2361
2363 2362 zenith = numpy.array(zenith)
2364 2363 zenith -= zenithCorrection
2365 2364 zenith *= numpy.pi/180
2366 2365
2367 2366 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2368 2367
2369 2368 alp = zenith[0]
2370 2369 bet = zenith[1]
2371 2370
2372 2371 w_w = velRadial1[0,:]
2373 2372 w_e = velRadial1[1,:]
2374 2373
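# The two beam radial velocities are modeled as w_w = u*sin(alp) + w*cos(alp) and
# w_e = u*sin(bet) + w*cos(bet); the expressions below are the closed-form (Cramer's rule)
# solution of that 2x2 system for the zonal (u) and vertical (w) components.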
2375 2374 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2376 2375 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2377 2376
2378 2377 winds = numpy.vstack((u,w))
2379 2378
2380 2379 dataOut.heightList = heiRang1
2381 2380 dataOut.data_output = winds
2382 2381 dataOut.data_snr = SNR1
2383 2382
2384 2383 dataOut.utctimeInit = dataOut.utctime
2385 2384 dataOut.outputInterval = dataOut.timeInterval
2386 2385 return
2387 2386
2388 2387 #--------------- Non Specular Meteor ----------------
2389 2388
2390 2389 class NonSpecularMeteorDetection(Operation):
2391 2390
2392 2391 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2393 2392 data_acf = dataOut.data_pre[0]
2394 2393 data_ccf = dataOut.data_pre[1]
2395 2394 pairsList = dataOut.groupList[1]
2396 2395
2397 2396 lamb = dataOut.C/dataOut.frequency
2398 2397 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2399 2398 paramInterval = dataOut.paramInterval
2400 2399
2401 2400 nChannels = data_acf.shape[0]
2402 2401 nLags = data_acf.shape[1]
2403 2402 nProfiles = data_acf.shape[2]
2404 2403 nHeights = dataOut.nHeights
2405 2404 nCohInt = dataOut.nCohInt
2406 2405 sec = numpy.round(nProfiles/dataOut.paramInterval)
2407 2406 heightList = dataOut.heightList
2408 2407 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2409 2408 utctime = dataOut.utctime
2410 2409
2411 2410 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2412 2411
2413 2412 #------------------------ SNR --------------------------------------
2414 2413 power = data_acf[:,0,:,:].real
2415 2414 noise = numpy.zeros(nChannels)
2416 2415 SNR = numpy.zeros(power.shape)
2417 2416 for i in range(nChannels):
2418 2417 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2419 2418 SNR[i] = (power[i]-noise[i])/noise[i]
2420 2419 SNRm = numpy.nanmean(SNR, axis = 0)
2421 2420 SNRdB = 10*numpy.log10(SNR)
2422 2421
2423 2422 if mode == 'SA':
2424 2423 dataOut.groupList = dataOut.groupList[1]
2425 2424 nPairs = data_ccf.shape[0]
2426 2425 #---------------------- Coherence and Phase --------------------------
2427 2426 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2428 2427 # phase1 = numpy.copy(phase)
2429 2428 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2430 2429
2431 2430 for p in range(nPairs):
2432 2431 ch0 = pairsList[p][0]
2433 2432 ch1 = pairsList[p][1]
2434 2433 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2435 2434 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2436 2435 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2437 2436 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2438 2437 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2439 2438 coh = numpy.nanmax(coh1, axis = 0)
2440 2439 # struc = numpy.ones((5,1))
2441 2440 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2442 2441 #---------------------- Radial Velocity ----------------------------
2443 2442 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2444 2443 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2445 2444
2446 2445 if allData:
2447 2446 boolMetFin = ~numpy.isnan(SNRm)
2448 2447 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2449 2448 else:
2450 2449 #------------------------ Meteor mask ---------------------------------
2451 2450 # #SNR mask
2452 2451 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2453 2452 #
2454 2453 # #Erase small objects
2455 2454 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2456 2455 #
2457 2456 # auxEEJ = numpy.sum(boolMet1,axis=0)
2458 2457 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2459 2458 # indEEJ = numpy.where(indOver)[0]
2460 2459 # indNEEJ = numpy.where(~indOver)[0]
2461 2460 #
2462 2461 # boolMetFin = boolMet1
2463 2462 #
2464 2463 # if indEEJ.size > 0:
2465 2464 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2466 2465 #
2467 2466 # boolMet2 = coh > cohThresh
2468 2467 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2469 2468 #
2470 2469 # #Final Meteor mask
2471 2470 # boolMetFin = boolMet1|boolMet2
2472 2471
2473 2472 #Coherence mask
2474 2473 boolMet1 = coh > 0.75
2475 2474 struc = numpy.ones((30,1))
2476 2475 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2477 2476
2478 2477 #Derivative mask
2479 2478 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2480 2479 boolMet2 = derPhase < 0.2
2481 2480 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2482 2481 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2483 2482 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2484 2483 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2485 2484 # #Final mask
2486 2485 # boolMetFin = boolMet2
2487 2486 boolMetFin = boolMet1&boolMet2
2488 2487 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2489 2488 #Creating data_param
2490 2489 coordMet = numpy.where(boolMetFin)
2491 2490
2492 2491 tmet = coordMet[0]
2493 2492 hmet = coordMet[1]
2494 2493
2495 2494 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2496 2495 data_param[:,0] = utctime
2497 2496 data_param[:,1] = tmet
2498 2497 data_param[:,2] = hmet
2499 2498 data_param[:,3] = SNRm[tmet,hmet]
2500 2499 data_param[:,4] = velRad[tmet,hmet]
2501 2500 data_param[:,5] = coh[tmet,hmet]
2502 2501 data_param[:,6:] = phase[:,tmet,hmet].T
2503 2502
2504 2503 elif mode == 'DBS':
2505 2504 dataOut.groupList = numpy.arange(nChannels)
2506 2505
2507 2506 #Radial Velocities
2508 2507 phase = numpy.angle(data_acf[:,1,:,:])
2509 2508 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2510 2509 velRad = phase*lamb/(4*numpy.pi*tSamp)
2511 2510
2512 2511 #Spectral width
2513 2512 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2514 2513 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2515 2514 acf1 = data_acf[:,1,:,:]
2516 2515 acf2 = data_acf[:,2,:,:]
2517 2516
2518 2517 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2519 2518 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2520 2519 if allData:
2521 2520 boolMetFin = ~numpy.isnan(SNRdB)
2522 2521 else:
2523 2522 #SNR
2524 2523 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2525 2524 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2526 2525
2527 2526 #Radial velocity
2528 2527 boolMet2 = numpy.abs(velRad) < 20
2529 2528 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2530 2529
2531 2530 #Spectral Width
2532 2531 boolMet3 = spcWidth < 30
2533 2532 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2534 2533 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2535 2534 boolMetFin = boolMet1&boolMet2&boolMet3
2536 2535
2537 2536 #Creating data_param
2538 2537 coordMet = numpy.where(boolMetFin)
2539 2538
2540 2539 cmet = coordMet[0]
2541 2540 tmet = coordMet[1]
2542 2541 hmet = coordMet[2]
2543 2542
2544 2543 data_param = numpy.zeros((tmet.size, 7))
2545 2544 data_param[:,0] = utctime
2546 2545 data_param[:,1] = cmet
2547 2546 data_param[:,2] = tmet
2548 2547 data_param[:,3] = hmet
2549 2548 data_param[:,4] = SNR[cmet,tmet,hmet].T
2550 2549 data_param[:,5] = velRad[cmet,tmet,hmet].T
2551 2550 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2552 2551
2553 2552 # self.dataOut.data_param = data_int
2554 2553 if len(data_param) == 0:
2555 2554 dataOut.flagNoData = True
2556 2555 else:
2557 2556 dataOut.data_param = data_param
2558 2557
2559 2558 def __erase_small(self, binArray, threshX, threshY):
2560 2559 labarray, numfeat = ndimage.measurements.label(binArray)
2561 2560 binArray1 = numpy.copy(binArray)
2562 2561
2563 2562 for i in range(1,numfeat + 1):
2564 2563 auxBin = (labarray==i)
2565 2564 auxSize = auxBin.sum()
2566 2565
2567 2566 x,y = numpy.where(auxBin)
2568 2567 widthX = x.max() - x.min()
2569 2568 widthY = y.max() - y.min()
2570 2569
2571 2570 #width X: 3 seg -> 12.5*3
2572 2571 #width Y:
2573 2572
2574 2573 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2575 2574 binArray1[auxBin] = False
2576 2575
2577 2576 return binArray1
2578 2577
2579 2578 #--------------- Specular Meteor ----------------
2580 2579
2581 2580 class SMDetection(Operation):
2582 2581 '''
2583 2582 Specular meteor detection (formerly DetectMeteors())
2584 2583 Based on the method described in:
2585 2584 HOLDSWORTH ET AL. 2004
2586 2585
2587 2586 Input:
2588 2587 self.dataOut.data_pre
2589 2588
2590 2589 centerReceiverIndex: index of the channel that acts as the center receiver
2591 2590
2592 2591 hei_ref: Height reference for the Beacon signal extraction
2593 2592 tauindex:
2594 2593 predefinedPhaseShifts: Predefined phase offsets for the voltage signals
2595 2594
2596 2595 cohDetection: Whether to use coherent detection or not
2597 2596 cohDet_timeStep: Coherent Detection calculation time step
2598 2597 cohDet_thresh: Coherent Detection phase threshold to correct phases
2599 2598
2600 2599 noise_timeStep: Noise calculation time step
2601 2600 noise_multiple: Noise multiple to define signal threshold
2602 2601
2603 2602 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2604 2603 multDet_rangeLimit: Multiple Detection Removal range limit in km
2605 2604
2606 2605 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2607 2606 SNRThresh: Minimum SNR of the signal for an echo to be considered a meteor
2608 2607
2609 2608 hmin: Minimum Height of the meteor to use it in the further wind estimations
2610 2609 hmax: Maximum Height of the meteor to use it in the further wind estimations
2611 2610 azimuth: Azimuth angle correction
2612 2611
2613 2612 Affected:
2614 2613 self.dataOut.data_param
2615 2614
2616 2615 Rejection Criteria (Errors):
2617 2616 0: No error; analysis OK
2618 2617 1: SNR < SNR threshold
2619 2618 2: angle of arrival (AOA) ambiguously determined
2620 2619 3: AOA estimate not feasible
2621 2620 4: Large difference in AOAs obtained from different antenna baselines
2622 2621 5: echo at start or end of time series
2623 2622 6: echo less than 5 samples long; too short for analysis
2624 2623 7: echo rise exceeds 0.3s
2625 2624 8: echo decay time less than twice rise time
2626 2625 9: large power level before echo
2627 2626 10: large power level after echo
2628 2627 11: poor fit to amplitude for estimation of decay time
2629 2628 12: poor fit to CCF phase variation for estimation of radial drift velocity
2630 2629 13: height unresolvable echo: not valid height within 70 to 110 km
2631 2630 14: height ambiguous echo: more than one possible height within 70 to 110 km
2632 2631 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2633 2632 16: oscillatory echo, indicating the event is most likely not an underdense echo
2634 2633
2635 2634 17: phase difference in meteor Reestimation
2636 2635
2637 2636 Data Storage:
2638 2637 Meteors for Wind Estimation (8):
2639 2638 Utc Time | Range Height
2640 2639 Azimuth Zenith errorCosDir
2641 2640 VelRad errorVelRad
2642 2641 Phase0 Phase1 Phase2 Phase3
2643 2642 TypeError
2644 2643
2645 2644 '''
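# Processing stages implemented in run() below: optional hardware phase correction, DC removal,
# coherent/non-coherent power, noise estimation and meteor candidate detection,
# multiple-detection removal, meteor re-estimation, decay-time estimation and radial-velocity
# estimation (the numbered banners in the code refer to sections of Holdsworth et al. 2004).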
2646 2645
2647 2646 def run(self, dataOut, hei_ref = None, tauindex = 0,
2648 2647 phaseOffsets = None,
2649 2648 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2650 2649 noise_timeStep = 4, noise_multiple = 4,
2651 2650 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2652 2651 phaseThresh = 20, SNRThresh = 5,
2653 2652 hmin = 50, hmax=150, azimuth = 0,
2654 2653 channelPositions = None) :
2655 2654
2656 2655
2657 2656 #Getting Pairslist
2658 2657 if channelPositions is None:
2659 2658 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2660 2659 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2661 2660 meteorOps = SMOperations()
2662 2661 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2663 2662 heiRang = dataOut.heightList
2664 2663 #Get Beacon signal - No Beacon signal anymore
2665 2664 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2666 2665 #
2667 2666 # if hei_ref != None:
2668 2667 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2669 2668 #
2670 2669
2671 2670
2672 2671 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2673 2672 # see if the user put in pre defined phase shifts
2674 2673 voltsPShift = dataOut.data_pre.copy()
2675 2674
2676 2675 # if predefinedPhaseShifts != None:
2677 2676 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2678 2677 #
2679 2678 # # elif beaconPhaseShifts:
2680 2679 # # #get hardware phase shifts using beacon signal
2681 2680 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2682 2681 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2683 2682 #
2684 2683 # else:
2685 2684 # hardwarePhaseShifts = numpy.zeros(5)
2686 2685 #
2687 2686 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2688 2687 # for i in range(self.dataOut.data_pre.shape[0]):
2689 2688 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2690 2689
2691 2690 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2692 2691
2693 2692 #Remove DC
2694 2693 voltsDC = numpy.mean(voltsPShift,1)
2695 2694 voltsDC = numpy.mean(voltsDC,1)
2696 2695 for i in range(voltsDC.shape[0]):
2697 2696 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2698 2697
2699 2698 #Don't consider the last heights; they're used to calculate the hardware phase shift
2700 2699 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2701 2700
2702 2701 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2703 2702 #Coherent Detection
2704 2703 if cohDetection:
2705 2704 #use coherent detection to get the net power
2706 2705 cohDet_thresh = cohDet_thresh*numpy.pi/180
2707 2706 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2708 2707
2709 2708 #Non-coherent detection!
2710 2709 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2711 2710 #********** END OF COH/NON-COH POWER CALCULATION**********************
2712 2711
2713 2712 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2714 2713 #Get noise
2715 2714 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2716 2715 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2717 2716 #Get signal threshold
2718 2717 signalThresh = noise_multiple*noise
2719 2718 #Meteor echoes detection
2720 2719 listMeteors = self.__findMeteors(powerNet, signalThresh)
2721 2720 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
2722 2721
2723 2722 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2724 2723 #Parameters
2725 2724 heiRange = dataOut.heightList
2726 2725 rangeInterval = heiRange[1] - heiRange[0]
2727 2726 rangeLimit = multDet_rangeLimit/rangeInterval
2728 2727 timeLimit = multDet_timeLimit/dataOut.timeInterval
2729 2728 #Multiple detection removals
2730 2729 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2731 2730 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2732 2731
2733 2732 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2734 2733 #Parameters
2735 2734 phaseThresh = phaseThresh*numpy.pi/180
2736 2735 thresh = [phaseThresh, noise_multiple, SNRThresh]
2737 2736 #Meteor reestimation (Errors N 1, 6, 12, 17)
2738 2737 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2739 2738 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2740 2739 #Estimation of decay times (Errors N 7, 8, 11)
2741 2740 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2742 2741 #******************* END OF METEOR REESTIMATION *******************
2743 2742
2744 2743 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2745 2744 #Calculating Radial Velocity (Error N 15)
2746 2745 radialStdThresh = 10
2747 2746 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2748 2747
2749 2748 if len(listMeteors4) > 0:
2750 2749 #Setting New Array
2751 2750 date = dataOut.utctime
2752 2751 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2753 2752
2754 2753 #Correcting phase offset
2755 2754 if phaseOffsets is not None:
2756 2755 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2757 2756 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2758 2757
2759 2758 #Second Pairslist
2760 2759 pairsList = []
2761 2760 pairx = (0,1)
2762 2761 pairy = (2,3)
2763 2762 pairsList.append(pairx)
2764 2763 pairsList.append(pairy)
2765 2764
2766 2765 jph = numpy.array([0,0,0,0])
2767 2766 h = (hmin,hmax)
2768 2767 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2769 2768
2770 2769 # #Calculate AOA (Error N 3, 4)
2771 2770 # #JONES ET AL. 1998
2772 2771 # error = arrayParameters[:,-1]
2773 2772 # AOAthresh = numpy.pi/8
2774 2773 # phases = -arrayParameters[:,9:13]
2775 2774 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2776 2775 #
2777 2776 # #Calculate Heights (Error N 13 and 14)
2778 2777 # error = arrayParameters[:,-1]
2779 2778 # Ranges = arrayParameters[:,2]
2780 2779 # zenith = arrayParameters[:,5]
2781 2780 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2782 2781 # error = arrayParameters[:,-1]
2783 2782 #********************* END OF PARAMETERS CALCULATION **************************
2784 2783
2785 2784 #***************************+ PASS DATA TO NEXT STEP **********************
2786 2785 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2787 2786 dataOut.data_param = arrayParameters
2788 2787
2789 2788 if arrayParameters is None:
2790 2789 dataOut.flagNoData = True
2791 2790 else:
2792 2791 dataOut.flagNoData = False #valid parameters were produced; only flag no-data when there are none
2793 2792
2794 2793 return
2795 2794
2796 2795 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2797 2796
2798 2797 minIndex = min(newheis[0])
2799 2798 maxIndex = max(newheis[0])
2800 2799
2801 2800 voltage = voltage0[:,:,minIndex:maxIndex+1]
2802 2801 nLength = voltage.shape[1]//n #integer division: nLength is used as a slice bound
2803 2802 nMin = 0
2804 2803 nMax = 0
2805 2804 phaseOffset = numpy.zeros((len(pairslist),n))
2806 2805
2807 2806 for i in range(n):
2808 2807 nMax += nLength
2809 2808 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2810 2809 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2811 2810 phaseOffset[:,i] = phaseCCF.transpose()
2812 2811 nMin = nMax
2813 2812 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2814 2813
2815 2814 #Remove Outliers
2816 2815 factor = 2
2817 2816 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2818 2817 dw = numpy.std(wt,axis = 1)
2819 2818 dw = dw.reshape((dw.size,1))
2820 2819 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2821 2820 phaseOffset[ind] = numpy.nan
2822 2821 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2823 2822
2824 2823 return phaseOffset
2825 2824
2826 2825 def __shiftPhase(self, data, phaseShift):
2827 2826 #this will shift the phase of a complex number
2828 2827 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2829 2828 return dataShifted
2830 2829
2831 2830 def __estimatePhaseDifference(self, array, pairslist):
2832 2831 nChannel = array.shape[0]
2833 2832 nHeights = array.shape[2]
2834 2833 numPairs = len(pairslist)
2835 2834 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2836 2835 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2837 2836
2838 2837 #Correct phases
2839 2838 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2840 2839 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2841 2840
2842 2841 if indDer[0].shape[0] > 0:
2843 2842 for i in range(indDer[0].shape[0]):
2844 2843 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2845 2844 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2846 2845
2847 2846 # for j in range(numSides):
2848 2847 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2849 2848 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2850 2849 #
2851 2850 #Linear
2852 2851 phaseInt = numpy.zeros((numPairs,1))
2853 2852 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2854 2853 for j in range(numPairs):
2855 2854 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2856 2855 phaseInt[j] = fit[1]
2857 2856 #Phase Differences
2858 2857 phaseDiff = phaseInt - phaseCCF[:,2,:]
2859 2858 phaseArrival = phaseInt.reshape(phaseInt.size)
2860 2859
2861 2860 #Dealias
2862 2861 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2863 2862 # indAlias = numpy.where(phaseArrival > numpy.pi)
2864 2863 # phaseArrival[indAlias] -= 2*numpy.pi
2865 2864 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2866 2865 # phaseArrival[indAlias] += 2*numpy.pi
2867 2866
2868 2867 return phaseDiff, phaseArrival
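# The block above fits a line to the CCF phase at lags [-2, -1, 1, 2]; the intercept is taken
# as the arrival phase and its difference from the lag-0 phase gives the per-pair correction
# returned in phaseDiff.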
2869 2868
2870 2869 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2871 2870 #this function will run the coherent detection used in Holdsworth et al. 2004 and return the net power
2872 2871 #find the phase shifts of each channel over 1 second intervals
2873 2872 #only look at ranges below the beacon signal
2874 2873 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2875 2874 numBlocks = int(volts.shape[1]/numProfPerBlock)
2876 2875 numHeights = volts.shape[2]
2877 2876 nChannel = volts.shape[0]
2878 2877 voltsCohDet = volts.copy()
2879 2878
2880 2879 pairsarray = numpy.array(pairslist)
2881 2880 indSides = pairsarray[:,1]
2882 2881 # indSides = numpy.array(range(nChannel))
2883 2882 # indSides = numpy.delete(indSides, indCenter)
2884 2883 #
2885 2884 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2886 2885 listBlocks = numpy.array_split(volts, numBlocks, 1)
2887 2886
2888 2887 startInd = 0
2889 2888 endInd = 0
2890 2889
2891 2890 for i in range(numBlocks):
2892 2891 startInd = endInd
2893 2892 endInd = endInd + listBlocks[i].shape[1]
2894 2893
2895 2894 arrayBlock = listBlocks[i]
2896 2895 # arrayBlockCenter = listCenter[i]
2897 2896
2898 2897 #Estimate the Phase Difference
2899 2898 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2900 2899 #Phase Difference RMS
2901 2900 arrayPhaseRMS = numpy.abs(phaseDiff)
2902 2901 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2903 2902 indPhase = numpy.where(phaseRMSaux==4)
2904 2903 #Shifting
2905 2904 if indPhase[0].shape[0] > 0:
2906 2905 for j in range(indSides.size):
2907 2906 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2908 2907 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2909 2908
2910 2909 return voltsCohDet
2911 2910
2912 2911 def __calculateCCF(self, volts, pairslist ,laglist):
2913 2912
2914 2913 nHeights = volts.shape[2]
2915 2914 nPoints = volts.shape[1]
2916 2915 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2917 2916
2918 2917 for i in range(len(pairslist)):
2919 2918 volts1 = volts[pairslist[i][0]]
2920 2919 volts2 = volts[pairslist[i][1]]
2921 2920
2922 2921 for t in range(len(laglist)):
2923 2922 idxT = laglist[t]
2924 2923 if idxT >= 0:
2925 2924 vStacked = numpy.vstack((volts2[idxT:,:],
2926 2925 numpy.zeros((idxT, nHeights),dtype='complex')))
2927 2926 else:
2928 2927 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2929 2928 volts2[:(nPoints + idxT),:]))
2930 2929 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2931 2930
2932 2931 vStacked = None
2933 2932 return voltsCCF
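# Lag handling above: for a positive lag idxT, volts2 is advanced by idxT samples and
# zero-padded, so each CCF value is sum over t of conj(volts1[t]) * volts2[t + idxT] per height.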
2934 2933
2935 2934 def __getNoise(self, power, timeSegment, timeInterval):
2936 2935 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2937 2936 numBlocks = int(power.shape[0]/numProfPerBlock)
2938 2937 numHeights = power.shape[1]
2939 2938
2940 2939 listPower = numpy.array_split(power, numBlocks, 0)
2941 2940 noise = numpy.zeros((power.shape[0], power.shape[1]))
2942 2941 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2943 2942
2944 2943 startInd = 0
2945 2944 endInd = 0
2946 2945
2947 2946 for i in range(numBlocks): #loop over the time blocks
2948 2947 startInd = endInd
2949 2948 endInd = endInd + listPower[i].shape[0]
2950 2949
2951 2950 arrayBlock = listPower[i]
2952 2951 noiseAux = numpy.mean(arrayBlock, 0)
2953 2952 # noiseAux = numpy.median(noiseAux)
2954 2953 # noiseAux = numpy.mean(arrayBlock)
2955 2954 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2956 2955
2957 2956 noiseAux1 = numpy.mean(arrayBlock)
2958 2957 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2959 2958
2960 2959 return noise, noise1
2961 2960
2962 2961 def __findMeteors(self, power, thresh):
2963 2962 nProf = power.shape[0]
2964 2963 nHeights = power.shape[1]
2965 2964 listMeteors = []
2966 2965
2967 2966 for i in range(nHeights):
2968 2967 powerAux = power[:,i]
2969 2968 threshAux = thresh[:,i]
2970 2969
2971 2970 indUPthresh = numpy.where(powerAux > threshAux)[0]
2972 2971 indDNthresh = numpy.where(powerAux <= threshAux)[0]
2973 2972
2974 2973 j = 0
2975 2974
2976 2975 while (j < indUPthresh.size - 2):
2977 2976 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
2978 2977 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
2979 2978 indDNthresh = indDNthresh[indDNAux]
2980 2979
2981 2980 if (indDNthresh.size > 0):
2982 2981 indEnd = indDNthresh[0] - 1
2983 2982 indInit = indUPthresh[j]
2984 2983
2985 2984 meteor = powerAux[indInit:indEnd + 1]
2986 2985 indPeak = meteor.argmax() + indInit
2987 2986 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
2988 2987
2989 2988 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #TO CHECK!!!!!
2990 2989 j = numpy.where(indUPthresh == indEnd)[0] + 1
2991 2990 else: j+=1
2992 2991 else: j+=1
2993 2992
2994 2993 return listMeteors
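# Each detection appended above is [height index, start index, peak index, end index, FLA],
# where FLA is the lag-1 product sum used later to rank overlapping detections.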
2995 2994
2996 2995 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
2997 2996
2998 2997 arrayMeteors = numpy.asarray(listMeteors)
2999 2998 listMeteors1 = []
3000 2999
3001 3000 while arrayMeteors.shape[0] > 0:
3002 3001 FLAs = arrayMeteors[:,4]
3003 3002 maxFLA = FLAs.argmax()
3004 3003 listMeteors1.append(arrayMeteors[maxFLA,:])
3005 3004
3006 3005 MeteorInitTime = arrayMeteors[maxFLA,1]
3007 3006 MeteorEndTime = arrayMeteors[maxFLA,3]
3008 3007 MeteorHeight = arrayMeteors[maxFLA,0]
3009 3008
3010 3009 #Check neighborhood
3011 3010 maxHeightIndex = MeteorHeight + rangeLimit
3012 3011 minHeightIndex = MeteorHeight - rangeLimit
3013 3012 minTimeIndex = MeteorInitTime - timeLimit
3014 3013 maxTimeIndex = MeteorEndTime + timeLimit
3015 3014
3016 3015 #Check Heights
3017 3016 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3018 3017 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3019 3018 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3020 3019
3021 3020 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3022 3021
3023 3022 return listMeteors1
3024 3023
3025 3024 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3026 3025 numHeights = volts.shape[2]
3027 3026 nChannel = volts.shape[0]
3028 3027
3029 3028 thresholdPhase = thresh[0]
3030 3029 thresholdNoise = thresh[1]
3031 3030 thresholdDB = float(thresh[2])
3032 3031
3033 3032 thresholdDB1 = 10**(thresholdDB/10)
3034 3033 pairsarray = numpy.array(pairslist)
3035 3034 indSides = pairsarray[:,1]
3036 3035
3037 3036 pairslist1 = list(pairslist)
3038 3037 pairslist1.append((0,1))
3039 3038 pairslist1.append((3,4))
3040 3039
3041 3040 listMeteors1 = []
3042 3041 listPowerSeries = []
3043 3042 listVoltageSeries = []
3044 3043 #volts holds the raw data
3045 3044
3046 3045 if frequency == 30e6:
3047 3046 timeLag = 45*10**-3
3048 3047 else:
3049 3048 timeLag = 15*10**-3
3050 3049 lag = int(numpy.ceil(timeLag/timeInterval)) #cast to int: lag is used as a slice index below
3051 3050
3052 3051 for i in range(len(listMeteors)):
3053 3052
3054 3053 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3055 3054 meteorAux = numpy.zeros(16)
3056 3055
3057 3056 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3058 3057 mHeight = listMeteors[i][0]
3059 3058 mStart = listMeteors[i][1]
3060 3059 mPeak = listMeteors[i][2]
3061 3060 mEnd = listMeteors[i][3]
3062 3061
3063 3062 #get the volt data between the start and end times of the meteor
3064 3063 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3065 3064 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3066 3065
3067 3066 #3.6. Phase Difference estimation
3068 3067 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3069 3068
3070 3069 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3071 3070 #meteorVolts0.- all Channels, all Profiles
3072 3071 meteorVolts0 = volts[:,:,mHeight]
3073 3072 meteorThresh = noise[:,mHeight]*thresholdNoise
3074 3073 meteorNoise = noise[:,mHeight]
3075 3074 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3076 3075 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3077 3076
3078 3077 #Times reestimation
3079 3078 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3080 3079 if mStart1.size > 0:
3081 3080 mStart1 = mStart1[-1] + 1
3082 3081
3083 3082 else:
3084 3083 mStart1 = mPeak
3085 3084
3086 3085 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3087 3086 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3088 3087 if mEndDecayTime1.size == 0:
3089 3088 mEndDecayTime1 = powerNet0.size
3090 3089 else:
3091 3090 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3092 3091 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3093 3092
3094 3093 #meteorVolts1.- all Channels, from start to end
3095 3094 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3096 3095 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3097 3096 if meteorVolts2.shape[1] == 0:
3098 3097 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3099 3098 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3100 3099 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3101 3100 ##################### END PARAMETERS REESTIMATION #########################
3102 3101
3103 3102 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3104 3103 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3105 3104 if meteorVolts2.shape[1] > 0:
3106 3105 #Phase Difference re-estimation
3107 3106 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3108 3107 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3109 3108 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3110 3109 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3111 3110 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3112 3111
3113 3112 #Phase Difference RMS
3114 3113 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3115 3114 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3116 3115 #Data from Meteor
3117 3116 mPeak1 = powerNet1.argmax() + mStart1
3118 3117 mPeakPower1 = powerNet1.max()
3119 3118 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3120 3119 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3121 3120 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3122 3121 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3123 3122 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3124 3123 #Vectorize
3125 3124 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3126 3125 meteorAux[7:11] = phaseDiffint[0:4]
3127 3126
3128 3127 #Rejection Criterions
3129 3128 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3130 3129 meteorAux[-1] = 17
3131 3130 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3132 3131 meteorAux[-1] = 1
3133 3132
3134 3133
3135 3134 else:
3136 3135 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3137 3136 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3138 3137 PowerSeries = 0
3139 3138
3140 3139 listMeteors1.append(meteorAux)
3141 3140 listPowerSeries.append(PowerSeries)
3142 3141 listVoltageSeries.append(meteorVolts1)
3143 3142
3144 3143 return listMeteors1, listPowerSeries, listVoltageSeries
3145 3144
3146 3145 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3147 3146
3148 3147 threshError = 10
3149 3148 #Depending if it is 30 or 50 MHz
3150 3149 if frequency == 30e6:
3151 3150 timeLag = 45*10**-3
3152 3151 else:
3153 3152 timeLag = 15*10**-3
3154 3153 lag = int(numpy.ceil(timeLag/timeInterval)) #cast to int: used below as an array index offset
3155 3154
3156 3155 listMeteors1 = []
3157 3156
3158 3157 for i in range(len(listMeteors)):
3159 3158 meteorPower = listPower[i]
3160 3159 meteorAux = listMeteors[i]
3161 3160
3162 3161 if meteorAux[-1] == 0:
3163 3162
3164 3163 try:
3165 3164 indmax = meteorPower.argmax()
3166 3165 indlag = indmax + lag
3167 3166
3168 3167 y = meteorPower[indlag:]
3169 3168 x = numpy.arange(0, y.size)*timeLag
3170 3169
3171 3170 #first guess
3172 3171 a = y[0]
3173 3172 tau = timeLag
3174 3173 #exponential fit
3175 3174 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3176 3175 y1 = self.__exponential_function(x, *popt)
3177 3176 #error estimation
3178 3177 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3179 3178
3180 3179 decayTime = popt[1]
3181 3180 riseTime = indmax*timeInterval
3182 3181 meteorAux[11:13] = [decayTime, error]
3183 3182
3184 3183 #Table items 7, 8 and 11
3185 3184 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3186 3185 meteorAux[-1] = 7
3187 3186 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3188 3187 meteorAux[-1] = 8
3189 3188 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3190 3189 meteorAux[-1] = 11
3191 3190
3192 3191
3193 3192 except:
3194 3193 meteorAux[-1] = 11
3195 3194
3196 3195
3197 3196 listMeteors1.append(meteorAux)
3198 3197
3199 3198 return listMeteors1
3200 3199
3201 3200 #Exponential Function
3202 3201
3203 3202 def __exponential_function(self, x, a, tau):
3204 3203 y = a*numpy.exp(-x/tau)
3205 3204 return y
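# Illustrative check (assumed numbers): with a = 100 and tau = 0.05 s, y at x = 0.05 s is
# 100*exp(-1) ~ 36.8, i.e. tau is the time for the fitted echo power to fall by a factor e.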
3206 3205
3207 3206 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3208 3207
3209 3208 pairslist1 = list(pairslist)
3210 3209 pairslist1.append((0,1))
3211 3210 pairslist1.append((3,4))
3212 3211 numPairs = len(pairslist1)
3213 3212 #Time Lag
3214 3213 timeLag = 45*10**-3
3215 3214 c = 3e8
3216 3215 lag = numpy.ceil(timeLag/timeInterval)
3217 3216 freq = 30e6
3218 3217
3219 3218 listMeteors1 = []
3220 3219
3221 3220 for i in range(len(listMeteors)):
3222 3221 meteorAux = listMeteors[i]
3223 3222 if meteorAux[-1] == 0:
3224 3223 mStart = listMeteors[i][1]
3225 3224 mPeak = listMeteors[i][2]
3226 3225 mLag = mPeak - mStart + lag
3227 3226
3228 3227 #get the volt data between the start and end times of the meteor
3229 3228 meteorVolts = listVolts[i]
3230 3229 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3231 3230
3232 3231 #Get CCF
3233 3232 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3234 3233
3235 3234 #Method 2
3236 3235 slopes = numpy.zeros(numPairs)
3237 3236 time = numpy.array([-2,-1,1,2])*timeInterval
3238 3237 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3239 3238
3240 3239 #Correct phases
3241 3240 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3242 3241 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3243 3242
3244 3243 if indDer[0].shape[0] > 0:
3245 3244 for i in range(indDer[0].shape[0]):
3246 3245 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3247 3246 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3248 3247
3249 3248 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3250 3249 for j in range(numPairs):
3251 3250 fit = stats.linregress(time, angAllCCF[j,:])
3252 3251 slopes[j] = fit[0]
3253 3252
3254 3253 #Remove Outlier
3255 3254 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3256 3255 # slopes = numpy.delete(slopes,indOut)
3257 3256 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3258 3257 # slopes = numpy.delete(slopes,indOut)
3259 3258
3260 3259 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3261 3260 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3262 3261 meteorAux[-2] = radialError
3263 3262 meteorAux[-3] = radialVelocity
3264 3263
3265 3264 #Setting Error
3266 3265 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3267 3266 if numpy.abs(radialVelocity) > 200:
3268 3267 meteorAux[-1] = 15
3269 3268 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3270 3269 elif radialError > radialStdThresh:
3271 3270 meteorAux[-1] = 12
3272 3271
3273 3272 listMeteors1.append(meteorAux)
3274 3273 return listMeteors1
3275 3274
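# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The phase-slope to radial-velocity conversion used in __getRadialVelocity,
# shown on synthetic numbers: a line is fitted to the cross-correlation phase
# at lags -2, -1, 1, 2 and the slope (rad/s) is scaled by lambda/(4*pi), i.e.
# (0.25/pi)*(c/freq). The lag phases and time interval are assumed values.
import numpy
from scipy import stats

c = 3e8
freq = 30e6                                   # Hz, assumed operating frequency
timeInterval = 4e-3                           # s, assumed lag spacing
time = numpy.array([-2, -1, 1, 2]) * timeInterval
angCCF = numpy.array([-0.50, -0.25, 0.25, 0.50])   # rad, unwrapped CCF phases

slope = stats.linregress(time, angCCF)[0]     # dphi/dt in rad/s
radialVelocity = -slope * (0.25 / numpy.pi) * (c / freq)
print("radial velocity: %.1f m/s" % radialVelocity)
# ----------------------------------------------------------------------------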
3276 3275 def __setNewArrays(self, listMeteors, date, heiRang):
3277 3276
3278 3277 #New arrays
3279 3278 arrayMeteors = numpy.array(listMeteors)
3280 3279 arrayParameters = numpy.zeros((len(listMeteors), 13))
3281 3280
3282 3281 #Date inclusion
3283 3282 # date = re.findall(r'\((.*?)\)', date)
3284 3283 # date = date[0].split(',')
3285 3284 # date = map(int, date)
3286 3285 #
3287 3286 # if len(date)<6:
3288 3287 # date.append(0)
3289 3288 #
3290 3289 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3291 3290 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3292 3291 arrayDate = numpy.tile(date, (len(listMeteors)))
3293 3292
3294 3293 #Meteor array
3295 3294 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3296 3295 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3297 3296
3298 3297 #Parameters Array
3299 3298 arrayParameters[:,0] = arrayDate #Date
3300 3299 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3301 3300 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3302 3301 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3303 3302 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3304 3303
3305 3304
3306 3305 return arrayParameters
3307 3306
3308 3307 class CorrectSMPhases(Operation):
3309 3308
3310 3309 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3311 3310
3312 3311 arrayParameters = dataOut.data_param
3313 3312 pairsList = []
3314 3313 pairx = (0,1)
3315 3314 pairy = (2,3)
3316 3315 pairsList.append(pairx)
3317 3316 pairsList.append(pairy)
3318 3317 jph = numpy.zeros(4)
3319 3318
3320 3319 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3321 3320 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3322 3321 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3323 3322
3324 3323 meteorOps = SMOperations()
3325 3324 if channelPositions is None:
3326 3325 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3327 3326 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3328 3327
3329 3328 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3330 3329 h = (hmin,hmax)
3331 3330
3332 3331 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3333 3332
3334 3333 dataOut.data_param = arrayParameters
3335 3334 return
3336 3335
3337 3336 class SMPhaseCalibration(Operation):
3338 3337
3339 3338 __buffer = None
3340 3339
3341 3340 __initime = None
3342 3341
3343 3342 __dataReady = False
3344 3343
3345 3344 __isConfig = False
3346 3345
3347 3346 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3348 3347
3349 3348 dataTime = currentTime + paramInterval
3350 3349 deltaTime = dataTime - initTime
3351 3350
3352 3351 if deltaTime >= outputInterval or deltaTime < 0:
3353 3352 return True
3354 3353
3355 3354 return False
3356 3355
3357 3356 def __getGammas(self, pairs, d, phases):
3358 3357 gammas = numpy.zeros(2)
3359 3358
3360 3359 for i in range(len(pairs)):
3361 3360
3362 3361 pairi = pairs[i]
3363 3362
3364 3363 phip3 = phases[:,pairi[0]]
3365 3364 d3 = d[pairi[0]]
3366 3365 phip2 = phases[:,pairi[1]]
3367 3366 d2 = d[pairi[1]]
3368 3367 #Calculating gamma
3369 3368 # jdcos = alp1/(k*d1)
3370 3369 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3371 3370 jgamma = -phip2*d3/d2 - phip3
3372 3371 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3373 3372 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3374 3373 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3375 3374
3376 3375 #Revised distribution
3377 3376 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3378 3377
3379 3378 #Histogram
3380 3379 nBins = 64
3381 3380 rmin = -0.5*numpy.pi
3382 3381 rmax = 0.5*numpy.pi
3383 3382 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3384 3383
3385 3384 meteorsY = phaseHisto[0]
3386 3385 phasesX = phaseHisto[1][:-1]
3387 3386 width = phasesX[1] - phasesX[0]
3388 3387 phasesX += width/2
3389 3388
3390 3389 #Gaussian approximation
3391 3390 bpeak = meteorsY.argmax()
3392 3391 peak = meteorsY.max()
3393 3392 jmin = bpeak - 5
3394 3393 jmax = bpeak + 5 + 1
3395 3394
3396 3395 if jmin<0:
3397 3396 jmin = 0
3398 3397 jmax = 6
3399 3398 elif jmax > meteorsY.size:
3400 3399 jmin = meteorsY.size - 6
3401 3400 jmax = meteorsY.size
3402 3401
3403 3402 x0 = numpy.array([peak,bpeak,50])
3404 3403 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3405 3404
3406 3405 #Gammas
3407 3406 gammas[i] = coeff[0][1]
3408 3407
3409 3408 return gammas
3410 3409
3411 3410 def __residualFunction(self, coeffs, y, t):
3412 3411
3413 3412 return y - self.__gauss_function(t, coeffs)
3414 3413
3415 3414 def __gauss_function(self, t, coeffs):
3416 3415
3417 3416 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3418 3417
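# ---- Editor's illustrative sketch (not part of the original diff) ----------
# A standalone version of the histogram + Gaussian least-squares step used in
# __getGammas: a phase sample is histogrammed and the Gaussian
# amp*exp(-0.5*((t-mu)/sigma)**2) is fitted with optimize.leastsq; the fitted
# center plays the role of the gamma estimate. The synthetic phase sample is
# an assumption for illustration only.
import numpy
from scipy import optimize

def gauss_function(t, coeffs):
    return coeffs[0] * numpy.exp(-0.5 * ((t - coeffs[1]) / coeffs[2])**2)

def residual_function(coeffs, y, t):
    return y - gauss_function(t, coeffs)

jgamma = numpy.random.normal(0.3, 0.2, 2000)       # rad, synthetic phase sample
counts, edges = numpy.histogram(jgamma, bins=64, range=(-0.5*numpy.pi, 0.5*numpy.pi))
centers = edges[:-1] + 0.5 * (edges[1] - edges[0])

x0 = numpy.array([counts.max(), centers[counts.argmax()], 0.1])
coeff = optimize.leastsq(residual_function, x0, args=(counts, centers))
print("fitted distribution center: %.3f rad" % coeff[0][1])
# ----------------------------------------------------------------------------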
3419 3418 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3420 3419 meteorOps = SMOperations()
3421 3420 nchan = 4
3422 3421 pairx = pairsList[0] #x is 0
3423 3422 pairy = pairsList[1] #y is 1
3424 3423 center_xangle = 0
3425 3424 center_yangle = 0
3426 3425 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3427 3426 ntimes = len(range_angle)
3428 3427
3429 3428 nstepsx = 20
3430 3429 nstepsy = 20
3431 3430
3432 3431 for iz in range(ntimes):
3433 3432 min_xangle = -range_angle[iz]/2 + center_xangle
3434 3433 max_xangle = range_angle[iz]/2 + center_xangle
3435 3434 min_yangle = -range_angle[iz]/2 + center_yangle
3436 3435 max_yangle = range_angle[iz]/2 + center_yangle
3437 3436
3438 3437 inc_x = (max_xangle-min_xangle)/nstepsx
3439 3438 inc_y = (max_yangle-min_yangle)/nstepsy
3440 3439
3441 3440 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3442 3441 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3443 3442 penalty = numpy.zeros((nstepsx,nstepsy))
3444 3443 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3445 3444 jph = numpy.zeros(nchan)
3446 3445
3447 3446 # Iterations looking for the offset
3448 3447 for iy in range(int(nstepsy)):
3449 3448 for ix in range(int(nstepsx)):
3450 3449 d3 = d[pairsList[1][0]]
3451 3450 d2 = d[pairsList[1][1]]
3452 3451 d5 = d[pairsList[0][0]]
3453 3452 d4 = d[pairsList[0][1]]
3454 3453
3455 3454 alp2 = alpha_y[iy] #gamma 1
3456 3455 alp4 = alpha_x[ix] #gamma 0
3457 3456
3458 3457 alp3 = -alp2*d3/d2 - gammas[1]
3459 3458 alp5 = -alp4*d5/d4 - gammas[0]
3460 3459 # jph[pairy[1]] = alpha_y[iy]
3461 3460 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3462 3461
3463 3462 # jph[pairx[1]] = alpha_x[ix]
3464 3463 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3465 3464 jph[pairsList[0][1]] = alp4
3466 3465 jph[pairsList[0][0]] = alp5
3467 3466 jph[pairsList[1][0]] = alp3
3468 3467 jph[pairsList[1][1]] = alp2
3469 3468 jph_array[:,ix,iy] = jph
3470 3469 # d = [2.0,2.5,2.5,2.0]
3471 3470 #TODO: check that the meteors are read correctly
3472 3471 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3473 3472 error = meteorsArray1[:,-1]
3474 3473 ind1 = numpy.where(error==0)[0]
3475 3474 penalty[ix,iy] = ind1.size
3476 3475
3477 3476 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3478 3477 phOffset = jph_array[:,i,j]
3479 3478
3480 3479 center_xangle = phOffset[pairx[1]]
3481 3480 center_yangle = phOffset[pairy[1]]
3482 3481
3483 3482 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3484 3483 phOffset = phOffset*180/numpy.pi
3485 3484 return phOffset
3486 3485
3487 3486
3488 3487 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3489 3488
3490 3489 dataOut.flagNoData = True
3491 3490 self.__dataReady = False
3492 3491 dataOut.outputInterval = nHours*3600
3493 3492
3494 3493 if self.__isConfig == False:
3495 3494 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3496 3495 #Get Initial LTC time
3497 3496 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3498 3497 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3499 3498
3500 3499 self.__isConfig = True
3501 3500
3502 3501 if self.__buffer is None:
3503 3502 self.__buffer = dataOut.data_param.copy()
3504 3503
3505 3504 else:
3506 3505 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3507 3506
3508 3507 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3509 3508
3510 3509 if self.__dataReady:
3511 3510 dataOut.utctimeInit = self.__initime
3512 3511 self.__initime += dataOut.outputInterval #to erase time offset
3513 3512
3514 3513 freq = dataOut.frequency
3515 3514 c = dataOut.C #m/s
3516 3515 lamb = c/freq
3517 3516 k = 2*numpy.pi/lamb
3518 3517 azimuth = 0
3519 3518 h = (hmin, hmax)
3520 3519 # pairs = ((0,1),(2,3)) #Estrella
3521 3520 # pairs = ((1,0),(2,3)) #T
3522 3521
3523 3522 if channelPositions is None:
3524 3523 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3525 3524 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3526 3525 meteorOps = SMOperations()
3527 3526 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3528 3527
3529 3528 #Checking correct order of pairs
3530 3529 pairs = []
3531 3530 if distances[1] > distances[0]:
3532 3531 pairs.append((1,0))
3533 3532 else:
3534 3533 pairs.append((0,1))
3535 3534
3536 3535 if distances[3] > distances[2]:
3537 3536 pairs.append((3,2))
3538 3537 else:
3539 3538 pairs.append((2,3))
3540 3539 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3541 3540
3542 3541 meteorsArray = self.__buffer
3543 3542 error = meteorsArray[:,-1]
3544 3543 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3545 3544 ind1 = numpy.where(boolError)[0]
3546 3545 meteorsArray = meteorsArray[ind1,:]
3547 3546 meteorsArray[:,-1] = 0
3548 3547 phases = meteorsArray[:,8:12]
3549 3548
3550 3549 #Calculate Gammas
3551 3550 gammas = self.__getGammas(pairs, distances, phases)
3552 3551 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3553 3552 #Calculate Phases
3554 3553 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3555 3554 phasesOff = phasesOff.reshape((1,phasesOff.size))
3556 3555 dataOut.data_output = -phasesOff
3557 3556 dataOut.flagNoData = False
3558 3557 self.__buffer = None
3559 3558
3560 3559
3561 3560 return
3562 3561
3563 3562 class SMOperations():
3564 3563
3565 3564 def __init__(self):
3566 3565
3567 3566 return
3568 3567
3569 3568 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3570 3569
3571 3570 arrayParameters = arrayParameters0.copy()
3572 3571 hmin = h[0]
3573 3572 hmax = h[1]
3574 3573
3575 3574 #Calculate AOA (Error N 3, 4)
3576 3575 #JONES ET AL. 1998
3577 3576 AOAthresh = numpy.pi/8
3578 3577 error = arrayParameters[:,-1]
3579 3578 phases = -arrayParameters[:,8:12] + jph
3580 3579 # phases = numpy.unwrap(phases)
3581 3580 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3582 3581
3583 3582 #Calculate Heights (Error N 13 and 14)
3584 3583 error = arrayParameters[:,-1]
3585 3584 Ranges = arrayParameters[:,1]
3586 3585 zenith = arrayParameters[:,4]
3587 3586 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3588 3587
3589 3588 #----------------------- Get Final data ------------------------------------
3590 3589 # error = arrayParameters[:,-1]
3591 3590 # ind1 = numpy.where(error==0)[0]
3592 3591 # arrayParameters = arrayParameters[ind1,:]
3593 3592
3594 3593 return arrayParameters
3595 3594
3596 3595 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3597 3596
3598 3597 arrayAOA = numpy.zeros((phases.shape[0],3))
3599 3598 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3600 3599
3601 3600 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3602 3601 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3603 3602 arrayAOA[:,2] = cosDirError
3604 3603
3605 3604 azimuthAngle = arrayAOA[:,0]
3606 3605 zenithAngle = arrayAOA[:,1]
3607 3606
3608 3607 #Setting Error
3609 3608 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3610 3609 error[indError] = 0
3611 3610 #Number 3: AOA not feasible
3612 3611 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3613 3612 error[indInvalid] = 3
3614 3613 #Number 4: Large difference in AOAs obtained from different antenna baselines
3615 3614 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3616 3615 error[indInvalid] = 4
3617 3616 return arrayAOA, error
3618 3617
3619 3618 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3620 3619
3621 3620 #Initializing some variables
3622 3621 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3623 3622 ang_aux = ang_aux.reshape(1,ang_aux.size)
3624 3623
3625 3624 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3626 3625 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3627 3626
3628 3627
3629 3628 for i in range(2):
3630 3629 ph0 = arrayPhase[:,pairsList[i][0]]
3631 3630 ph1 = arrayPhase[:,pairsList[i][1]]
3632 3631 d0 = distances[pairsList[i][0]]
3633 3632 d1 = distances[pairsList[i][1]]
3634 3633
3635 3634 ph0_aux = ph0 + ph1
3636 3635 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3637 3636 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3638 3637 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3639 3638 #First Estimation
3640 3639 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3641 3640
3642 3641 #Most-Accurate Second Estimation
3643 3642 phi1_aux = ph0 - ph1
3644 3643 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3645 3644 #Direction Cosine 1
3646 3645 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3647 3646
3648 3647 #Searching the correct Direction Cosine
3649 3648 cosdir0_aux = cosdir0[:,i]
3650 3649 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3651 3650 #Minimum Distance
3652 3651 cosDiff = (cosdir1 - cosdir0_aux)**2
3653 3652 indcos = cosDiff.argmin(axis = 1)
3654 3653 #Saving Value obtained
3655 3654 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3656 3655
3657 3656 return cosdir0, cosdir
3658 3657
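# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The two-step direction-cosine estimate implemented in __getDirectionCosines,
# reduced to a single measurement: a coarse, unambiguous estimate from the
# short effective baseline (d0 - d1) selects the correct 2*pi alias of the
# finer long-baseline (d0 + d1) estimate. Phases and baseline lengths (in
# wavelengths) are assumed values.
import numpy

d0, d1 = 2.5, 2.0                        # baselines in wavelengths (assumed)
ph0, ph1 = 1.2, -0.8                     # measured phases in rad (assumed)

cosdir0 = numpy.angle(numpy.exp(1j*(ph0 + ph1))) / (2*numpy.pi*(d0 - d1))   # coarse
k = numpy.arange(-8, 9) * 2*numpy.pi                                        # alias candidates
cosdir1 = ((ph0 - ph1) + k) / (2*numpy.pi*(d0 + d1))                        # fine but ambiguous
cosdir = cosdir1[numpy.argmin((cosdir1 - cosdir0)**2)]                      # closest alias wins
print("coarse: %.3f resolved: %.3f" % (cosdir0, cosdir))
# ----------------------------------------------------------------------------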
3659 3658 def __calculateAOA(self, cosdir, azimuth):
3660 3659 cosdirX = cosdir[:,0]
3661 3660 cosdirY = cosdir[:,1]
3662 3661
3663 3662 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3664 3663 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3665 3664 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3666 3665
3667 3666 return angles
3668 3667
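# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The direction-cosine to angle conversion of __calculateAOA on one sample:
# zenith = arccos(sqrt(1 - cosdirX**2 - cosdirY**2)) and
# azimuth = arctan2(cosdirX, cosdirY) + array azimuth (0 deg north, 90 deg east).
# The direction cosines and azimuth offset are assumed values.
import numpy

cosdirX, cosdirY = 0.2, 0.1              # assumed direction cosines
azimuth = 45.0                           # deg, assumed array rotation

zenithAngle = numpy.degrees(numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2)))
azimuthAngle = numpy.degrees(numpy.arctan2(cosdirX, cosdirY)) + azimuth
print("azimuth: %.1f deg, zenith: %.1f deg" % (azimuthAngle, zenithAngle))
# ----------------------------------------------------------------------------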
3669 3668 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3670 3669
3671 3670 Ramb = 375 #Ramb = c/(2*PRF)
3672 3671 Re = 6371 #Earth Radius
3673 3672 heights = numpy.zeros(Ranges.shape)
3674 3673
3675 3674 R_aux = numpy.array([0,1,2])*Ramb
3676 3675 R_aux = R_aux.reshape(1,R_aux.size)
3677 3676
3678 3677 Ranges = Ranges.reshape(Ranges.size,1)
3679 3678
3680 3679 Ri = Ranges + R_aux
3681 3680 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3682 3681
3683 3682 #Check if there is a height between 70 and 110 km
3684 3683 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3685 3684 ind_h = numpy.where(h_bool == 1)[0]
3686 3685
3687 3686 hCorr = hi[ind_h, :]
3688 3687 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3689 3688
3690 3689 hCorr = hi[ind_hCorr][:len(ind_h)]
3691 3690 heights[ind_h] = hCorr
3692 3691
3693 3692 #Setting Error
3694 3693 #Number 13: Height unresolvable echo: no valid height within 70 to 110 km
3695 3694 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3696 3695 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3697 3696 error[indError] = 0
3698 3697 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3699 3698 error[indInvalid2] = 14
3700 3699 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3701 3700 error[indInvalid1] = 13
3702 3701
3703 3702 return heights, error
3704 3703
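# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The range de-aliasing done in __getHeights on one echo: the measured range
# is unfolded by multiples of the ambiguity range Ramb and every candidate is
# converted to altitude with h = sqrt(Re**2 + Ri**2 + 2*Re*Ri*cos(zenith)) - Re,
# keeping only candidates inside the expected meteor region (70-110 km here).
# The measured range and zenith angle are assumed values.
import numpy

Ramb = 375.0                              # km, ambiguity range = c/(2*PRF)
Re = 6371.0                               # km, Earth radius
minHeight, maxHeight = 70.0, 110.0        # km

measuredRange = 110.0                     # km, assumed aliased range
zenith = 30.0                             # deg, assumed zenith angle

Ri = measuredRange + numpy.array([0, 1, 2]) * Ramb
hi = numpy.sqrt(Re**2 + Ri**2 + 2*Re*Ri*numpy.cos(numpy.radians(zenith))) - Re
valid = (hi > minHeight) & (hi < maxHeight)
print("candidate heights (km):", numpy.round(hi, 1), "-> accepted:", hi[valid])
# ----------------------------------------------------------------------------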
3705 3704 def getPhasePairs(self, channelPositions):
3706 3705 chanPos = numpy.array(channelPositions)
3707 3706 listOper = list(itertools.combinations(list(range(5)),2))
3708 3707
3709 3708 distances = numpy.zeros(4)
3710 3709 axisX = []
3711 3710 axisY = []
3712 3711 distX = numpy.zeros(3)
3713 3712 distY = numpy.zeros(3)
3714 3713 ix = 0
3715 3714 iy = 0
3716 3715
3717 3716 pairX = numpy.zeros((2,2))
3718 3717 pairY = numpy.zeros((2,2))
3719 3718
3720 3719 for i in range(len(listOper)):
3721 3720 pairi = listOper[i]
3722 3721
3723 3722 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3724 3723
3725 3724 if posDif[0] == 0:
3726 3725 axisY.append(pairi)
3727 3726 distY[iy] = posDif[1]
3728 3727 iy += 1
3729 3728 elif posDif[1] == 0:
3730 3729 axisX.append(pairi)
3731 3730 distX[ix] = posDif[0]
3732 3731 ix += 1
3733 3732
3734 3733 for i in range(2):
3735 3734 if i==0:
3736 3735 dist0 = distX
3737 3736 axis0 = axisX
3738 3737 else:
3739 3738 dist0 = distY
3740 3739 axis0 = axisY
3741 3740
3742 3741 side = numpy.argsort(dist0)[:-1]
3743 3742 axis0 = numpy.array(axis0)[side,:]
3744 3743 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3745 3744 axis1 = numpy.unique(numpy.reshape(axis0,4))
3746 3745 side = axis1[axis1 != chanC]
3747 3746 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3748 3747 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3749 3748 if diff1<0:
3750 3749 chan2 = side[0]
3751 3750 d2 = numpy.abs(diff1)
3752 3751 chan1 = side[1]
3753 3752 d1 = numpy.abs(diff2)
3754 3753 else:
3755 3754 chan2 = side[1]
3756 3755 d2 = numpy.abs(diff2)
3757 3756 chan1 = side[0]
3758 3757 d1 = numpy.abs(diff1)
3759 3758
3760 3759 if i==0:
3761 3760 chanCX = chanC
3762 3761 chan1X = chan1
3763 3762 chan2X = chan2
3764 3763 distances[0:2] = numpy.array([d1,d2])
3765 3764 else:
3766 3765 chanCY = chanC
3767 3766 chan1Y = chan1
3768 3767 chan2Y = chan2
3769 3768 distances[2:4] = numpy.array([d1,d2])
3770 3769 # axisXsides = numpy.reshape(axisX[ix,:],4)
3771 3770 #
3772 3771 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3773 3772 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3774 3773 #
3775 3774 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3776 3775 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3777 3776 # channel25X = int(pairX[0,ind25X])
3778 3777 # channel20X = int(pairX[1,ind20X])
3779 3778 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3780 3779 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3781 3780 # channel25Y = int(pairY[0,ind25Y])
3782 3781 # channel20Y = int(pairY[1,ind20Y])
3783 3782
3784 3783 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3785 3784 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3786 3785
3787 3786 return pairslist, distances
3788 3787 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3789 3788 #
3790 3789 # arrayAOA = numpy.zeros((phases.shape[0],3))
3791 3790 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3792 3791 #
3793 3792 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3794 3793 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3795 3794 # arrayAOA[:,2] = cosDirError
3796 3795 #
3797 3796 # azimuthAngle = arrayAOA[:,0]
3798 3797 # zenithAngle = arrayAOA[:,1]
3799 3798 #
3800 3799 # #Setting Error
3801 3800 # #Number 3: AOA not fesible
3802 3801 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3803 3802 # error[indInvalid] = 3
3804 3803 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3805 3804 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3806 3805 # error[indInvalid] = 4
3807 3806 # return arrayAOA, error
3808 3807 #
3809 3808 # def __getDirectionCosines(self, arrayPhase, pairsList):
3810 3809 #
3811 3810 # #Initializing some variables
3812 3811 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3813 3812 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3814 3813 #
3815 3814 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3816 3815 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3817 3816 #
3818 3817 #
3819 3818 # for i in range(2):
3820 3819 # #First Estimation
3821 3820 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3822 3821 # #Dealias
3823 3822 # indcsi = numpy.where(phi0_aux > numpy.pi)
3824 3823 # phi0_aux[indcsi] -= 2*numpy.pi
3825 3824 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3826 3825 # phi0_aux[indcsi] += 2*numpy.pi
3827 3826 # #Direction Cosine 0
3828 3827 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3829 3828 #
3830 3829 # #Most-Accurate Second Estimation
3831 3830 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3832 3831 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3833 3832 # #Direction Cosine 1
3834 3833 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3835 3834 #
3836 3835 # #Searching the correct Direction Cosine
3837 3836 # cosdir0_aux = cosdir0[:,i]
3838 3837 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3839 3838 # #Minimum Distance
3840 3839 # cosDiff = (cosdir1 - cosdir0_aux)**2
3841 3840 # indcos = cosDiff.argmin(axis = 1)
3842 3841 # #Saving Value obtained
3843 3842 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3844 3843 #
3845 3844 # return cosdir0, cosdir
3846 3845 #
3847 3846 # def __calculateAOA(self, cosdir, azimuth):
3848 3847 # cosdirX = cosdir[:,0]
3849 3848 # cosdirY = cosdir[:,1]
3850 3849 #
3851 3850 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3852 3851 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3853 3852 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3854 3853 #
3855 3854 # return angles
3856 3855 #
3857 3856 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3858 3857 #
3859 3858 # Ramb = 375 #Ramb = c/(2*PRF)
3860 3859 # Re = 6371 #Earth Radius
3861 3860 # heights = numpy.zeros(Ranges.shape)
3862 3861 #
3863 3862 # R_aux = numpy.array([0,1,2])*Ramb
3864 3863 # R_aux = R_aux.reshape(1,R_aux.size)
3865 3864 #
3866 3865 # Ranges = Ranges.reshape(Ranges.size,1)
3867 3866 #
3868 3867 # Ri = Ranges + R_aux
3869 3868 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3870 3869 #
3871 3870 # #Check if there is a height between 70 and 110 km
3872 3871 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3873 3872 # ind_h = numpy.where(h_bool == 1)[0]
3874 3873 #
3875 3874 # hCorr = hi[ind_h, :]
3876 3875 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3877 3876 #
3878 3877 # hCorr = hi[ind_hCorr]
3879 3878 # heights[ind_h] = hCorr
3880 3879 #
3881 3880 # #Setting Error
3882 3881 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3883 3882 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3884 3883 #
3885 3884 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3886 3885 # error[indInvalid2] = 14
3887 3886 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3888 3887 # error[indInvalid1] = 13
3889 3888 #
3890 3889 # return heights, error
@@ -1,1411 +1,1411
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24
25 25 class SpectraProc(ProcessingUnit):
26 26
27 27 def __init__(self):
28 28
29 29 ProcessingUnit.__init__(self)
30 30
31 31 self.buffer = None
32 32 self.firstdatatime = None
33 33 self.profIndex = 0
34 34 self.dataOut = Spectra()
35 35 self.id_min = None
36 36 self.id_max = None
37 37 self.setupReq = False #Add this to all processing units
38 38
39 39 def __updateSpecFromVoltage(self):
40 40
41 41 self.dataOut.timeZone = self.dataIn.timeZone
42 42 self.dataOut.dstFlag = self.dataIn.dstFlag
43 43 self.dataOut.errorCount = self.dataIn.errorCount
44 44 self.dataOut.useLocalTime = self.dataIn.useLocalTime
45 45 try:
46 46 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
47 47 except:
48 48 pass
49 49 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
50 50 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
51 51 self.dataOut.channelList = self.dataIn.channelList
52 52 self.dataOut.heightList = self.dataIn.heightList
53 53 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
54 54 self.dataOut.nProfiles = self.dataOut.nFFTPoints
55 55 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
56 56 self.dataOut.utctime = self.firstdatatime
57 57 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
58 58 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
59 59 self.dataOut.flagShiftFFT = False
60 60 self.dataOut.nCohInt = self.dataIn.nCohInt
61 61 self.dataOut.nIncohInt = 1
62 62 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
63 63 self.dataOut.frequency = self.dataIn.frequency
64 64 self.dataOut.realtime = self.dataIn.realtime
65 65 self.dataOut.azimuth = self.dataIn.azimuth
66 66 self.dataOut.zenith = self.dataIn.zenith
67 67 self.dataOut.codeList = self.dataIn.codeList
68 68 self.dataOut.azimuthList = self.dataIn.azimuthList
69 69 self.dataOut.elevationList = self.dataIn.elevationList
70 70
71 71 def __getFft(self):
72 72 """
73 73 Converts Voltage values into Spectra
74 74
75 75 Affected:
76 76 self.dataOut.data_spc
77 77 self.dataOut.data_cspc
78 78 self.dataOut.data_dc
79 79 self.dataOut.heightList
80 80 self.profIndex
81 81 self.buffer
82 82 self.dataOut.flagNoData
83 83 """
84 84 fft_volt = numpy.fft.fft(
85 85 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
86 86 fft_volt = fft_volt.astype(numpy.dtype('complex'))
87 87 dc = fft_volt[:, 0, :]
88 88
89 89 # self-spectra computation
90 90 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
91 91 spc = fft_volt * numpy.conjugate(fft_volt)
92 92 spc = spc.real
93 93
94 94 blocksize = 0
95 95 blocksize += dc.size
96 96 blocksize += spc.size
97 97
98 98 cspc = None
99 99 pairIndex = 0
100 100 if self.dataOut.pairsList != None:
101 101 # cross-spectra computation
102 102 cspc = numpy.zeros(
103 103 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
104 104 for pair in self.dataOut.pairsList:
105 105 if pair[0] not in self.dataOut.channelList:
106 106 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
107 107 str(pair), str(self.dataOut.channelList)))
108 108 if pair[1] not in self.dataOut.channelList:
109 109 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
110 110 str(pair), str(self.dataOut.channelList)))
111 111
112 112 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
113 113 numpy.conjugate(fft_volt[pair[1], :, :])
114 114 pairIndex += 1
115 115 blocksize += cspc.size
116 116
117 117 self.dataOut.data_spc = spc
118 118 self.dataOut.data_cspc = cspc
119 119 self.dataOut.data_dc = dc
120 120 self.dataOut.blockSize = blocksize
121 121 self.dataOut.flagShiftFFT = False
122 122
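# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The voltage-to-spectra step of __getFft on a synthetic block: the FFT is
# taken along the profile axis, the DC bin is extracted, the spectrum is
# shifted so zero Doppler sits in the middle, and the self-spectrum is
# FFT * conj(FFT); a cross-spectrum combines two channels the same way.
# The array sizes are arbitrary assumptions.
import numpy

nChannels, nProfiles, nHeights = 2, 64, 100
volts = (numpy.random.randn(nChannels, nProfiles, nHeights)
         + 1j*numpy.random.randn(nChannels, nProfiles, nHeights))

fft_volt = numpy.fft.fft(volts, n=nProfiles, axis=1)
dc = fft_volt[:, 0, :]                                     # zero-frequency bin
fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
spc = (fft_volt * numpy.conjugate(fft_volt)).real          # self-spectra
cspc = fft_volt[0, :, :] * numpy.conjugate(fft_volt[1, :, :])   # pair (0,1)
print(spc.shape, cspc.shape, dc.shape)
# ----------------------------------------------------------------------------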
123 123 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
124 124
125 125 if self.dataIn.type == "Spectra":
126 126 self.dataOut.copy(self.dataIn)
127 127 if shift_fft:
128 128 #shift a set number of positions to the right along the second axis
129 129 shift = int(self.dataOut.nFFTPoints/2)
130 130 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
131 131
132 132 if self.dataOut.data_cspc is not None:
133 133 #shift a set number of positions to the right along the second axis
134 134 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
135 135 if pairsList:
136 136 self.__selectPairs(pairsList)
137 137
138 138 elif self.dataIn.type == "Voltage":
139 139
140 140 self.dataOut.flagNoData = True
141 141
142 142 if nFFTPoints == None:
143 143 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144 144
145 145 if nProfiles == None:
146 146 nProfiles = nFFTPoints
147 147
148 148 if ippFactor == None:
149 149 self.dataOut.ippFactor = 1
150 150
151 151 self.dataOut.nFFTPoints = nFFTPoints
152 152
153 153 if self.buffer is None:
154 154 self.buffer = numpy.zeros((self.dataIn.nChannels,
155 155 nProfiles,
156 156 self.dataIn.nHeights),
157 157 dtype='complex')
158 158
159 159 if self.dataIn.flagDataAsBlock:
160 160 nVoltProfiles = self.dataIn.data.shape[1]
161 161
162 162 if nVoltProfiles == nProfiles:
163 163 self.buffer = self.dataIn.data.copy()
164 164 self.profIndex = nVoltProfiles
165 165
166 166 elif nVoltProfiles < nProfiles:
167 167
168 168 if self.profIndex == 0:
169 169 self.id_min = 0
170 170 self.id_max = nVoltProfiles
171 171
172 172 self.buffer[:, self.id_min:self.id_max,
173 173 :] = self.dataIn.data
174 174 self.profIndex += nVoltProfiles
175 175 self.id_min += nVoltProfiles
176 176 self.id_max += nVoltProfiles
177 177 else:
178 178 raise ValueError("The type object %s has %d profiles, it should have only %d profiles" % (
179 179 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
180 180 self.dataOut.flagNoData = True
181 181 else:
182 182 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
183 183 self.profIndex += 1
184 184
185 185 if self.firstdatatime == None:
186 186 self.firstdatatime = self.dataIn.utctime
187 187
188 188 if self.profIndex == nProfiles:
189 189 self.__updateSpecFromVoltage()
190 190 if pairsList == None:
191 191 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
192 192 else:
193 193 self.dataOut.pairsList = pairsList
194 194 self.__getFft()
195 195 self.dataOut.flagNoData = False
196 196 self.firstdatatime = None
197 197 self.profIndex = 0
198 198 else:
199 199 raise ValueError("The type of input object '%s' is not valid" % (
200 200 self.dataIn.type))
201 201
202 202 def __selectPairs(self, pairsList):
203 203
204 204 if not pairsList:
205 205 return
206 206
207 207 pairs = []
208 208 pairsIndex = []
209 209
210 210 for pair in pairsList:
211 211 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
212 212 continue
213 213 pairs.append(pair)
214 214 pairsIndex.append(pairs.index(pair))
215 215
216 216 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
217 217 self.dataOut.pairsList = pairs
218 218
219 219 return
220 220
221 221 def selectFFTs(self, minFFT, maxFFT ):
222 222 """
223 223 Selects a block of data according to a set of FFT-point values within the range
224 224 minFFT <= FFT <= maxFFT
225 225 """
226 226
227 227 if (minFFT > maxFFT):
228 228 raise ValueError("Error selecting FFTs: FFT range (%d,%d) is not valid" % (minFFT, maxFFT))
229 229
230 230 if (minFFT < self.dataOut.getFreqRange()[0]):
231 231 minFFT = self.dataOut.getFreqRange()[0]
232 232
233 233 if (maxFFT > self.dataOut.getFreqRange()[-1]):
234 234 maxFFT = self.dataOut.getFreqRange()[-1]
235 235
236 236 minIndex = 0
237 237 maxIndex = 0
238 238 FFTs = self.dataOut.getFreqRange()
239 239
240 240 inda = numpy.where(FFTs >= minFFT)
241 241 indb = numpy.where(FFTs <= maxFFT)
242 242
243 243 try:
244 244 minIndex = inda[0][0]
245 245 except:
246 246 minIndex = 0
247 247
248 248 try:
249 249 maxIndex = indb[0][-1]
250 250 except:
251 251 maxIndex = len(FFTs)
252 252
253 253 self.selectFFTsByIndex(minIndex, maxIndex)
254 254
255 255 return 1
256 256
257 257 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
258 258 newheis = numpy.where(
259 259 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
260 260
261 261 if hei_ref != None:
262 262 newheis = numpy.where(self.dataOut.heightList > hei_ref)
263 263
264 264 minIndex = min(newheis[0])
265 265 maxIndex = max(newheis[0])
266 266 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
267 267 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
268 268
269 269 # determine indices
270 270 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
271 271 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
272 272 avg_dB = 10 * \
273 273 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
274 274 beacon_dB = numpy.sort(avg_dB)[-nheis:]
275 275 beacon_heiIndexList = []
276 276 for val in avg_dB.tolist():
277 277 if val >= beacon_dB[0]:
278 278 beacon_heiIndexList.append(avg_dB.tolist().index(val))
279 279
280 280 #data_spc = data_spc[:,:,beacon_heiIndexList]
281 281 data_cspc = None
282 282 if self.dataOut.data_cspc is not None:
283 283 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
284 284 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
285 285
286 286 data_dc = None
287 287 if self.dataOut.data_dc is not None:
288 288 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
289 289 #data_dc = data_dc[:,beacon_heiIndexList]
290 290
291 291 self.dataOut.data_spc = data_spc
292 292 self.dataOut.data_cspc = data_cspc
293 293 self.dataOut.data_dc = data_dc
294 294 self.dataOut.heightList = heightList
295 295 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
296 296
297 297 return 1
298 298
299 299 def selectFFTsByIndex(self, minIndex, maxIndex):
300 300 """
301 301
302 302 """
303 303
304 304 if (minIndex < 0) or (minIndex > maxIndex):
305 305 raise ValueError("Error selecting FFTs: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
306 306
307 307 if (maxIndex >= self.dataOut.nProfiles):
308 308 maxIndex = self.dataOut.nProfiles-1
309 309
310 310 #Spectra
311 311 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
312 312
313 313 data_cspc = None
314 314 if self.dataOut.data_cspc is not None:
315 315 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
316 316
317 317 data_dc = None
318 318 if self.dataOut.data_dc is not None:
319 319 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
320 320
321 321 self.dataOut.data_spc = data_spc
322 322 self.dataOut.data_cspc = data_cspc
323 323 self.dataOut.data_dc = data_dc
324 324
325 325 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
326 326 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
327 327 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
328 328
329 329 return 1
330 330
331 331 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
332 332 # height range validation
333 333 if minHei == None:
334 334 minHei = self.dataOut.heightList[0]
335 335
336 336 if maxHei == None:
337 337 maxHei = self.dataOut.heightList[-1]
338 338
339 339 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
340 340 print('minHei: %.2f is out of the heights range' % (minHei))
341 341 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
342 342 minHei = self.dataOut.heightList[0]
343 343
344 344 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
345 345 print('maxHei: %.2f is out of the heights range' % (maxHei))
346 346 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
347 347 maxHei = self.dataOut.heightList[-1]
348 348
349 349 # velocity range validation
350 350 velrange = self.dataOut.getVelRange(1)
351 351
352 352 if minVel == None:
353 353 minVel = velrange[0]
354 354
355 355 if maxVel == None:
356 356 maxVel = velrange[-1]
357 357
358 358 if (minVel < velrange[0]) or (minVel > maxVel):
359 359 print('minVel: %.2f is out of the velocity range' % (minVel))
360 360 print('minVel is set to %.2f' % (velrange[0]))
361 361 minVel = velrange[0]
362 362
363 363 if (maxVel > velrange[-1]) or (maxVel < minVel):
364 364 print('maxVel: %.2f is out of the velocity range' % (maxVel))
365 365 print('maxVel is set to %.2f' % (velrange[-1]))
366 366 maxVel = velrange[-1]
367 367
368 368 # index selection for the height range
369 369 minIndex = 0
370 370 maxIndex = 0
371 371 heights = self.dataOut.heightList
372 372
373 373 inda = numpy.where(heights >= minHei)
374 374 indb = numpy.where(heights <= maxHei)
375 375
376 376 try:
377 377 minIndex = inda[0][0]
378 378 except:
379 379 minIndex = 0
380 380
381 381 try:
382 382 maxIndex = indb[0][-1]
383 383 except:
384 384 maxIndex = len(heights)
385 385
386 386 if (minIndex < 0) or (minIndex > maxIndex):
387 387 raise ValueError("some value in (%d,%d) is not valid" % (
388 388 minIndex, maxIndex))
389 389
390 390 if (maxIndex >= self.dataOut.nHeights):
391 391 maxIndex = self.dataOut.nHeights - 1
392 392
393 393 # index selection for velocities
394 394 indminvel = numpy.where(velrange >= minVel)
395 395 indmaxvel = numpy.where(velrange <= maxVel)
396 396 try:
397 397 minIndexVel = indminvel[0][0]
398 398 except:
399 399 minIndexVel = 0
400 400
401 401 try:
402 402 maxIndexVel = indmaxvel[0][-1]
403 403 except:
404 404 maxIndexVel = len(velrange)
405 405
406 406 # spectrum selection
407 407 data_spc = self.dataOut.data_spc[:,
408 408 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
409 409 # noise estimation
410 410 noise = numpy.zeros(self.dataOut.nChannels)
411 411
412 412 for channel in range(self.dataOut.nChannels):
413 413 daux = data_spc[channel, :, :]
414 414 sortdata = numpy.sort(daux, axis=None)
415 415 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
416 416
417 417 self.dataOut.noise_estimation = noise.copy()
418 418
419 419 return 1
420 420
421 421 class removeDC(Operation):
422 422
423 423 def run(self, dataOut, mode=2):
424 424 self.dataOut = dataOut
425 425 jspectra = self.dataOut.data_spc
426 426 jcspectra = self.dataOut.data_cspc
427 427
428 428 num_chan = jspectra.shape[0]
429 429 num_hei = jspectra.shape[2]
430 430
431 431 if jcspectra is not None:
432 432 jcspectraExist = True
433 433 num_pairs = jcspectra.shape[0]
434 434 else:
435 435 jcspectraExist = False
436 436
437 437 freq_dc = int(jspectra.shape[1] / 2)
438 438 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
439 439 ind_vel = ind_vel.astype(int)
440 440
441 441 if ind_vel[0] < 0:
442 442 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + jspectra.shape[1] # wrap negative indices by the number of profiles
443 443
444 444 if mode == 1:
445 445 jspectra[:, freq_dc, :] = (
446 446 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
447 447
448 448 if jcspectraExist:
449 449 jcspectra[:, freq_dc, :] = (
450 450 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
451 451
452 452 if mode == 2:
453 453
454 454 vel = numpy.array([-2, -1, 1, 2])
455 455 xx = numpy.zeros([4, 4])
456 456
457 457 for fil in range(4):
458 458 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
459 459
460 460 xx_inv = numpy.linalg.inv(xx)
461 461 xx_aux = xx_inv[0, :]
462 462
463 463 for ich in range(num_chan):
464 464 yy = jspectra[ich, ind_vel, :]
465 465 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
466 466
467 467 junkid = jspectra[ich, freq_dc, :] <= 0
468 468 cjunkid = sum(junkid)
469 469
470 470 if cjunkid.any():
471 471 jspectra[ich, freq_dc, junkid.nonzero()] = (
472 472 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
473 473
474 474 if jcspectraExist:
475 475 for ip in range(num_pairs):
476 476 yy = jcspectra[ip, ind_vel, :]
477 477 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
478 478
479 479 self.dataOut.data_spc = jspectra
480 480 self.dataOut.data_cspc = jcspectra
481 481
482 482 return self.dataOut
483 483
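# ---- Editor's illustrative sketch (not part of the original diff) ----------
# The mode=2 interpolation used by removeDC, isolated: a cubic polynomial is
# passed through the spectrum values at Doppler bins -2, -1, 1, 2 around DC,
# and the first row of the inverse Vandermonde matrix evaluates that cubic at
# bin 0, giving the replacement value for the contaminated DC bin. The sample
# spectrum values are assumptions.
import numpy

vel = numpy.array([-2, -1, 1, 2])
xx = numpy.zeros((4, 4))
for fil in range(4):
    xx[fil, :] = vel[fil]**numpy.arange(4)        # Vandermonde row [1, v, v**2, v**3]

xx_aux = numpy.linalg.inv(xx)[0, :]               # weights that evaluate the cubic at 0

yy = numpy.array([4.0, 2.5, 2.3, 3.8])            # spectrum at bins -2, -1, 1, 2 (assumed)
dc_estimate = numpy.dot(xx_aux, yy)               # interpolated DC value
print("interpolated DC value: %.3f" % dc_estimate)
# ----------------------------------------------------------------------------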
484 484 # import matplotlib.pyplot as plt
485 485
486 486 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
487 487 z = (x - a1) / a2
488 488 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
489 489 return y
490 490 class CleanRayleigh(Operation):
491 491
492 492 def __init__(self):
493 493
494 494 Operation.__init__(self)
495 495 self.i=0
496 496 self.isConfig = False
497 497 self.__dataReady = False
498 498 self.__profIndex = 0
499 499 self.byTime = False
500 500 self.byProfiles = False
501 501
502 502 self.bloques = None
503 503 self.bloque0 = None
504 504
505 505 self.index = 0
506 506
507 507 self.buffer = 0
508 508 self.buffer2 = 0
509 509 self.buffer3 = 0
510 510
511 511
512 512 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
513 513
514 514 self.nChannels = dataOut.nChannels
515 515 self.nProf = dataOut.nProfiles
516 516 self.nPairs = dataOut.data_cspc.shape[0]
517 517 self.pairsArray = numpy.array(dataOut.pairsList)
518 518 self.spectra = dataOut.data_spc
519 519 self.cspectra = dataOut.data_cspc
520 520 self.heights = dataOut.heightList #all heights
521 521 self.nHeights = len(self.heights)
522 522 self.min_hei = min_hei
523 523 self.max_hei = max_hei
524 524 if (self.min_hei == None):
525 525 self.min_hei = 0
526 526 if (self.max_hei == None):
527 527 self.max_hei = dataOut.heightList[-1]
528 528 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
529 529 self.heightsClean = self.heights[self.hval] #filtered heights
530 530 self.hval = self.hval[0] # shape (N,), only N elements -> height indices
531 531 self.nHeightsClean = len(self.heightsClean)
532 532 self.channels = dataOut.channelList
533 533 self.nChan = len(self.channels)
534 534 self.nIncohInt = dataOut.nIncohInt
535 535 self.__initime = dataOut.utctime
536 536 self.maxAltInd = self.hval[-1]+1
537 537 self.minAltInd = self.hval[0]
538 538
539 539 self.crosspairs = dataOut.pairsList
540 540 self.nPairs = len(self.crosspairs)
541 541 self.normFactor = dataOut.normFactor
542 542 self.nFFTPoints = dataOut.nFFTPoints
543 543 self.ippSeconds = dataOut.ippSeconds
544 544 self.currentTime = self.__initime
545 545 self.pairsArray = numpy.array(dataOut.pairsList)
546 546 self.factor_stdv = factor_stdv
547 print("CHANNELS: ",[x for x in self.channels])
547 #print("CHANNELS: ",[x for x in self.channels])
548 548
549 549 if n != None :
550 550 self.byProfiles = True
551 551 self.nIntProfiles = n
552 552 else:
553 553 self.__integrationtime = timeInterval
554 554
555 555 self.__dataReady = False
556 556 self.isConfig = True
557 557
558 558
559 559
560 560 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
561 561 #print (dataOut.utctime)
562 562 if not self.isConfig :
563 563 #print("Setting config")
564 564 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
565 565 #print("Config Done")
566 566 tini=dataOut.utctime
567 567
568 568 if self.byProfiles:
569 569 if self.__profIndex == self.nIntProfiles:
570 570 self.__dataReady = True
571 571 else:
572 572 if (tini - self.__initime) >= self.__integrationtime:
573 573 #print(tini - self.__initime,self.__profIndex)
574 574 self.__dataReady = True
575 575 self.__initime = tini
576 576
577 577 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
578 578
579 579 if self.__dataReady:
580 print("Data ready",self.__profIndex)
580 #print("Data ready",self.__profIndex)
581 581 self.__profIndex = 0
582 582 jspc = self.buffer
583 583 jcspc = self.buffer2
584 584 #jnoise = self.buffer3
585 585 self.buffer = dataOut.data_spc
586 586 self.buffer2 = dataOut.data_cspc
587 587 #self.buffer3 = dataOut.noise
588 588 self.currentTime = dataOut.utctime
589 589 if numpy.any(jspc) :
590 590 #print( jspc.shape, jcspc.shape)
591 591 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
592 592 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
593 593 self.__dataReady = False
594 594 #print( jspc.shape, jcspc.shape)
595 595 dataOut.flagNoData = False
596 596 else:
597 597 dataOut.flagNoData = True
598 598 self.__dataReady = False
599 599 return dataOut
600 600 else:
601 601 #print( len(self.buffer))
602 602 if numpy.any(self.buffer):
603 603 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
604 604 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
605 605 self.buffer3 += dataOut.data_dc
606 606 else:
607 607 self.buffer = dataOut.data_spc
608 608 self.buffer2 = dataOut.data_cspc
609 609 self.buffer3 = dataOut.data_dc
610 610 #print self.index, self.fint
611 611 #print self.buffer2.shape
612 612 dataOut.flagNoData = True ## NOTE: ?? review LATER
613 613 self.__profIndex += 1
614 614 return dataOut ## NOTE: REV
615 615
616 616
617 617 #index = tini.tm_hour*12+tini.tm_min/5
618 618 '''REVISAR'''
619 619 # jspc = jspc/self.nFFTPoints/self.normFactor
620 620 # jcspc = jcspc/self.nFFTPoints/self.normFactor
621 621
622 622
623 #dataOut.data_spc,dataOut.data_cspc = self.CleanRayleigh(dataOut,jspc,jcspc,crosspairs,heights,channels,nProf,nHei,nChan,nPairs,nIncohInt,nBlocks=nBlocks)
624 #tmp_spectra,tmp_cspectra,sat_spectra,sat_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.min_hei,self.max_hei)
625 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
626 #jspectra = tmp_spectra*len(jspc[:,0,0,0])
627 #jcspectra = tmp_cspectra*len(jspc[:,0,0,0])
628 623
624 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
629 625 dataOut.data_spc = tmp_spectra
630 626 dataOut.data_cspc = tmp_cspectra
627
628 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
629
631 630 dataOut.data_dc = self.buffer3
632 631 dataOut.nIncohInt *= self.nIntProfiles
634 633 dataOut.utctime = self.currentTime #averaged time
634 633 #print("Time: ",time.localtime(dataOut.utctime))
635 634 # dataOut.data_spc = sat_spectra
636 635 # dataOut.data_cspc = sat_cspectra
637 636 self.buffer = 0
638 637 self.buffer2 = 0
639 638 self.buffer3 = 0
640 639
641 640 return dataOut
642 641
643 642 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
644 print("OP cleanRayleigh")
643 #print("OP cleanRayleigh")
645 644 #import matplotlib.pyplot as plt
646 645 #for k in range(149):
647 646
648 647 rfunc = cspectra.copy() #self.bloques
649 val_spc = spectra*0.0 #self.bloque0*0.0
650 val_cspc = cspectra*0.0 #self.bloques*0.0
651 in_sat_spectra = spectra.copy() #self.bloque0
652 in_sat_cspectra = cspectra.copy() #self.bloques
648 #rfunc = cspectra
649 #val_spc = spectra*0.0 #self.bloque0*0.0
650 #val_cspc = cspectra*0.0 #self.bloques*0.0
651 #in_sat_spectra = spectra.copy() #self.bloque0
652 #in_sat_cspectra = cspectra.copy() #self.bloques
653 653
654 raxs = math.ceil(math.sqrt(self.nPairs))
655 caxs = math.ceil(self.nPairs/raxs)
654 #raxs = math.ceil(math.sqrt(self.nPairs))
655 #caxs = math.ceil(self.nPairs/raxs)
656 656
657 657 #print(self.hval)
658 658 #print numpy.absolute(rfunc[:,0,0,14])
659 gauss_fit, covariance = None, None
659 660 for ih in range(self.minAltInd,self.maxAltInd):
660 661 for ifreq in range(self.nFFTPoints):
661 662 # fig, axs = plt.subplots(raxs, caxs)
662 663 # fig2, axs2 = plt.subplots(raxs, caxs)
663 col_ax = 0
664 row_ax = 0
664 # col_ax = 0
665 # row_ax = 0
666 #print(len(self.nPairs))
665 667 for ii in range(self.nPairs): #SELF and CROSS channel pairs
666 668 #print("ii: ",ii)
667 if (col_ax%caxs==0 and col_ax!=0):
668 col_ax = 0
669 row_ax += 1
669 # if (col_ax%caxs==0 and col_ax!=0):
670 # col_ax = 0
671 # row_ax += 1
670 672 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #Power?
671 673 #print(func2clean.shape)
672 674 val = (numpy.isfinite(func2clean)==True).nonzero()
673 675
674 676 if len(val)>0: #limitador
675 677 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
676 678 if min_val <= -40 :
677 679 min_val = -40
678 680 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
679 681 if max_val >= 200 :
680 682 max_val = 200
681 683 #print min_val, max_val
682 684 step = 1
683 685 #print("Getting bins and the histogram")
684 686 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
685 687 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
686 688 #print(len(y_dist),len(binstep[:-1]))
687 689 #print(row_ax,col_ax, " ..")
688 690 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
689 691 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
690 692 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
691 693 parg = [numpy.amax(y_dist),mean,sigma]
692 gauss_fit, covariance = None, None
693 newY = None
694
695 #newY = None
696
694 697 try :
695 698 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
696 699 mode = gauss_fit[1]
697 700 stdv = gauss_fit[2]
698 701 #print(" FIT OK",gauss_fit)
699 702 '''
700 703 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
701 704 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
702 705 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
703 706 axs[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))'''
704 707 except:
705 708 mode = mean
706 709 stdv = sigma
707 710 #print("FIT FAIL")
708 711
709 712
710 713 #print(mode,stdv)
711 #Removing echoes greater than mode + 3*stdv
712 #factor_stdv = 2
714 #Removing echoes greater than mode + std_factor*stdv
713 715 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
714 716 #noval holds the indices that will be removed
715 717 #print("Pair ",ii," novals: ",len(noval[0]))
716 718 if len(noval[0]) > 0: #an array of shape (N,) is equal to length (N)
717 719 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
718 720 #print(novall)
719 721 #print(" ",self.pairsArray[ii])
720 722 cross_pairs = self.pairsArray[ii]
721 723 #Getting coherent echoes which are removed.
722 724 # if len(novall[0]) > 0:
723 725 #
724 726 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
725 727 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
726 728 # val_cspc[novall[0],ii,ifreq,ih] = 1
727 729 #print("OUT NOVALL 1")
728 730 #Removing coherent from ISR data
729 731 chA = self.channels.index(cross_pairs[0])
730 732 chB = self.channels.index(cross_pairs[1])
731 733
732 734 new_a = numpy.delete(cspectra[:,ii,ifreq,ih], noval[0])
733 mean_cspc = numpy.mean(new_a)
735 cspectra[noval,ii,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
734 736 new_b = numpy.delete(spectra[:,chA,ifreq,ih], noval[0])
735 mean_spc0 = numpy.mean(new_b)
737 spectra[noval,chA,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
736 738 new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
737 mean_spc1 = numpy.mean(new_c)
738 spectra[noval,chA,ifreq,ih] = mean_spc0
739 spectra[noval,chB,ifreq,ih] = mean_spc1
740 cspectra[noval,ii,ifreq,ih] = mean_cspc
739 spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
740
741 741
742 742 '''
743 743 func2clean = 10*numpy.log10(numpy.absolute(cspectra[:,ii,ifreq,ih]))
744 744 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
745 745 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
746 746 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
747 747 axs2[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))
748 748 '''
749 749
750 #col_ax += 1 #plot-column counter
750 #col_ax += 1 #contador de ploteo columnas
751 751 ##print(col_ax)
752 752 '''
753 753 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
754 754 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
755 755 fig.suptitle(title)
756 756 fig2.suptitle(title2)
757 757 plt.show()'''
758 758
759 759 ''' channels = channels
760 760 cross_pairs = cross_pairs
761 761 #print("OUT NOVALL 2")
762 762
763 763 vcross0 = (cross_pairs[0] == channels[ii]).nonzero()
764 764 vcross1 = (cross_pairs[1] == channels[ii]).nonzero()
765 765 vcross = numpy.concatenate((vcross0,vcross1),axis=None)
766 766 #print('vcros =', vcross)
767 767
768 768 #Getting coherent echoes which are removed.
769 769 if len(novall) > 0:
770 770 #val_spc[novall,ii,ifreq,ih] = 1
771 771 val_spc[ii,ifreq,ih,novall] = 1
772 772 if len(vcross) > 0:
773 773 val_cspc[vcross,ifreq,ih,novall] = 1
774 774
775 775 #Removing coherent from ISR data.
776 776 self.bloque0[ii,ifreq,ih,noval] = numpy.nan
777 777 if len(vcross) > 0:
778 778 self.bloques[vcross,ifreq,ih,noval] = numpy.nan
779 779 '''
780 780
781 print("Getting average of the spectra and cross-spectra from incoherent echoes.")
781 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
782 782 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
783 783 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
784 784 for ih in range(self.nHeights):
785 785 for ifreq in range(self.nFFTPoints):
786 786 for ich in range(self.nChan):
787 787 tmp = spectra[:,ich,ifreq,ih]
788 788 valid = (numpy.isfinite(tmp[:])==True).nonzero()
789 789 # if ich == 0 and ifreq == 0 and ih == 17 :
790 790 # print tmp
791 791 # print valid
792 792 # print len(valid[0])
793 793 #print('TMP',tmp)
794 794 if len(valid[0]) >0 :
795 795 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
796 796 #for icr in range(nPairs):
797 797 for icr in range(self.nPairs):
798 798 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
799 799 valid = (numpy.isfinite(tmp)==True).nonzero()
800 800 if len(valid[0]) > 0:
801 801 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
802 802 '''
803 803 # print('##########################################################')
804 804 print("Removing fake coherent echoes (at least 4 points around the point)")
805 805
806 806 val_spectra = numpy.sum(val_spc,0)
807 807 val_cspectra = numpy.sum(val_cspc,0)
808 808
809 809 val_spectra = self.REM_ISOLATED_POINTS(val_spectra,4)
810 810 val_cspectra = self.REM_ISOLATED_POINTS(val_cspectra,4)
811 811
812 812 for i in range(nChan):
813 813 for j in range(nProf):
814 814 for k in range(nHeights):
815 815 if numpy.isfinite(val_spectra[i,j,k]) and val_spectra[i,j,k] < 1 :
816 816 val_spc[:,i,j,k] = 0.0
817 817 for i in range(nPairs):
818 818 for j in range(nProf):
819 819 for k in range(nHeights):
820 820 if numpy.isfinite(val_cspectra[i,j,k]) and val_cspectra[i,j,k] < 1 :
821 821 val_cspc[:,i,j,k] = 0.0
822 822
823 823 # val_spc = numpy.reshape(val_spc, (len(spectra[:,0,0,0]),nProf*nHeights*nChan))
824 824 # if numpy.isfinite(val_spectra)==str(True):
825 825 # noval = (val_spectra<1).nonzero()
826 826 # if len(noval) > 0:
827 827 # val_spc[:,noval] = 0.0
828 828 # val_spc = numpy.reshape(val_spc, (149,nChan,nProf,nHeights))
829 829
830 830 #val_cspc = numpy.reshape(val_spc, (149,nChan*nHeights*nProf))
831 831 #if numpy.isfinite(val_cspectra)==str(True):
832 832 # noval = (val_cspectra<1).nonzero()
833 833 # if len(noval) > 0:
834 834 # val_cspc[:,noval] = 0.0
835 835 # val_cspc = numpy.reshape(val_cspc, (149,nChan,nProf,nHeights))
836 836 tmp_sat_spectra = spectra.copy()
837 837 tmp_sat_spectra = tmp_sat_spectra*numpy.nan
838 838 tmp_sat_cspectra = cspectra.copy()
839 839 tmp_sat_cspectra = tmp_sat_cspectra*numpy.nan
840 840 '''
841 841 # fig = plt.figure(figsize=(6,5))
842 842 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
843 843 # ax = fig.add_axes([left, bottom, width, height])
844 844 # cp = ax.contour(10*numpy.log10(numpy.absolute(spectra[0,0,:,:])))
845 845 # ax.clabel(cp, inline=True,fontsize=10)
846 846 # plt.show()
847 847 '''
848 848 val = (val_spc > 0).nonzero()
849 849 if len(val[0]) > 0:
850 850 tmp_sat_spectra[val] = in_sat_spectra[val]
851 851 val = (val_cspc > 0).nonzero()
852 852 if len(val[0]) > 0:
853 853 tmp_sat_cspectra[val] = in_sat_cspectra[val]
854 854
855 855 print("Getting average of the spectra and cross-spectra from incoherent echoes 2")
856 856 sat_spectra = numpy.zeros((nChan,nProf,nHeights), dtype=float)
857 857 sat_cspectra = numpy.zeros((nPairs,nProf,nHeights), dtype=complex)
858 858 for ih in range(nHeights):
859 859 for ifreq in range(nProf):
860 860 for ich in range(nChan):
861 861 tmp = numpy.squeeze(tmp_sat_spectra[:,ich,ifreq,ih])
862 862 valid = (numpy.isfinite(tmp)).nonzero()
863 863 if len(valid[0]) > 0:
864 864 sat_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
865 865
866 866 for icr in range(nPairs):
867 867 tmp = numpy.squeeze(tmp_sat_cspectra[:,icr,ifreq,ih])
868 868 valid = (numpy.isfinite(tmp)).nonzero()
869 869 if len(valid[0]) > 0:
870 870 sat_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
871 871 '''
872 872 #self.__dataReady= True
873 873 #sat_spectra, sat_cspectra= sat_spectra, sat_cspectra
874 874 #if not self.__dataReady:
875 875 #return None, None
876 876 #return out_spectra, out_cspectra ,sat_spectra,sat_cspectra
877 877 return out_spectra, out_cspectra
878 878
879 879 def REM_ISOLATED_POINTS(self,array,rth):
880 880 # import matplotlib.pyplot as plt
881 881 if rth is None:
882 882 rth = 4
883 883 print("REM ISO")
884 884 num_prof = len(array[0,:,0])
885 885 num_hei = len(array[0,0,:])
886 886 n2d = len(array[:,0,0])
887 887
888 888 for ii in range(n2d) :
889 889 #print ii,n2d
890 890 tmp = array[ii,:,:]
891 891 #print tmp.shape, array[ii,101,:],array[ii,102,:]
892 892
893 893 # fig = plt.figure(figsize=(6,5))
894 894 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
895 895 # ax = fig.add_axes([left, bottom, width, height])
896 896 # x = range(num_prof)
897 897 # y = range(num_hei)
898 898 # cp = ax.contour(y,x,tmp)
899 899 # ax.clabel(cp, inline=True,fontsize=10)
900 900 # plt.show()
901 901
902 902 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
903 903 tmp = numpy.reshape(tmp,num_prof*num_hei)
904 904 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
905 905 indxs2 = (tmp > 0).nonzero()
906 906
907 907 indxs1 = (indxs1[0])
908 908 indxs2 = indxs2[0]
909 909 #indxs1 = numpy.array(indxs1[0])
910 910 #indxs2 = numpy.array(indxs2[0])
911 911 indxs = None
912 912 #print indxs1 , indxs2
913 913 for iv in range(len(indxs2)):
914 914 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
915 915 #print len(indxs2), indv
916 916 if len(indv[0]) > 0 :
917 917 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
918 918 # print indxs
919 919 indxs = indxs[1:]
920 920 #print(indxs, len(indxs))
921 921 if len(indxs) < 4 :
922 922 array[ii,:,:] = 0.
923 923 return
924 924
925 925 xpos = numpy.mod(indxs ,num_hei)
926 926 ypos = (indxs / num_hei)
927 927 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
928 928 #print sx
929 929 xpos = xpos[sx]
930 930 ypos = ypos[sx]
931 931
932 932 # *********************************** Cleaning isolated points **********************************
933 933 ic = 0
934 934 while True :
935 935 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
936 936 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
937 937 #plt.plot(r)
938 938 #plt.show()
939 939 no_coh1 = (numpy.isfinite(r)==True).nonzero()
940 940 no_coh2 = (r <= rth).nonzero()
941 941 #print r, no_coh1, no_coh2
942 942 no_coh1 = numpy.array(no_coh1[0])
943 943 no_coh2 = numpy.array(no_coh2[0])
944 944 no_coh = None
945 945 #print valid1 , valid2
946 946 for iv in range(len(no_coh2)):
947 947 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
948 948 if len(indv[0]) > 0 :
949 949 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
950 950 no_coh = no_coh[1:]
951 951 #print len(no_coh), no_coh
952 952 if len(no_coh) < 4 :
953 953 #print xpos[ic], ypos[ic], ic
954 954 # plt.plot(r)
955 955 # plt.show()
956 956 xpos[ic] = numpy.nan
957 957 ypos[ic] = numpy.nan
958 958
959 959 ic = ic + 1
960 960 if (ic == len(indxs)) :
961 961 break
962 962 #print( xpos, ypos)
963 963
964 964 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
965 965 #print indxs[0]
966 966 if len(indxs[0]) < 4 :
967 967 array[ii,:,:] = 0.
968 968 return
969 969
970 970 xpos = xpos[indxs[0]]
971 971 ypos = ypos[indxs[0]]
972 972 for i in range(0,len(ypos)):
973 973 ypos[i]=int(ypos[i])
974 974 junk = tmp
975 975 tmp = junk*0.0
976 976
977 977 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
978 978 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
979 979
980 980 #print array.shape
981 981 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
982 982 #print tmp.shape
983 983
984 984 # fig = plt.figure(figsize=(6,5))
985 985 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
986 986 # ax = fig.add_axes([left, bottom, width, height])
987 987 # x = range(num_prof)
988 988 # y = range(num_hei)
989 989 # cp = ax.contour(y,x,array[ii,:,:])
990 990 # ax.clabel(cp, inline=True,fontsize=10)
991 991 # plt.show()
992 992 return array
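# A minimal, self-contained sketch of the neighbour-count criterion used above
# (illustrative only, not part of the operational code): a point in the
# (profile, height) plane is kept when at least 4 finite points, itself
# included, lie within a Euclidean distance `rth` of it. The function name
# `keep_non_isolated` is hypothetical; it relies on the module-level numpy import.
#
#   def keep_non_isolated(xpos, ypos, rth=4):
#       xpos = numpy.asarray(xpos, dtype=float)
#       ypos = numpy.asarray(ypos, dtype=float)
#       keep = numpy.zeros(xpos.size, dtype=bool)
#       for ic in range(xpos.size):
#           r = numpy.sqrt((xpos[ic] - xpos)**2 + (ypos[ic] - ypos)**2)
#           keep[ic] = numpy.count_nonzero(numpy.isfinite(r) & (r <= rth)) >= 4
#       return keep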
993 993
994 994 class removeInterference(Operation):
995 995
996 996 def removeInterference2(self):
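"""
Flag narrow-band interference in the cross-spectra.

For each channel pair, heights whose summed |cross-spectrum| falls below a
power threshold are taken as interference-dominated; the Doppler lines whose
summed magnitude over those heights exceeds the 98th percentile are set to
NaN, as long as the flagged range stays below roughly 30% of the profiles.
"""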
997 997
998 998 cspc = self.dataOut.data_cspc
999 999 spc = self.dataOut.data_spc
1000 1000 Heights = numpy.arange(cspc.shape[2])
1001 1001 realCspc = numpy.abs(cspc)
1002 1002
1003 1003 for i in range(cspc.shape[0]):
1004 1004 LinePower= numpy.sum(realCspc[i], axis=0)
1005 1005 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1006 1006 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1007 1007 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1008 1008 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1009 1009 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1010 1010
1011 1011
1012 1012 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1013 1013 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1014 1014 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1015 1015 cspc[i,InterferenceRange,:] = numpy.NaN
1016 1016
1017 1017 self.dataOut.data_cspc = cspc
1018 1018
1019 1019 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
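"""
Remove coherent interference from the power spectra and cross-spectra.

The interference spectrum of each channel is estimated from the
`nhei_interf` lowest-power heights among `hei_interf` (by default the upper
half of the range gates, offset by `offhei_interf`) and subtracted from every
height; the most-interfered profile is then rebuilt by interpolation and
values driven below the noise floor are raised back to it.
"""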
1020 1020
1021 1021 jspectra = self.dataOut.data_spc
1022 1022 jcspectra = self.dataOut.data_cspc
1023 1023 jnoise = self.dataOut.getNoise()
1024 1024 num_incoh = self.dataOut.nIncohInt
1025 1025
1026 1026 num_channel = jspectra.shape[0]
1027 1027 num_prof = jspectra.shape[1]
1028 1028 num_hei = jspectra.shape[2]
1029 1029
1030 1030 # hei_interf
1031 1031 if hei_interf is None:
1032 1032 count_hei = int(num_hei / 2)
1033 1033 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1034 1034 hei_interf = numpy.asarray(hei_interf)[0]
1035 1035 # nhei_interf
1036 1036 if nhei_interf is None:
1037 1037 nhei_interf = 5
1038 1038 if (nhei_interf < 1):
1039 1039 nhei_interf = 1
1040 1040 if (nhei_interf > count_hei):
1041 1041 nhei_interf = count_hei
1042 1042 if offhei_interf is None:
1043 1043 offhei_interf = 0
1044 1044
1045 1045 ind_hei = list(range(num_hei))
1046 1046 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1047 1047 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1048 1048 mask_prof = numpy.asarray(list(range(num_prof)))
1049 1049 num_mask_prof = mask_prof.size
1050 1050 comp_mask_prof = [0, num_prof / 2]
1051 1051
1052 1052 # noise_exist: determines whether the jnoise variable has been defined and contains the noise information for each channel
1053 1053 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1054 1054 jnoise = numpy.nan * numpy.ones(num_channel) # keep jnoise indexable so the noise_exist test below does not fail
1055 1055 noise_exist = jnoise[0] < numpy.Inf
1056 1056
1057 1057 # Interference removal subroutine
1058 1058 for ich in range(num_channel):
1059 1059 # Sort the spectra by their power (lowest to highest)
1060 1060 power = jspectra[ich, mask_prof, :]
1061 1061 power = power[:, hei_interf]
1062 1062 power = power.sum(axis=0)
1063 1063 psort = power.ravel().argsort()
1064 1064
1065 1065 # Estimate the average interference in the power spectra using the lowest-power heights
1066 1066 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1067 1067 offhei_interf, nhei_interf + offhei_interf))]]]
1068 1068
1069 1069 if noise_exist:
1070 1070 # tmp_noise = jnoise[ich] / num_prof
1071 1071 tmp_noise = jnoise[ich]
1072 1072 junkspc_interf = junkspc_interf - tmp_noise
1073 1073 #junkspc_interf[:,comp_mask_prof] = 0
1074 1074
1075 1075 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1076 1076 jspc_interf = jspc_interf.transpose()
1077 1077 # Compute the average interference spectrum
1078 1078 noiseid = numpy.where(
1079 1079 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1080 1080 noiseid = noiseid[0]
1081 1081 cnoiseid = noiseid.size
1082 1082 interfid = numpy.where(
1083 1083 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1084 1084 interfid = interfid[0]
1085 1085 cinterfid = interfid.size
1086 1086
1087 1087 if (cnoiseid > 0):
1088 1088 jspc_interf[noiseid] = 0
1089 1089
1090 1090 # Expand the set of profiles to be cleaned
1091 1091 if (cinterfid > 0):
1092 1092 new_interfid = (
1093 1093 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1094 1094 new_interfid = numpy.asarray(new_interfid)
1095 1095 new_interfid = {x for x in new_interfid}
1096 1096 new_interfid = numpy.array(list(new_interfid))
1097 1097 new_cinterfid = new_interfid.size
1098 1098 else:
1099 1099 new_cinterfid = 0
1100 1100
1101 1101 for ip in range(new_cinterfid):
1102 1102 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1103 1103 jspc_interf[new_interfid[ip]
1104 1104 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1105 1105
1106 1106 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1107 1107 ind_hei] - jspc_interf # correct indices
1108 1108
1109 1109 # Remove the interference at the point of strongest interference
1110 1110 ListAux = jspc_interf[mask_prof].tolist()
1111 1111 maxid = ListAux.index(max(ListAux))
1112 1112
1113 1113 if cinterfid > 0:
1114 1114 for ip in range(cinterfid * (interf == 2) - 1):
1115 1115 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1116 1116 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1117 1117 cind = len(ind)
1118 1118
1119 1119 if (cind > 0):
1120 1120 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1121 1121 (1 + (numpy.random.uniform(cind) - 0.5) /
1122 1122 numpy.sqrt(num_incoh))
1123 1123
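# The most-interfered profile (maxid) is rebuilt by cubic interpolation through
# its four neighbours at Doppler offsets -2, -1, +1, +2: xx is the Vandermonde
# matrix of those offsets and xx_inv[:, 0] holds the weights that evaluate the
# fitted cubic at offset 0.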
1124 1124 ind = numpy.array([-2, -1, 1, 2])
1125 1125 xx = numpy.zeros([4, 4])
1126 1126
1127 1127 for id1 in range(4):
1128 1128 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1129 1129
1130 1130 xx_inv = numpy.linalg.inv(xx)
1131 1131 xx = xx_inv[:, 0]
1132 1132 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1133 1133 yy = jspectra[ich, mask_prof[ind], :]
1134 1134 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1135 1135 yy.transpose(), xx)
1136 1136
1137 1137 indAux = (jspectra[ich, :, :] < tmp_noise *
1138 1138 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1139 1139 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1140 1140 (1 - 1 / numpy.sqrt(num_incoh))
1141 1141
1142 1142 # Interference removal in the cross-spectra
1143 1143 if jcspectra is None:
1144 1144 return jspectra, jcspectra
1145 1145 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1146 1146 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1147 1147
1148 1148 for ip in range(num_pairs):
1149 1149
1150 1150 #-------------------------------------------
1151 1151
1152 1152 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1153 1153 cspower = cspower[:, hei_interf]
1154 1154 cspower = cspower.sum(axis=0)
1155 1155
1156 1156 cspsort = cspower.ravel().argsort()
1157 1157 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1158 1158 offhei_interf, nhei_interf + offhei_interf))]]]
1159 1159 junkcspc_interf = junkcspc_interf.transpose()
1160 1160 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1161 1161
1162 1162 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1163 1163
1164 1164 median_real = int(numpy.median(numpy.real(
1165 1165 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1166 1166 median_imag = int(numpy.median(numpy.imag(
1167 1167 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1168 1168 comp_mask_prof = [int(e) for e in comp_mask_prof]
1169 1169 junkcspc_interf[comp_mask_prof, :] = complex(
1170 1170 median_real, median_imag)
1171 1171
1172 1172 for iprof in range(num_prof):
1173 1173 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1174 1174 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1175 1175
1176 1176 # Remove the interference
1177 1177 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1178 1178 :, ind_hei] - jcspc_interf
1179 1179
1180 1180 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1181 1181 maxid = ListAux.index(max(ListAux))
1182 1182
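# Same 4-point cubic interpolation as in the power-spectra branch, here applied
# to the most-interfered cross-spectral profile.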
1183 1183 ind = numpy.array([-2, -1, 1, 2])
1184 1184 xx = numpy.zeros([4, 4])
1185 1185
1186 1186 for id1 in range(4):
1187 1187 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1188 1188
1189 1189 xx_inv = numpy.linalg.inv(xx)
1190 1190 xx = xx_inv[:, 0]
1191 1191
1192 1192 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1193 1193 yy = jcspectra[ip, mask_prof[ind], :]
1194 1194 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1195 1195
1196 1196 # Save results
1197 1197 self.dataOut.data_spc = jspectra
1198 1198 self.dataOut.data_cspc = jcspectra
1199 1199
1200 1200 return 1
1201 1201
1202 1202 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1203 1203
1204 1204 self.dataOut = dataOut
1205 1205
1206 1206 if mode == 1:
1207 1207 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf) # forward the user-supplied parameters
1208 1208 elif mode == 2:
1209 1209 self.removeInterference2()
1210 1210
1211 1211 return self.dataOut
1212 1212
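# A minimal usage sketch (illustrative only; `dataOut` is assumed to be a
# Spectra object coming from the processing chain):
#
#   op = removeInterference()
#   dataOut = op.run(dataOut, interf=2, nhei_interf=5, mode=1)  # spectral-domain cleaning
#   dataOut = op.run(dataOut, mode=2)                           # percentile-based cspc cleaning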
1213 1213
1214 1214 class IncohInt(Operation):
1215 1215
1216 1216 __profIndex = 0
1217 1217 __withOverapping = False
1218 1218
1219 1219 __byTime = False
1220 1220 __initime = None
1221 1221 __lastdatatime = None
1222 1222 __integrationtime = None
1223 1223
1224 1224 __buffer_spc = None
1225 1225 __buffer_cspc = None
1226 1226 __buffer_dc = None
1227 1227
1228 1228 __dataReady = False
1229 1229
1230 1230 __timeInterval = None
1231 1231
1232 1232 n = None
1233 1233
1234 1234 def __init__(self):
1235 1235
1236 1236 Operation.__init__(self)
1237 1237
1238 1238 def setup(self, n=None, timeInterval=None, overlapping=False):
1239 1239 """
1240 1240 Set the parameters of the integration class.
1241 1241
1242 1242 Inputs:
1243 1243
1244 1244 n : Number of incoherent integrations
1245 1245 timeInterval : Integration time in seconds; ignored when "n" is given
1246 1246 overlapping : Accepted for compatibility, but not used by this implementation
1247 1247
1248 1248 """
1249 1249
1250 1250 self.__initime = None
1251 1251 self.__lastdatatime = 0
1252 1252
1253 1253 self.__buffer_spc = 0
1254 1254 self.__buffer_cspc = 0
1255 1255 self.__buffer_dc = 0
1256 1256
1257 1257 self.__profIndex = 0
1258 1258 self.__dataReady = False
1259 1259 self.__byTime = False
1260 1260
1261 1261 if n is None and timeInterval is None:
1262 1262 raise ValueError("n or timeInterval should be specified ...")
1263 1263
1264 1264 if n is not None:
1265 1265 self.n = int(n)
1266 1266 else:
1267 1267
1268 1268 self.__integrationtime = int(timeInterval)
1269 1269 self.n = None
1270 1270 self.__byTime = True
1271 1271
1272 1272 def putData(self, data_spc, data_cspc, data_dc):
1273 1273 """
1274 1274 Add a spectra block to __buffer_spc (and to the cross-spectra and DC buffers) and increment __profIndex by one
1275 1275
1276 1276 """
1277 1277
1278 1278 self.__buffer_spc += data_spc
1279 1279
1280 1280 if data_cspc is None:
1281 1281 self.__buffer_cspc = None
1282 1282 else:
1283 1283 self.__buffer_cspc += data_cspc
1284 1284
1285 1285 if data_dc is None:
1286 1286 self.__buffer_dc = None
1287 1287 else:
1288 1288 self.__buffer_dc += data_dc
1289 1289
1290 1290 self.__profIndex += 1
1291 1291
1292 1292 return
1293 1293
1294 1294 def pushData(self):
1295 1295 """
1296 1296 Return the accumulated sums and the number of blocks used, then reset the buffers.
1297 1297
1298 1298 Affected:
1299 1299
1300 1300 self.__profIndex
1301 1301
1302 1302 """
1303 1303
1304 1304 data_spc = self.__buffer_spc
1305 1305 data_cspc = self.__buffer_cspc
1306 1306 data_dc = self.__buffer_dc
1307 1307 n = self.__profIndex
1308 1308
1309 1309 self.__buffer_spc = 0
1310 1310 self.__buffer_cspc = 0
1311 1311 self.__buffer_dc = 0
1312 1312 self.__profIndex = 0
1313 1313
1314 1314 return data_spc, data_cspc, data_dc, n
1315 1315
1316 1316 def byProfiles(self, *args):
1317 1317
1318 1318 self.__dataReady = False
1319 1319 avgdata_spc = None
1320 1320 avgdata_cspc = None
1321 1321 avgdata_dc = None
1322 1322
1323 1323 self.putData(*args)
1324 1324
1325 1325 if self.__profIndex == self.n:
1326 1326
1327 1327 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1328 1328 self.n = n
1329 1329 self.__dataReady = True
1330 1330
1331 1331 return avgdata_spc, avgdata_cspc, avgdata_dc
1332 1332
1333 1333 def byTime(self, datatime, *args):
1334 1334
1335 1335 self.__dataReady = False
1336 1336 avgdata_spc = None
1337 1337 avgdata_cspc = None
1338 1338 avgdata_dc = None
1339 1339
1340 1340 self.putData(*args)
1341 1341
1342 1342 if (datatime - self.__initime) >= self.__integrationtime:
1343 1343 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1344 1344 self.n = n
1345 1345 self.__dataReady = True
1346 1346
1347 1347 return avgdata_spc, avgdata_cspc, avgdata_dc
1348 1348
1349 1349 def integrate(self, datatime, *args):
1350 1350
1351 1351 if self.__profIndex == 0:
1352 1352 self.__initime = datatime
1353 1353
1354 1354 if self.__byTime:
1355 1355 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1356 1356 datatime, *args)
1357 1357 else:
1358 1358 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1359 1359
1360 1360 if not self.__dataReady:
1361 1361 return None, None, None, None
1362 1362
1363 1363 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1364 1364
1365 1365 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1366 1366 if n == 1:
1367 1367 return dataOut
1368 1368
1369 1369 dataOut.flagNoData = True
1370 1370
1371 1371 if not self.isConfig:
1372 1372 self.setup(n, timeInterval, overlapping)
1373 1373 self.isConfig = True
1374 1374
1375 1375 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1376 1376 dataOut.data_spc,
1377 1377 dataOut.data_cspc,
1378 1378 dataOut.data_dc)
1379 1379
1380 1380 if self.__dataReady:
1381 1381
1382 1382 dataOut.data_spc = avgdata_spc
1383 1383 dataOut.data_cspc = avgdata_cspc
1384 1384 dataOut.data_dc = avgdata_dc
1385 1385 dataOut.nIncohInt *= self.n
1386 1386 dataOut.utctime = avgdatatime
1387 1387 dataOut.flagNoData = False
1388 1388
1389 1389 return dataOut
1390 1390
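# A minimal usage sketch (illustrative only): integrate 8 consecutive spectra
# blocks incoherently; `spectra_blocks` is a hypothetical iterable of Spectra
# objects.
#
#   op = IncohInt()
#   for dataOut in spectra_blocks:
#       dataOut = op.run(dataOut, n=8)
#       if not dataOut.flagNoData:
#           pass  # the integrated spectra are now in dataOut.data_spc / data_cspc
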
1391 1391 class dopplerFlip(Operation):
1392 1392
1393 1393 def run(self, dataOut):
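"""
Reverse the Doppler (profile) axis of channel 2 (the JULIA oblique beam),
effectively flipping the sign of the Doppler velocity for that channel;
the DC profile and the one immediately before it keep their original values.
"""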
1394 1394 # array 1: (num_chan, num_profiles, num_heights)
1395 1395 self.dataOut = dataOut
1396 1396 # JULIA oblique beam, index 2
1397 1397 # array 2: (num_profiles, num_heights)
1398 1398 jspectra = self.dataOut.data_spc[2]
1399 1399 jspectra_tmp = numpy.zeros(jspectra.shape)
1400 1400 num_profiles = jspectra.shape[0]
1401 1401 freq_dc = int(num_profiles / 2)
1402 1402 # Flip using a for loop
1403 1403 for j in range(num_profiles):
1404 1404 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1405 1405 # Keep the DC profile and the immediately preceding profile at their original values
1406 1406 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1407 1407 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1408 1408 # the modified channel is written back into the channel array
1409 1409 self.dataOut.data_spc[2] = jspectra_tmp
1410 1410
1411 1411 return self.dataOut