merge v2.3
José Chávez
r1098:831d2c3bd88d merge
@@ -1,951 +1,955
1 1
2 2 import os
3 3 import time
4 4 import glob
5 5 import datetime
6 6 from multiprocessing import Process
7 7
8 8 import zmq
9 9 import numpy
10 10 import matplotlib
11 11 import matplotlib.pyplot as plt
12 12 from mpl_toolkits.axes_grid1 import make_axes_locatable
13 13 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
14 14
15 15 from schainpy.model.proc.jroproc_base import Operation
16 16 from schainpy.utils import log
17 17
18 jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90]
18 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
19 19 blu_values = matplotlib.pyplot.get_cmap(
20 "seismic_r", 20)(numpy.arange(20))[10:15]
20 'seismic_r', 20)(numpy.arange(20))[10:15]
21 21 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
22 "jro", numpy.vstack((blu_values, jet_values)))
22 'jro', numpy.vstack((blu_values, jet_values)))
23 23 matplotlib.pyplot.register_cmap(cmap=ncmap)
24 24
25 25 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'RdBu_r', 'seismic')]
26 26
27 27
28 28 def figpause(interval):
29 29 backend = plt.rcParams['backend']
30 30 if backend in matplotlib.rcsetup.interactive_bk:
31 31 figManager = matplotlib._pylab_helpers.Gcf.get_active()
32 32 if figManager is not None:
33 33 canvas = figManager.canvas
34 34 if canvas.figure.stale:
35 35 canvas.draw()
36 36 canvas.start_event_loop(interval)
37 37 return
38 38
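Editor's note (not part of the changeset): figpause drains the GUI event loop without grabbing window focus, which is why it is used here instead of plt.pause in the live plotting loop. A minimal usage sketch, assuming figpause from this module is importable and using made-up data:

import numpy
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
line, = ax.plot(numpy.random.rand(10))
fig.show()
for _ in range(20):
    line.set_ydata(numpy.random.rand(10))  # update the curve in place
    figpause(0.1)                          # redraw and run the event loop briefly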
39 39
40 40 class PlotData(Operation, Process):
41 41 '''
42 42 Base class for Schain plotting operations
43 43 '''
44 44
45 45 CODE = 'Figure'
46 46 colormap = 'jro'
47 47 bgcolor = 'white'
48 48 CONFLATE = False
49 49 __MAXNUMX = 80
50 50 __missing = 1E30
51 51
52 __attrs__ = ['show', 'save', 'xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax',
53 'zlimits', 'xlabel', 'ylabel', 'cb_label', 'title', 'titles', 'colorbar',
54 'bgcolor', 'width', 'height', 'localtime', 'oneFigure', 'showprofile']
55
52 56 def __init__(self, **kwargs):
53 57
54 58 Operation.__init__(self, plot=True, **kwargs)
55 59 Process.__init__(self)
56 60 self.contador = 0
57 61 self.kwargs['code'] = self.CODE
58 62 self.mp = False
59 63 self.data = None
60 64 self.isConfig = False
61 65 self.figures = []
62 66 self.axes = []
63 67 self.cb_axes = []
64 68 self.localtime = kwargs.pop('localtime', True)
65 69 self.show = kwargs.get('show', True)
66 70 self.save = kwargs.get('save', False)
67 71 self.colormap = kwargs.get('colormap', self.colormap)
68 72 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
69 73 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
70 74 self.colormaps = kwargs.get('colormaps', None)
71 75 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
72 76 self.showprofile = kwargs.get('showprofile', False)
73 77 self.title = kwargs.get('wintitle', self.CODE.upper())
74 78 self.cb_label = kwargs.get('cb_label', None)
75 79 self.cb_labels = kwargs.get('cb_labels', None)
76 80 self.xaxis = kwargs.get('xaxis', 'frequency')
77 81 self.zmin = kwargs.get('zmin', None)
78 82 self.zmax = kwargs.get('zmax', None)
79 83 self.zlimits = kwargs.get('zlimits', None)
80 84 self.xmin = kwargs.get('xmin', None)
81 85 self.xmax = kwargs.get('xmax', None)
82 86 self.xrange = kwargs.get('xrange', 24)
83 87 self.ymin = kwargs.get('ymin', None)
84 88 self.ymax = kwargs.get('ymax', None)
85 89 self.xlabel = kwargs.get('xlabel', None)
86 90 self.__MAXNUMY = kwargs.get('decimation', 300)
87 91 self.showSNR = kwargs.get('showSNR', False)
88 92 self.oneFigure = kwargs.get('oneFigure', True)
89 93 self.width = kwargs.get('width', None)
90 94 self.height = kwargs.get('height', None)
91 95 self.colorbar = kwargs.get('colorbar', True)
92 96 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
93 97 self.titles = ['' for __ in range(16)]
98 self.polar = False
94 99
95 100 def __fmtTime(self, x, pos):
96 101 '''
97 102 '''
98 103
99 104 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
100 105
101 106 def __setup(self):
102 107 '''
103 108 Common setup for all figures; figures and axes are created here
104 109 '''
105 110
111 if self.CODE not in self.data:
112 raise ValueError(log.error('Missing data for {}'.format(self.CODE),
113 self.name))
114
106 115 self.setup()
107 116
108 117 self.time_label = 'LT' if self.localtime else 'UTC'
109 118 if self.data.localtime:
110 119 self.getDateTime = datetime.datetime.fromtimestamp
111 120 else:
112 121 self.getDateTime = datetime.datetime.utcfromtimestamp
113 122
114 123 if self.width is None:
115 124 self.width = 8
116 125
117 126 self.figures = []
118 127 self.axes = []
119 128 self.cb_axes = []
120 129 self.pf_axes = []
121 130 self.cmaps = []
122 131
123 132 size = '15%' if self.ncols == 1 else '30%'
124 133 pad = '4%' if self.ncols == 1 else '8%'
125 134
126 135 if self.oneFigure:
127 136 if self.height is None:
128 137 self.height = 1.4 * self.nrows + 1
129 138 fig = plt.figure(figsize=(self.width, self.height),
130 139 edgecolor='k',
131 140 facecolor='w')
132 141 self.figures.append(fig)
133 142 for n in range(self.nplots):
134 ax = fig.add_subplot(self.nrows, self.ncols, n + 1)
143 ax = fig.add_subplot(self.nrows, self.ncols,
144 n + 1, polar=self.polar)
135 145 ax.tick_params(labelsize=8)
136 146 ax.firsttime = True
137 147 ax.index = 0
138 148 ax.press = None
139 149 self.axes.append(ax)
140 150 if self.showprofile:
141 151 cax = self.__add_axes(ax, size=size, pad=pad)
142 152 cax.tick_params(labelsize=8)
143 153 self.pf_axes.append(cax)
144 154 else:
145 155 if self.height is None:
146 156 self.height = 3
147 157 for n in range(self.nplots):
148 158 fig = plt.figure(figsize=(self.width, self.height),
149 159 edgecolor='k',
150 160 facecolor='w')
151 ax = fig.add_subplot(1, 1, 1)
161 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
152 162 ax.tick_params(labelsize=8)
153 163 ax.firsttime = True
154 164 ax.index = 0
155 165 ax.press = None
156 166 self.figures.append(fig)
157 167 self.axes.append(ax)
158 168 if self.showprofile:
159 169 cax = self.__add_axes(ax, size=size, pad=pad)
160 170 cax.tick_params(labelsize=8)
161 171 self.pf_axes.append(cax)
162 172
163 173 for n in range(self.nrows):
164 174 if self.colormaps is not None:
165 175 cmap = plt.get_cmap(self.colormaps[n])
166 176 else:
167 177 cmap = plt.get_cmap(self.colormap)
168 178 cmap.set_bad(self.bgcolor, 1.)
169 179 self.cmaps.append(cmap)
170 180
171 181 for fig in self.figures:
172 182 fig.canvas.mpl_connect('key_press_event', self.OnKeyPress)
173 183 fig.canvas.mpl_connect('scroll_event', self.OnBtnScroll)
174 184 fig.canvas.mpl_connect('button_press_event', self.onBtnPress)
175 185 fig.canvas.mpl_connect('motion_notify_event', self.onMotion)
176 186 fig.canvas.mpl_connect('button_release_event', self.onBtnRelease)
177 187 if self.show:
178 188 fig.show()
179 189
180 190 def OnKeyPress(self, event):
181 191 '''
182 192 Event for pressing keys (up, down) to change the colormap
183 193 '''
184 194 ax = event.inaxes
185 195 if ax in self.axes:
186 196 if event.key == 'down':
187 197 ax.index += 1
188 198 elif event.key == 'up':
189 199 ax.index -= 1
190 200 if ax.index < 0:
191 201 ax.index = len(CMAPS) - 1
192 202 elif ax.index == len(CMAPS):
193 203 ax.index = 0
194 204 cmap = CMAPS[ax.index]
195 205 ax.cbar.set_cmap(cmap)
196 206 ax.cbar.draw_all()
197 207 ax.plt.set_cmap(cmap)
198 208 ax.cbar.patch.figure.canvas.draw()
199 209 self.colormap = cmap.name
200 210
201 211 def OnBtnScroll(self, event):
202 212 '''
203 213 Event for scrolling; rescales the colorbar limits
204 214 '''
205 215 cb_ax = event.inaxes
206 216 if cb_ax in [ax.cbar.ax for ax in self.axes if ax.cbar]:
207 217 ax = [ax for ax in self.axes if cb_ax == ax.cbar.ax][0]
208 218 pt = ax.cbar.ax.bbox.get_points()[:, 1]
209 219 nrm = ax.cbar.norm
210 220 vmin, vmax, p0, p1, pS = (
211 221 nrm.vmin, nrm.vmax, pt[0], pt[1], event.y)
212 222 scale = 2 if event.step == 1 else 0.5
213 223 point = vmin + (vmax - vmin) / (p1 - p0) * (pS - p0)
214 224 ax.cbar.norm.vmin = point - scale * (point - vmin)
215 225 ax.cbar.norm.vmax = point - scale * (point - vmax)
216 226 ax.plt.set_norm(ax.cbar.norm)
217 227 ax.cbar.draw_all()
218 228 ax.cbar.patch.figure.canvas.draw()
219 229
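Editor's note on the arithmetic above, worked with invented numbers: the value under the cursor keeps its relative position on the colorbar while the visible span is doubled (scrolling one way) or halved (the other).

vmin, vmax = 0.0, 40.0
p0, p1, pS = 100.0, 300.0, 200.0                      # colorbar pixel bounds and cursor y
scale = 2                                             # one scroll step, zooming out
point = vmin + (vmax - vmin) / (p1 - p0) * (pS - p0)  # 20.0, the value under the cursor
print(point - scale * (point - vmin))                 # new vmin: -20.0
print(point - scale * (point - vmax))                 # new vmax:  60.0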
220 230 def onBtnPress(self, event):
221 231 '''
222 232 Event for mouse button press
223 233 '''
224 234 cb_ax = event.inaxes
225 235 if cb_ax is None:
226 236 return
227 237
228 238 if cb_ax in [ax.cbar.ax for ax in self.axes if ax.cbar]:
229 239 cb_ax.press = event.x, event.y
230 240 else:
231 241 cb_ax.press = None
232 242
233 243 def onMotion(self, event):
234 244 '''
235 245 Event for mouse motion inside the colorbar
236 246 '''
237 247 cb_ax = event.inaxes
238 248 if cb_ax is None:
239 249 return
240 250 if cb_ax not in [ax.cbar.ax for ax in self.axes if ax.cbar]:
241 251 return
242 252 if cb_ax.press is None:
243 253 return
244 254
245 255 ax = [ax for ax in self.axes if cb_ax == ax.cbar.ax][0]
246 256 xprev, yprev = cb_ax.press
247 257 dx = event.x - xprev
248 258 dy = event.y - yprev
249 259 cb_ax.press = event.x, event.y
250 260 scale = ax.cbar.norm.vmax - ax.cbar.norm.vmin
251 261 perc = 0.03
252 262
253 263 if event.button == 1:
254 264 ax.cbar.norm.vmin -= (perc * scale) * numpy.sign(dy)
255 265 ax.cbar.norm.vmax -= (perc * scale) * numpy.sign(dy)
256 266 elif event.button == 3:
257 267 ax.cbar.norm.vmin -= (perc * scale) * numpy.sign(dy)
258 268 ax.cbar.norm.vmax += (perc * scale) * numpy.sign(dy)
259 269
260 270 ax.cbar.draw_all()
261 271 ax.plt.set_norm(ax.cbar.norm)
262 272 ax.cbar.patch.figure.canvas.draw()
263 273
264 274 def onBtnRelease(self, event):
265 275 '''
266 276 Event for mouse button release
267 277 '''
268 278 cb_ax = event.inaxes
269 279 if cb_ax is not None:
270 280 cb_ax.press = None
271 281
272 282 def __add_axes(self, ax, size='30%', pad='8%'):
273 283 '''
274 284 Add new axes to the given figure
275 285 '''
276 286 divider = make_axes_locatable(ax)
277 287 nax = divider.new_horizontal(size=size, pad=pad)
278 288 ax.figure.add_axes(nax)
279 289 return nax
280 290
281 291 self.setup()
282 292
283 293 def setup(self):
284 294 '''
285 295 This method should be implemented in the child class; the following
286 296 attributes should be set:
287 297
288 298 self.nrows: number of rows
289 299 self.ncols: number of cols
290 300 self.nplots: number of plots (channels or pairs)
291 301 self.ylabel: label for Y axes
292 302 self.titles: list of axes titles
293 303
294 304 '''
295 305 raise NotImplementedError('Implement this method in child class')
296 306
297 307 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
298 308 '''
299 309 Create a masked array for missing data
300 310 '''
301 311 if x_buffer.shape[0] < 2:
302 312 return x_buffer, y_buffer, z_buffer
303 313
304 314 deltas = x_buffer[1:] - x_buffer[0:-1]
305 315 x_median = numpy.median(deltas)
306 316
307 317 index = numpy.where(deltas > 5 * x_median)
308 318
309 319 if len(index[0]) != 0:
310 320 z_buffer[::, index[0], ::] = self.__missing
311 321 z_buffer = numpy.ma.masked_inside(z_buffer,
312 322 0.99 * self.__missing,
313 323 1.01 * self.__missing)
314 324
315 325 return x_buffer, y_buffer, z_buffer
316 326
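Editor's sketch of the masking idea in fill_gaps, with invented values: any time step larger than five times the median step is flagged with the sentinel value and then masked, so pcolormesh leaves the gap blank.

import numpy

missing = 1e30
x = numpy.array([0., 60., 120., 1000.])              # one large time gap
z = numpy.ones((1, 4, 3))                            # (channels, times, heights)
deltas = x[1:] - x[:-1]
index = numpy.where(deltas > 5 * numpy.median(deltas))
z[:, index[0], :] = missing
z = numpy.ma.masked_inside(z, 0.99 * missing, 1.01 * missing)
print(z.mask.any())                                  # True: the column at the gap is masked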
317 327 def decimate(self):
318 328
319 329 # dx = int(len(self.x)/self.__MAXNUMX) + 1
320 330 dy = int(len(self.y) / self.__MAXNUMY) + 1
321 331
322 332 # x = self.x[::dx]
323 333 x = self.x
324 334 y = self.y[::dy]
325 335 z = self.z[::, ::, ::dy]
326 336
327 337 return x, y, z
328 338
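Editor's sketch of the decimation step with invented shapes: with the default decimation limit of 300 points, a 1000-point height axis gets a stride of int(1000 / 300) + 1 = 4, keeping 250 heights.

import numpy

max_num_y = 300                       # mirrors the 'decimation' kwarg default
y = numpy.linspace(0, 1500, 1000)     # hypothetical height axis
z = numpy.zeros((2, 100, 1000))       # (channels, times, heights)
dy = int(len(y) / max_num_y) + 1
print(y[::dy].shape)                  # (250,)
print(z[:, :, ::dy].shape)            # (2, 100, 250)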
329 339 def format(self):
330 340 '''
331 341 Set min and max values, labels, ticks and titles
332 342 '''
333 343
334 344 if self.xmin is None:
335 345 xmin = self.min_time
336 346 else:
337 347 if self.xaxis == 'time':
338 348 dt = self.getDateTime(self.min_time)
339 349 xmin = (dt.replace(hour=int(self.xmin), minute=0, second=0) -
340 350 datetime.datetime(1970, 1, 1)).total_seconds()
341 351 if self.data.localtime:
342 352 xmin += time.timezone
343 353 else:
344 354 xmin = self.xmin
345 355
346 356 if self.xmax is None:
347 357 xmax = xmin + self.xrange * 60 * 60
348 358 else:
349 359 if self.xaxis == 'time':
350 360 dt = self.getDateTime(self.max_time)
351 xmax = (dt.replace(hour=int(self.xmax), minute=0, second=0) -
361 xmax = (dt.replace(hour=int(self.xmax), minute=59, second=59) -
352 362 datetime.datetime(1970, 1, 1)).total_seconds()
353 363 if self.data.localtime:
354 364 xmax += time.timezone
355 365 else:
356 366 xmax = self.xmax
357 367
358 368 ymin = self.ymin if self.ymin else numpy.nanmin(self.y)
359 369 ymax = self.ymax if self.ymax else numpy.nanmax(self.y)
360 370
361 371 Y = numpy.array([10, 20, 50, 100, 200, 500, 1000, 2000])
362 372 i = 1 if numpy.where(ymax < Y)[
363 373 0][0] < 0 else numpy.where(ymax < Y)[0][0]
364 374 ystep = Y[i - 1] / 5
365 375
366 ystep = 200 if ymax >= 800 else 100 if ymax >= 400 else 50 if ymax >= 200 else 20
367
368 376 for n, ax in enumerate(self.axes):
369 377 if ax.firsttime:
370 378 ax.set_facecolor(self.bgcolor)
371 379 ax.yaxis.set_major_locator(MultipleLocator(ystep))
372 380 if self.xaxis == 'time':
373 381 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
374 382 ax.xaxis.set_major_locator(LinearLocator(9))
375 383 if self.xlabel is not None:
376 384 ax.set_xlabel(self.xlabel)
377 385 ax.set_ylabel(self.ylabel)
378 386 ax.firsttime = False
379 387 if self.showprofile:
380 388 self.pf_axes[n].set_ylim(ymin, ymax)
381 389 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
382 390 self.pf_axes[n].set_xlabel('dB')
383 391 self.pf_axes[n].grid(b=True, axis='x')
384 392 [tick.set_visible(False)
385 393 for tick in self.pf_axes[n].get_yticklabels()]
386 394 if self.colorbar:
387 ax.cbar = plt.colorbar(ax.plt, ax=ax, pad=0.02, aspect=10)
395 ax.cbar = plt.colorbar(
396 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
388 397 ax.cbar.ax.tick_params(labelsize=8)
389 398 ax.cbar.ax.press = None
390 399 if self.cb_label:
391 400 ax.cbar.set_label(self.cb_label, size=8)
392 401 elif self.cb_labels:
393 402 ax.cbar.set_label(self.cb_labels[n], size=8)
394 403 else:
395 404 ax.cbar = None
396 405
397 ax.set_title('{} - {} {}'.format(
398 self.titles[n],
399 self.getDateTime(self.max_time).strftime('%H:%M:%S'),
400 self.time_label),
401 size=8)
402 ax.set_xlim(xmin, xmax)
403 ax.set_ylim(ymin, ymax)
406 if not self.polar:
407 ax.set_xlim(xmin, xmax)
408 ax.set_ylim(ymin, ymax)
409 ax.set_title('{} - {} {}'.format(
410 self.titles[n],
411 self.getDateTime(self.max_time).strftime('%H:%M:%S'),
412 self.time_label),
413 size=8)
414 else:
415 ax.set_title('{}'.format(self.titles[n]), size=8)
416 ax.set_ylim(0, 90)
417 ax.set_yticks(numpy.arange(0, 90, 20))
418 ax.yaxis.labelpad = 40
404 419
405 420 def __plot(self):
406 421 '''
407 422 '''
408 423 log.success('Plotting', self.name)
409 424
410 425 self.plot()
411 426 self.format()
412 427
413 428 for n, fig in enumerate(self.figures):
414 429 if self.nrows == 0 or self.nplots == 0:
415 430 log.warning('No data', self.name)
431 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
416 432 continue
417 433
418 434 fig.tight_layout()
419 435 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
420 436 self.getDateTime(self.max_time).strftime('%Y/%m/%d')))
421 437 # fig.canvas.draw()
422 438
423 439 if self.save: # and self.data.ended:
424 440 self.contador += 1
425 441 channels = range(self.nrows)
426 442 if self.oneFigure:
427 443 label = ''
428 444 else:
429 445 label = '_{}'.format(channels[n])
430 446 figname = os.path.join(
431 447 self.save,
432 448 '{}{}_{}{}.png'.format(
433 449 self.CODE,
434 450 label,
435 451 self.getDateTime(self.saveTime).strftime(
436 452 '%y%m%d_%H%M%S'),
437 453 str(self.contador),
438 454 )
439 455 )
440 print 'Saving figure: {}'.format(figname)
456 log.log('Saving figure: {}'.format(figname), self.name)
441 457 fig.savefig(figname)
442 458
443 459 def plot(self):
444 460 '''
445 461 '''
446 462 raise NotImplementedError('Implement this method in child class')
447 463
448 464 def run(self):
449 465
450 466 log.success('Starting', self.name)
451 467
452 468 context = zmq.Context()
453 469 receiver = context.socket(zmq.SUB)
454 470 receiver.setsockopt(zmq.SUBSCRIBE, '')
455 471 receiver.setsockopt(zmq.CONFLATE, self.CONFLATE)
456 472
457 473 if 'server' in self.kwargs['parent']:
458 474 receiver.connect(
459 475 'ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server']))
460 476 else:
461 477 receiver.connect("ipc:///tmp/zmq.plots")
462 478
463 479 while True:
464 480 try:
465 481 self.data = receiver.recv_pyobj(flags=zmq.NOBLOCK)
466 482 if self.data.localtime and self.localtime:
467 483 self.times = self.data.times
468 484 elif self.data.localtime and not self.localtime:
469 485 self.times = self.data.times + time.timezone
470 486 elif not self.data.localtime and self.localtime:
471 487 self.times = self.data.times - time.timezone
472 488 else:
473 489 self.times = self.data.times
474 490
475 491 self.min_time = self.times[0]
476 492 self.max_time = self.times[-1]
477 493
478 494 if self.isConfig is False:
479 495 self.__setup()
480 496 self.isConfig = True
481 497
482 498 self.__plot()
483 499
484 500 except zmq.Again as e:
485 501 log.log('Waiting for data...')
486 502 if self.data:
487 503 figpause(self.data.throttle)
488 504 else:
489 505 time.sleep(2)
490 506
491 507 def close(self):
492 508 if self.data:
493 509 self.__plot()
494 510
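For context (editor's addition, not part of the changeset): run() above subscribes with a conflated SUB socket on an ipc endpoint and receives pickled objects via recv_pyobj. A hypothetical publisher feeding such a plot process could look like the sketch below; the endpoint matches the default used in run(), but the payload here is only a placeholder (the real receiver expects an object exposing localtime, times, throttle, etc.), and only the pyzmq calls themselves are standard API.

import time
import zmq

context = zmq.Context()
sender = context.socket(zmq.PUB)
sender.bind('ipc:///tmp/zmq.plots')     # default endpoint used by PlotData.run

for _ in range(10):
    payload = {'dummy': 42}             # stand-in for the real data object
    sender.send_pyobj(payload)          # received by receiver.recv_pyobj above
    time.sleep(1)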
495 511
496 512 class PlotSpectraData(PlotData):
497 513 '''
498 514 Plot for Spectra data
499 515 '''
500 516
501 517 CODE = 'spc'
502 518 colormap = 'jro'
503 519
504 520 def setup(self):
505 521 self.nplots = len(self.data.channels)
506 522 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
507 523 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
508 524 self.width = 3.4 * self.ncols
509 525 self.height = 3 * self.nrows
510 526 self.cb_label = 'dB'
511 527 if self.showprofile:
512 528 self.width += 0.8 * self.ncols
513 529
514 530 self.ylabel = 'Range [Km]'
515 531
516 532 def plot(self):
517 533 if self.xaxis == "frequency":
518 534 x = self.data.xrange[0]
519 535 self.xlabel = "Frequency (kHz)"
520 536 elif self.xaxis == "time":
521 537 x = self.data.xrange[1]
522 538 self.xlabel = "Time (ms)"
523 539 else:
524 540 x = self.data.xrange[2]
525 541 self.xlabel = "Velocity (m/s)"
526 542
527 543 if self.CODE == 'spc_mean':
528 544 x = self.data.xrange[2]
529 545 self.xlabel = "Velocity (m/s)"
530 546
531 547 self.titles = []
532 548
533 549 y = self.data.heights
534 550 self.y = y
535 551 z = self.data['spc']
536 552
537 553 for n, ax in enumerate(self.axes):
538 554 noise = self.data['noise'][n][-1]
539 555 if self.CODE == 'spc_mean':
540 556 mean = self.data['mean'][n][-1]
541 557 if ax.firsttime:
542 558 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
543 559 self.xmin = self.xmin if self.xmin else -self.xmax
544 560 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
545 561 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
546 562 ax.plt = ax.pcolormesh(x, y, z[n].T,
547 563 vmin=self.zmin,
548 564 vmax=self.zmax,
549 565 cmap=plt.get_cmap(self.colormap)
550 566 )
551 567
552 568 if self.showprofile:
553 569 ax.plt_profile = self.pf_axes[n].plot(
554 570 self.data['rti'][n][-1], y)[0]
555 571 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
556 572 color="k", linestyle="dashed", lw=1)[0]
557 573 if self.CODE == 'spc_mean':
558 574 ax.plt_mean = ax.plot(mean, y, color='k')[0]
559 575 else:
560 576 ax.plt.set_array(z[n].T.ravel())
561 577 if self.showprofile:
562 578 ax.plt_profile.set_data(self.data['rti'][n][-1], y)
563 579 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
564 580 if self.CODE == 'spc_mean':
565 581 ax.plt_mean.set_data(mean, y)
566 582
567 583 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
568 584 self.saveTime = self.max_time
569 585
570 586
571 587 class PlotCrossSpectraData(PlotData):
572 588
573 589 CODE = 'cspc'
574 590 zmin_coh = None
575 591 zmax_coh = None
576 592 zmin_phase = None
577 593 zmax_phase = None
578 594
579 595 def setup(self):
580 596
581 597 self.ncols = 4
582 598 self.nrows = len(self.data.pairs)
583 599 self.nplots = self.nrows * 4
584 600 self.width = 3.4 * self.ncols
585 601 self.height = 3 * self.nrows
586 602 self.ylabel = 'Range [Km]'
587 603 self.showprofile = False
588 604
589 605 def plot(self):
590 606
591 607 if self.xaxis == "frequency":
592 608 x = self.data.xrange[0]
593 609 self.xlabel = "Frequency (kHz)"
594 610 elif self.xaxis == "time":
595 611 x = self.data.xrange[1]
596 612 self.xlabel = "Time (ms)"
597 613 else:
598 614 x = self.data.xrange[2]
599 615 self.xlabel = "Velocity (m/s)"
600 616
601 617 self.titles = []
602 618
603 619 y = self.data.heights
604 620 self.y = y
605 621 spc = self.data['spc']
606 622 cspc = self.data['cspc']
607 623
608 624 for n in range(self.nrows):
609 625 noise = self.data['noise'][n][-1]
610 626 pair = self.data.pairs[n]
611 627 ax = self.axes[4 * n]
612 628 ax3 = self.axes[4 * n + 3]
613 629 if ax.firsttime:
614 630 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
615 631 self.xmin = self.xmin if self.xmin else -self.xmax
616 632 self.zmin = self.zmin if self.zmin else numpy.nanmin(spc)
617 633 self.zmax = self.zmax if self.zmax else numpy.nanmax(spc)
618 634 ax.plt = ax.pcolormesh(x, y, spc[pair[0]].T,
619 635 vmin=self.zmin,
620 636 vmax=self.zmax,
621 637 cmap=plt.get_cmap(self.colormap)
622 638 )
623 639 else:
624 640 ax.plt.set_array(spc[pair[0]].T.ravel())
625 641 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
626 642
627 643 ax = self.axes[4 * n + 1]
628 644 if ax.firsttime:
629 645 ax.plt = ax.pcolormesh(x, y, spc[pair[1]].T,
630 646 vmin=self.zmin,
631 647 vmax=self.zmax,
632 648 cmap=plt.get_cmap(self.colormap)
633 649 )
634 650 else:
635 651 ax.plt.set_array(spc[pair[1]].T.ravel())
636 652 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
637 653
638 654 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
639 655 coh = numpy.abs(out)
640 656 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
641 657
642 658 ax = self.axes[4 * n + 2]
643 659 if ax.firsttime:
644 660 ax.plt = ax.pcolormesh(x, y, coh.T,
645 661 vmin=0,
646 662 vmax=1,
647 663 cmap=plt.get_cmap(self.colormap_coh)
648 664 )
649 665 else:
650 666 ax.plt.set_array(coh.T.ravel())
651 667 self.titles.append(
652 668 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
653 669
654 670 ax = self.axes[4 * n + 3]
655 671 if ax.firsttime:
656 672 ax.plt = ax.pcolormesh(x, y, phase.T,
657 673 vmin=-180,
658 674 vmax=180,
659 675 cmap=plt.get_cmap(self.colormap_phase)
660 676 )
661 677 else:
662 678 ax.plt.set_array(phase.T.ravel())
663 679 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
664 680
665 681 self.saveTime = self.max_time
666 682
667 683
668 684 class PlotSpectraMeanData(PlotSpectraData):
669 685 '''
670 686 Plot for Spectra and Mean
671 687 '''
672 688 CODE = 'spc_mean'
673 689 colormap = 'jro'
674 690
675 691
676 692 class PlotRTIData(PlotData):
677 693 '''
678 694 Plot for RTI data
679 695 '''
680 696
681 697 CODE = 'rti'
682 698 colormap = 'jro'
683 699
684 700 def setup(self):
685 701 self.xaxis = 'time'
686 702 self.ncols = 1
687 703 self.nrows = len(self.data.channels)
688 704 self.nplots = len(self.data.channels)
689 705 self.ylabel = 'Range [Km]'
690 706 self.cb_label = 'dB'
691 707 self.titles = ['{} Channel {}'.format(
692 708 self.CODE.upper(), x) for x in range(self.nrows)]
693 709
694 710 def plot(self):
695 711 self.x = self.times
696 712 self.y = self.data.heights
697 713 self.z = self.data[self.CODE]
698 714 self.z = numpy.ma.masked_invalid(self.z)
699 715
700 716 for n, ax in enumerate(self.axes):
701 717 x, y, z = self.fill_gaps(*self.decimate())
702 718 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
703 719 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
704 720 if ax.firsttime:
705 721 ax.plt = ax.pcolormesh(x, y, z[n].T,
706 722 vmin=self.zmin,
707 723 vmax=self.zmax,
708 724 cmap=plt.get_cmap(self.colormap)
709 725 )
710 726 if self.showprofile:
711 727 ax.plot_profile = self.pf_axes[n].plot(
712 728 self.data['rti'][n][-1], self.y)[0]
713 729 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y,
714 730 color="k", linestyle="dashed", lw=1)[0]
715 731 else:
716 732 ax.collections.remove(ax.collections[0])
717 733 ax.plt = ax.pcolormesh(x, y, z[n].T,
718 734 vmin=self.zmin,
719 735 vmax=self.zmax,
720 736 cmap=plt.get_cmap(self.colormap)
721 737 )
722 738 if self.showprofile:
723 739 ax.plot_profile.set_data(self.data['rti'][n][-1], self.y)
724 740 ax.plot_noise.set_data(numpy.repeat(
725 741 self.data['noise'][n][-1], len(self.y)), self.y)
726 742
727 743 self.saveTime = self.min_time
728 744
729 745
730 746 class PlotCOHData(PlotRTIData):
731 747 '''
732 748 Plot for Coherence data
733 749 '''
734 750
735 751 CODE = 'coh'
736 752
737 753 def setup(self):
738 754 self.xaxis = 'time'
739 755 self.ncols = 1
740 756 self.nrows = len(self.data.pairs)
741 757 self.nplots = len(self.data.pairs)
742 758 self.ylabel = 'Range [Km]'
743 759 if self.CODE == 'coh':
744 760 self.cb_label = ''
745 761 self.titles = [
746 762 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
747 763 else:
748 764 self.cb_label = 'Degrees'
749 765 self.titles = [
750 766 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
751 767
752 768
753 769 class PlotPHASEData(PlotCOHData):
754 770 '''
755 771 Plot for Phase map data
756 772 '''
757 773
758 774 CODE = 'phase'
759 775 colormap = 'seismic'
760 776
761 777
762 778 class PlotNoiseData(PlotData):
763 779 '''
764 780 Plot for noise
765 781 '''
766 782
767 783 CODE = 'noise'
768 784
769 785 def setup(self):
770 786 self.xaxis = 'time'
771 787 self.ncols = 1
772 788 self.nrows = 1
773 789 self.nplots = 1
774 790 self.ylabel = 'Intensity [dB]'
775 791 self.titles = ['Noise']
776 792 self.colorbar = False
777 793
778 794 def plot(self):
779 795
780 796 x = self.times
781 797 xmin = self.min_time
782 798 xmax = xmin + self.xrange * 60 * 60
783 799 Y = self.data[self.CODE]
784 800
785 801 if self.axes[0].firsttime:
786 802 for ch in self.data.channels:
787 803 y = Y[ch]
788 804 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
789 805 plt.legend()
790 806 else:
791 807 for ch in self.data.channels:
792 808 y = Y[ch]
793 809 self.axes[0].lines[ch].set_data(x, y)
794 810
795 811 self.ymin = numpy.nanmin(Y) - 5
796 812 self.ymax = numpy.nanmax(Y) + 5
797 813 self.saveTime = self.min_time
798 814
799 815
800 816 class PlotSNRData(PlotRTIData):
801 817 '''
802 818 Plot for SNR Data
803 819 '''
804 820
805 821 CODE = 'snr'
806 822 colormap = 'jet'
807 823
808 824
809 825 class PlotDOPData(PlotRTIData):
810 826 '''
811 827 Plot for DOPPLER Data
812 828 '''
813 829
814 830 CODE = 'dop'
815 831 colormap = 'jet'
816 832
817 833
818 834 class PlotSkyMapData(PlotData):
819 835 '''
820 836 Plot for meteors detection data
821 837 '''
822 838
823 CODE = 'met'
839 CODE = 'param'
824 840
825 841 def setup(self):
826 842
827 843 self.ncols = 1
828 844 self.nrows = 1
829 845 self.width = 7.2
830 846 self.height = 7.2
831
847 self.nplots = 1
832 848 self.xlabel = 'Zonal Zenith Angle (deg)'
833 849 self.ylabel = 'Meridional Zenith Angle (deg)'
834
835 if self.figure is None:
836 self.figure = plt.figure(figsize=(self.width, self.height),
837 edgecolor='k',
838 facecolor='w')
839 else:
840 self.figure.clf()
841
842 self.ax = plt.subplot2grid(
843 (self.nrows, self.ncols), (0, 0), 1, 1, polar=True)
844 self.ax.firsttime = True
850 self.polar = True
851 self.ymin = -180
852 self.ymax = 180
853 self.colorbar = False
845 854
846 855 def plot(self):
847 856
848 arrayParameters = numpy.concatenate(
849 [self.data['param'][t] for t in self.times])
857 arrayParameters = numpy.concatenate(self.data['param'])
850 858 error = arrayParameters[:, -1]
851 859 indValid = numpy.where(error == 0)[0]
852 860 finalMeteor = arrayParameters[indValid, :]
853 861 finalAzimuth = finalMeteor[:, 3]
854 862 finalZenith = finalMeteor[:, 4]
855 863
856 864 x = finalAzimuth * numpy.pi / 180
857 865 y = finalZenith
858 866
859 if self.ax.firsttime:
860 self.ax.plot = self.ax.plot(x, y, 'bo', markersize=5)[0]
861 self.ax.set_ylim(0, 90)
862 self.ax.set_yticks(numpy.arange(0, 90, 20))
863 self.ax.set_xlabel(self.xlabel)
864 self.ax.set_ylabel(self.ylabel)
865 self.ax.yaxis.labelpad = 40
866 self.ax.firsttime = False
867 ax = self.axes[0]
868
869 if ax.firsttime:
870 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
867 871 else:
868 self.ax.plot.set_data(x, y)
872 ax.plot.set_data(x, y)
869 873
870 874 dt1 = self.getDateTime(self.min_time).strftime('%y/%m/%d %H:%M:%S')
871 875 dt2 = self.getDateTime(self.max_time).strftime('%y/%m/%d %H:%M:%S')
872 876 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
873 877 dt2,
874 878 len(x))
875 self.ax.set_title(title, size=8)
879 self.titles[0] = title
876 880 self.saveTime = self.max_time
877 881
878 882
879 883 class PlotParamData(PlotRTIData):
880 884 '''
881 885 Plot for data_param object
882 886 '''
883 887
884 888 CODE = 'param'
885 889 colormap = 'seismic'
886 890
887 891 def setup(self):
888 892 self.xaxis = 'time'
889 893 self.ncols = 1
890 894 self.nrows = self.data.shape(self.CODE)[0]
891 895 self.nplots = self.nrows
892 896 if self.showSNR:
893 897 self.nrows += 1
894 898 self.nplots += 1
895 899
896 900 self.ylabel = 'Height [Km]'
897 901 self.titles = self.data.parameters \
898 902 if self.data.parameters else ['Param {}'.format(x) for x in xrange(self.nrows)]
899 903 if self.showSNR:
900 904 self.titles.append('SNR')
901 905
902 906 def plot(self):
903 907 self.data.normalize_heights()
904 908 self.x = self.times
905 909 self.y = self.data.heights
906 910 if self.showSNR:
907 911 self.z = numpy.concatenate(
908 912 (self.data[self.CODE], self.data['snr'])
909 913 )
910 914 else:
911 915 self.z = self.data[self.CODE]
912 916
913 917 self.z = numpy.ma.masked_invalid(self.z)
914 918
915 919 for n, ax in enumerate(self.axes):
916 920
917 921 x, y, z = self.fill_gaps(*self.decimate())
918 922 self.zmax = self.zmax if self.zmax is not None else numpy.max(
919 923 self.z[n])
920 924 self.zmin = self.zmin if self.zmin is not None else numpy.min(
921 925 self.z[n])
922 926
923 927 if ax.firsttime:
924 928 if self.zlimits is not None:
925 929 self.zmin, self.zmax = self.zlimits[n]
926 930
927 931 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
928 932 vmin=self.zmin,
929 933 vmax=self.zmax,
930 934 cmap=self.cmaps[n]
931 935 )
932 936 else:
933 937 if self.zlimits is not None:
934 938 self.zmin, self.zmax = self.zlimits[n]
935 939 ax.collections.remove(ax.collections[0])
936 940 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
937 941 vmin=self.zmin,
938 942 vmax=self.zmax,
939 943 cmap=self.cmaps[n]
940 944 )
941 945
942 946 self.saveTime = self.min_time
943 947
944 948
945 949 class PlotOutputData(PlotParamData):
946 950 '''
947 951 Plot data_output object
948 952 '''
949 953
950 954 CODE = 'output'
951 955 colormap = 'seismic'
@@ -1,1830 +1,1833
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import traceback
16 16 import zmq
17 17
18 18 try:
19 19 from gevent import sleep
20 20 except:
21 21 from time import sleep
22 22
23 23 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
24 24 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
25 25
26 26 LOCALTIME = True
27 27
28 28
29 29 def isNumber(cad):
30 30 """
31 31 Checks whether the characters that make up a string can be converted to a number.
32 32
33 33 Exceptions:
34 34 If the given string cannot be converted to a number
35 35 Input:
36 36 str, the string to analyze to determine whether or not it can be converted to a number
37 37
38 38 Return:
39 39 True : if the string is numeric
40 40 False : if it is not a numeric string
41 41 """
42 42 try:
43 43 float(cad)
44 44 return True
45 45 except:
46 46 return False
47 47
48 48
49 49 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
50 50 """
51 51 This function determines whether or not a data file falls within the specified date range.
52 52
53 53 Inputs:
54 54 filename : full name of the data file in Jicamarca format (.r)
55 55
56 56 startUTSeconds : start date of the selected range, given in
57 57 seconds counted from 01/01/1970.
58 58 endUTSeconds : end date of the selected range, given in
59 59 seconds counted from 01/01/1970.
60 60
61 61 Return:
62 62 Boolean : returns True if the data file contains data within the specified
63 63 date range, otherwise returns False.
64 64
65 65 Exceptions:
66 66 If the file does not exist or cannot be opened
67 67 If the header cannot be read.
68 68
69 69 """
70 70 basicHeaderObj = BasicHeader(LOCALTIME)
71 71
72 72 try:
73 73 fp = open(filename, 'rb')
74 74 except IOError:
75 75 print "The file %s can't be opened" % (filename)
76 76 return 0
77 77
78 78 sts = basicHeaderObj.read(fp)
79 79 fp.close()
80 80
81 81 if not(sts):
82 82 print "Skipping the file %s because it has not a valid header" % (filename)
83 83 return 0
84 84
85 85 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
86 86 return 0
87 87
88 88 return 1
89 89
90 90
91 91 def isTimeInRange(thisTime, startTime, endTime):
92 92 if endTime >= startTime:
93 93 if (thisTime < startTime) or (thisTime > endTime):
94 94 return 0
95 95 return 1
96 96 else:
97 97 if (thisTime < startTime) and (thisTime > endTime):
98 98 return 0
99 99 return 1
100 100
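Editor's sketch of the wrap-around rule above, assuming isTimeInRange from this module is importable: when endTime is earlier than startTime the interval is treated as crossing midnight.

import datetime

print(isTimeInRange(datetime.time(12, 0), datetime.time(8, 0), datetime.time(18, 0)))   # 1: inside 08:00-18:00
print(isTimeInRange(datetime.time(23, 30), datetime.time(22, 0), datetime.time(2, 0)))  # 1: 22:00-02:00 wraps past midnight
print(isTimeInRange(datetime.time(12, 0), datetime.time(22, 0), datetime.time(2, 0)))   # 0: outside the wrapped range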
101 101
102 102 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
103 103 """
104 104 Returns 1 if the data file falls within the specified time range.
105 105
106 106 Inputs:
107 107 filename : full name of the data file in Jicamarca format (.r)
108 108
109 109 startDate : start date of the selected range, as a datetime.date
110 110
111 111 endDate : end date of the selected range, as a datetime.date
112 112
113 113 startTime : start time of the selected range, as a datetime.time
114 114
115 115 endTime : end time of the selected range, as a datetime.time
116 116
117 117 Return:
118 118 Boolean : returns True if the data file contains data within the specified
119 119 date range, otherwise returns False.
120 120
121 121 Exceptions:
122 122 If the file does not exist or cannot be opened
123 123 If the header cannot be read.
124 124
125 125 """
126 126
127 127 try:
128 128 fp = open(filename, 'rb')
129 129 except IOError:
130 130 print "The file %s can't be opened" % (filename)
131 131 return None
132 132
133 133 firstBasicHeaderObj = BasicHeader(LOCALTIME)
134 134 systemHeaderObj = SystemHeader()
135 135 radarControllerHeaderObj = RadarControllerHeader()
136 136 processingHeaderObj = ProcessingHeader()
137 137
138 138 lastBasicHeaderObj = BasicHeader(LOCALTIME)
139 139
140 140 sts = firstBasicHeaderObj.read(fp)
141 141
142 142 if not(sts):
143 143 print "[Reading] Skipping the file %s because it has not a valid header" % (filename)
144 144 return None
145 145
146 146 if not systemHeaderObj.read(fp):
147 147 return None
148 148
149 149 if not radarControllerHeaderObj.read(fp):
150 150 return None
151 151
152 152 if not processingHeaderObj.read(fp):
153 153 return None
154 154
155 155 filesize = os.path.getsize(filename)
156 156
157 157 offset = processingHeaderObj.blockSize + 24 # header size
158 158
159 159 if filesize <= offset:
160 160 print "[Reading] %s: This file has not enough data" % filename
161 161 return None
162 162
163 163 fp.seek(-offset, 2)
164 164
165 165 sts = lastBasicHeaderObj.read(fp)
166 166
167 167 fp.close()
168 168
169 169 thisDatetime = lastBasicHeaderObj.datatime
170 170 thisTime_last_block = thisDatetime.time()
171 171
172 172 thisDatetime = firstBasicHeaderObj.datatime
173 173 thisDate = thisDatetime.date()
174 174 thisTime_first_block = thisDatetime.time()
175 175
176 176 # General case
177 177 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
178 178 #-----------o----------------------------o-----------
179 179 # startTime endTime
180 180
181 181 if endTime >= startTime:
182 182 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
183 183 return None
184 184
185 185 return thisDatetime
186 186
187 187 # If endTime < startTime then endTime belongs to the next day
188 188
189 189 #<<<<<<<<<<<o o>>>>>>>>>>>
190 190 #-----------o----------------------------o-----------
191 191 # endTime startTime
192 192
193 193 if (thisDate == startDate) and (thisTime_last_block < startTime):
194 194 return None
195 195
196 196 if (thisDate == endDate) and (thisTime_first_block > endTime):
197 197 return None
198 198
199 199 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
200 200 return None
201 201
202 202 return thisDatetime
203 203
204 204
205 205 def isFolderInDateRange(folder, startDate=None, endDate=None):
206 206 """
207 207 Returns 1 if the data folder falls within the specified date range.
208 208
209 209 Inputs:
210 210 folder : full name of the directory.
211 211 Its format should be "/path_root/?YYYYDDD"
212 212
213 213 where:
214 214 YYYY : year (e.g. 2015)
215 215 DDD : day of the year (e.g. 305)
216 216
217 217 startDate : start date of the selected range, as a datetime.date
218 218
219 219 endDate : end date of the selected range, as a datetime.date
220 220
221 221 Return:
222 222 Boolean : returns True if the folder contains data within the specified
223 223 date range, otherwise returns False.
224 224 Exceptions:
225 225 If the directory does not have the expected format
226 226 """
227 227
228 228 basename = os.path.basename(folder)
229 229
230 230 if not isRadarFolder(basename):
231 231 print "The folder %s has not the rigth format" % folder
232 232 return 0
233 233
234 234 if startDate and endDate:
235 235 thisDate = getDateFromRadarFolder(basename)
236 236
237 237 if thisDate < startDate:
238 238 return 0
239 239
240 240 if thisDate > endDate:
241 241 return 0
242 242
243 243 return 1
244 244
245 245
246 246 def isFileInDateRange(filename, startDate=None, endDate=None):
247 247 """
248 248 Returns 1 if the data file falls within the specified date range.
249 249
250 250 Inputs:
251 251 filename : full name of the data file in Jicamarca format (.r)
252 252
253 253 Its format should be "?YYYYDDDsss"
254 254
255 255 where:
256 256 YYYY : year (e.g. 2015)
257 257 DDD : day of the year (e.g. 305)
258 258 sss : set
259 259
260 260 startDate : start date of the selected range, as a datetime.date
261 261
262 262 endDate : end date of the selected range, as a datetime.date
263 263
264 264 Return:
265 265 Boolean : returns True if the data file contains data within the specified
266 266 date range, otherwise returns False.
267 267 Exceptions:
268 268 If the file does not have the expected format
269 269 """
270 270
271 271 basename = os.path.basename(filename)
272 272
273 273 if not isRadarFile(basename):
274 274 print "The filename %s has not the rigth format" % filename
275 275 return 0
276 276
277 277 if startDate and endDate:
278 278 thisDate = getDateFromRadarFile(basename)
279 279
280 280 if thisDate < startDate:
281 281 return 0
282 282
283 283 if thisDate > endDate:
284 284 return 0
285 285
286 286 return 1
287 287
288 288
289 289 def getFileFromSet(path, ext, set):
290 290 validFilelist = []
291 291 fileList = os.listdir(path)
292 292
293 293 # 0 1234 567 89A BCDE
294 294 # H YYYY DDD SSS .ext
295 295
296 296 for thisFile in fileList:
297 297 try:
298 298 year = int(thisFile[1:5])
299 299 doy = int(thisFile[5:8])
300 300 except:
301 301 continue
302 302
303 303 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
304 304 continue
305 305
306 306 validFilelist.append(thisFile)
307 307
308 308 myfile = fnmatch.filter(
309 309 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
310 310
311 311 if len(myfile) != 0:
312 312 return myfile[0]
313 313 else:
314 314 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
315 315 print 'the filename %s does not exist' % filename
316 316 print '...going to the last file: '
317 317
318 318 if validFilelist:
319 319 validFilelist = sorted(validFilelist, key=str.lower)
320 320 return validFilelist[-1]
321 321
322 322 return None
323 323
324 324
325 325 def getlastFileFromPath(path, ext):
326 326 """
327 327 Filters the fileList, keeping only the files that match the "PYYYYDDDSSS.ext" format,
328 328 and returns the last file of the list that remains.
329 329
330 330 Input:
331 331 fileList : list containing all the files (without path) in a given folder
332 332 ext : extension of the files contained in the folder
333 333
334 334 Return:
335 335 The last file of the given folder; the path is not considered.
336 336 """
337 337 validFilelist = []
338 338 fileList = os.listdir(path)
339 339
340 340 # 0 1234 567 89A BCDE
341 341 # H YYYY DDD SSS .ext
342 342
343 343 for thisFile in fileList:
344 344
345 345 year = thisFile[1:5]
346 346 if not isNumber(year):
347 347 continue
348 348
349 349 doy = thisFile[5:8]
350 350 if not isNumber(doy):
351 351 continue
352 352
353 353 year = int(year)
354 354 doy = int(doy)
355 355
356 356 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
357 357 continue
358 358
359 359 validFilelist.append(thisFile)
360 360
361 361 if validFilelist:
362 362 validFilelist = sorted(validFilelist, key=str.lower)
363 363 return validFilelist[-1]
364 364
365 365 return None
366 366
367 367
368 368 def checkForRealPath(path, foldercounter, year, doy, set, ext):
369 369 """
370 370 Since Linux is case sensitive, checkForRealPath finds the correct spelling of a path.
371 371 It tries several upper/lower case combinations of the name to determine
372 372 the exact path of a given file.
373 373
374 374 Example :
375 375 the correct file name is .../.../D2009307/P2009307367.ext
376 376
377 377 The function then tries the following combinations
378 378 .../.../y2009307367.ext
379 379 .../.../Y2009307367.ext
380 380 .../.../x2009307/y2009307367.ext
381 381 .../.../x2009307/Y2009307367.ext
382 382 .../.../X2009307/y2009307367.ext
383 383 .../.../X2009307/Y2009307367.ext
384 384 in this case the last combination of letters is identical to the file being searched for
385 385
386 386 Return:
387 387 If it finds the right combination it returns the full path and the file name;
388 388 otherwise it returns None as the path and the last upper-case name combination
389 389 as the filename
390 390 """
391 391 fullfilename = None
392 392 find_flag = False
393 393 filename = None
394 394
395 395 prefixDirList = [None, 'd', 'D']
396 396 if ext.lower() == ".r": # voltage
397 397 prefixFileList = ['d', 'D']
398 398 elif ext.lower() == ".pdata": # spectra
399 399 prefixFileList = ['p', 'P']
400 400 else:
401 401 return None, filename
402 402
403 403 # sweep over the possible combinations
404 404 for prefixDir in prefixDirList:
405 405 thispath = path
406 406 if prefixDir != None:
407 407 # build the directory name xYYYYDDD (x=d or x=D)
408 408 if foldercounter == 0:
409 409 thispath = os.path.join(path, "%s%04d%03d" %
410 410 (prefixDir, year, doy))
411 411 else:
412 412 thispath = os.path.join(path, "%s%04d%03d_%02d" % (
413 413 prefixDir, year, doy, foldercounter))
414 414 for prefixFile in prefixFileList: # sweep over the two possible combinations of "D"
415 415 # build the file name xYYYYDDDSSS.ext
416 416 filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext)
417 417 fullfilename = os.path.join(
418 418 thispath, filename) # build the full path
419 419
420 420 if os.path.exists(fullfilename): # check that it exists
421 421 find_flag = True
422 422 break
423 423 if find_flag:
424 424 break
425 425
426 426 if not(find_flag):
427 427 return None, filename
428 428
429 429 return fullfilename, filename
430 430
431 431
432 432 def isRadarFolder(folder):
433 433 try:
434 434 year = int(folder[1:5])
435 435 doy = int(folder[5:8])
436 436 except:
437 437 return 0
438 438
439 439 return 1
440 440
441 441
442 442 def isRadarFile(file):
443 443 try:
444 444 year = int(file[1:5])
445 445 doy = int(file[5:8])
446 446 set = int(file[8:11])
447 447 except:
448 448 return 0
449 449
450 450 return 1
451 451
452 452
453 453 def getDateFromRadarFile(file):
454 454 try:
455 455 year = int(file[1:5])
456 456 doy = int(file[5:8])
457 457 set = int(file[8:11])
458 458 except:
459 459 return None
460 460
461 461 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
462 462 return thisDate
463 463
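Editor's sketch of the field layout decoded above, using an invented filename: the four characters after the prefix letter are the year and the next three the day of the year, which is exactly the conversion getDateFromRadarFile performs.

import datetime

name = 'D2015305001.r'                  # ?YYYYDDDsss plus extension
year, doy = int(name[1:5]), int(name[5:8])
print(datetime.date(year, 1, 1) + datetime.timedelta(doy - 1))   # 2015-11-01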
464 464
465 465 def getDateFromRadarFolder(folder):
466 466 try:
467 467 year = int(folder[1:5])
468 468 doy = int(folder[5:8])
469 469 except:
470 470 return None
471 471
472 472 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
473 473 return thisDate
474 474
475 475
476 476 class JRODataIO:
477 477
478 478 c = 3E8
479 479
480 480 isConfig = False
481 481
482 482 basicHeaderObj = None
483 483
484 484 systemHeaderObj = None
485 485
486 486 radarControllerHeaderObj = None
487 487
488 488 processingHeaderObj = None
489 489
490 490 dtype = None
491 491
492 492 pathList = []
493 493
494 494 filenameList = []
495 495
496 496 filename = None
497 497
498 498 ext = None
499 499
500 500 flagIsNewFile = 1
501 501
502 502 flagDiscontinuousBlock = 0
503 503
504 504 flagIsNewBlock = 0
505 505
506 506 fp = None
507 507
508 508 firstHeaderSize = 0
509 509
510 510 basicHeaderSize = 24
511 511
512 512 versionFile = 1103
513 513
514 514 fileSize = None
515 515
516 516 # ippSeconds = None
517 517
518 518 fileSizeByHeader = None
519 519
520 520 fileIndex = None
521 521
522 522 profileIndex = None
523 523
524 524 blockIndex = None
525 525
526 526 nTotalBlocks = None
527 527
528 528 maxTimeStep = 30
529 529
530 530 lastUTTime = None
531 531
532 532 datablock = None
533 533
534 534 dataOut = None
535 535
536 536 blocksize = None
537 537
538 538 getByBlock = False
539 539
540 540 def __init__(self):
541 541
542 542 raise NotImplementedError
543 543
544 544 def run(self):
545 545
546 546 raise NotImplementedError
547 547
548 548 def getDtypeWidth(self):
549 549
550 550 dtype_index = get_dtype_index(self.dtype)
551 551 dtype_width = get_dtype_width(dtype_index)
552 552
553 553 return dtype_width
554 554
555 555 def getAllowedArgs(self):
556 return inspect.getargspec(self.run).args
556 if hasattr(self, '__attrs__'):
557 return self.__attrs__
558 else:
559 return inspect.getargspec(self.run).args
557 560
558 561
559 562 class JRODataReader(JRODataIO):
560 563
561 564 online = 0
562 565
563 566 realtime = 0
564 567
565 568 nReadBlocks = 0
566 569
567 570 delay = 10 # number of seconds to wait for a new file
568 571
569 572 nTries = 3 # number of tries
570 573
571 574 nFiles = 3 # number of files to search
572 575
573 576 path = None
574 577
575 578 foldercounter = 0
576 579
577 580 flagNoMoreFiles = 0
578 581
579 582 datetimeList = []
580 583
581 584 __isFirstTimeOnline = 1
582 585
583 586 __printInfo = True
584 587
585 588 profileIndex = None
586 589
587 590 nTxs = 1
588 591
589 592 txIndex = None
590 593
591 594 # Added--------------------
592 595
593 596 selBlocksize = None
594 597
595 598 selBlocktime = None
596 599
597 600 def __init__(self):
598 601 """
599 602 This class is used to find data files
600 603
601 604 Example:
602 605 reader = JRODataReader()
603 606 fileList = reader.findDataFiles()
604 607
605 608 """
606 609 pass
607 610
608 611 def createObjByDefault(self):
609 612 """
610 613
611 614 """
612 615 raise NotImplementedError
613 616
614 617 def getBlockDimension(self):
615 618
616 619 raise NotImplementedError
617 620
618 621 def searchFilesOffLine(self,
619 622 path,
620 623 startDate=None,
621 624 endDate=None,
622 625 startTime=datetime.time(0, 0, 0),
623 626 endTime=datetime.time(23, 59, 59),
624 627 set=None,
625 628 expLabel='',
626 629 ext='.r',
627 630 cursor=None,
628 631 skip=None,
629 632 walk=True):
630 633
631 634 self.filenameList = []
632 635 self.datetimeList = []
633 636
634 637 pathList = []
635 638
636 639 dateList, pathList = self.findDatafiles(
637 640 path, startDate, endDate, expLabel, ext, walk, include_path=True)
638 641
639 642 if dateList == []:
640 643 return [], []
641 644
642 645 if len(dateList) > 1:
643 646 print "[Reading] Data found for date range [%s - %s]: total days = %d" % (startDate, endDate, len(dateList))
644 647 else:
645 648 print "[Reading] Data found for date range [%s - %s]: date = %s" % (startDate, endDate, dateList[0])
646 649
647 650 filenameList = []
648 651 datetimeList = []
649 652
650 653 for thisPath in pathList:
651 654
652 655 fileList = glob.glob1(thisPath, "*%s" % ext)
653 656 fileList.sort()
654 657
655 658 skippedFileList = []
656 659
657 660 if cursor is not None and skip is not None:
658 661
659 662 if skip == 0:
660 663 skippedFileList = []
661 664 else:
662 665 skippedFileList = fileList[cursor *
663 666 skip: cursor * skip + skip]
664 667
665 668 else:
666 669 skippedFileList = fileList
667 670
668 671 for file in skippedFileList:
669 672
670 673 filename = os.path.join(thisPath, file)
671 674
672 675 if not isFileInDateRange(filename, startDate, endDate):
673 676 continue
674 677
675 678 thisDatetime = isFileInTimeRange(
676 679 filename, startDate, endDate, startTime, endTime)
677 680
678 681 if not(thisDatetime):
679 682 continue
680 683
681 684 filenameList.append(filename)
682 685 datetimeList.append(thisDatetime)
683 686
684 687 if not(filenameList):
685 688 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" % (startTime, endTime, ext, path)
686 689 return [], []
687 690
688 691 print "[Reading] %d file(s) was(were) found in time range: %s - %s" % (len(filenameList), startTime, endTime)
689 692 print
690 693
691 694 # for i in range(len(filenameList)):
692 695 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
693 696
694 697 self.filenameList = filenameList
695 698 self.datetimeList = datetimeList
696 699
697 700 return pathList, filenameList
698 701
699 702 def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True, set=None):
700 703 """
701 704 Looks for the last file in the last folder (whether or not determined by startDateTime) and
702 705 returns the file found, along with other data.
703 706
704 707 Input:
705 708 path : folder containing the files that hold the data
706 709
707 710 expLabel : name of the sub-experiment (subfolder)
708 711
709 712 ext : extension of the files
710 713
711 714 walk : if enabled, searches for data inside the day subdirectories (doypath)
712 715
713 716 Return:
714 717 directory : the directory where the file was found
715 718 filename : the last file of the given folder
716 719 year : the year
717 720 doy : the day of the year
718 721 set : the set of the file
719 722
720 723
721 724 """
722 725 if not os.path.isdir(path):
723 726 return None, None, None, None, None, None
724 727
725 728 dirList = []
726 729
727 730 if not walk:
728 731 fullpath = path
729 732 foldercounter = 0
730 733 else:
731 734 # keep only the directories
732 735 for thisPath in os.listdir(path):
733 736 if not os.path.isdir(os.path.join(path, thisPath)):
734 737 continue
735 738 if not isRadarFolder(thisPath):
736 739 continue
737 740
738 741 dirList.append(thisPath)
739 742
740 743 if not(dirList):
741 744 return None, None, None, None, None, None
742 745
743 746 dirList = sorted(dirList, key=str.lower)
744 747
745 748 doypath = dirList[-1]
746 749 foldercounter = int(doypath.split('_')[1]) if len(
747 750 doypath.split('_')) > 1 else 0
748 751 fullpath = os.path.join(path, doypath, expLabel)
749 752
750 753 print "[Reading] %s folder was found: " % (fullpath)
751 754
752 755 if set == None:
753 756 filename = getlastFileFromPath(fullpath, ext)
754 757 else:
755 758 filename = getFileFromSet(fullpath, ext, set)
756 759
757 760 if not(filename):
758 761 return None, None, None, None, None, None
759 762
760 763 print "[Reading] %s file was found" % (filename)
761 764
762 765 if not(self.__verifyFile(os.path.join(fullpath, filename))):
763 766 return None, None, None, None, None, None
764 767
765 768 year = int(filename[1:5])
766 769 doy = int(filename[5:8])
767 770 set = int(filename[8:11])
768 771
769 772 return fullpath, foldercounter, filename, year, doy, set
770 773
771 774 def __setNextFileOffline(self):
772 775
773 776 idFile = self.fileIndex
774 777
775 778 while (True):
776 779 idFile += 1
777 780 if not(idFile < len(self.filenameList)):
778 781 self.flagNoMoreFiles = 1
779 782 # print "[Reading] No more Files"
780 783 return 0
781 784
782 785 filename = self.filenameList[idFile]
783 786
784 787 if not(self.__verifyFile(filename)):
785 788 continue
786 789
787 790 fileSize = os.path.getsize(filename)
788 791 fp = open(filename, 'rb')
789 792 break
790 793
791 794 self.flagIsNewFile = 1
792 795 self.fileIndex = idFile
793 796 self.filename = filename
794 797 self.fileSize = fileSize
795 798 self.fp = fp
796 799
797 800 # print "[Reading] Setting the file: %s"%self.filename
798 801
799 802 return 1
800 803
801 804 def __setNextFileOnline(self):
802 805 """
803 806 Looks for the next file with enough data to be read inside a specific folder; if
804 807 no valid file is found it waits a given time and then looks among the next n
805 808 possible files.
806 809
807 810 Affected:
808 811 self.flagIsNewFile
809 812 self.filename
810 813 self.fileSize
811 814 self.fp
812 815 self.set
813 816 self.flagNoMoreFiles
814 817
815 818 Return:
816 819 0 : if, after searching for the next valid file, none could be found
817 820 1 : if the file was opened successfully and is ready to be read
818 821
819 822 Exceptions:
820 823 If a given file cannot be opened
821 824 """
822 825 nFiles = 0
823 826 fileOk_flag = False
824 827 firstTime_flag = True
825 828
826 829 self.set += 1
827 830
828 831 if self.set > 999:
829 832 self.set = 0
830 833 self.foldercounter += 1
831 834
832 835 # look for the first available file
833 836 fullfilename, filename = checkForRealPath(
834 837 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
835 838 if fullfilename:
836 839 if self.__verifyFile(fullfilename, False):
837 840 fileOk_flag = True
838 841
839 842 # if no file was found, wait and search again
840 843 if not(fileOk_flag):
841 844 # search among the next self.nFiles+1 possible files
842 845 for nFiles in range(self.nFiles + 1):
843 846
844 847 if firstTime_flag: # on the first pass, run the loop self.nTries times
845 848 tries = self.nTries
846 849 else:
847 850 tries = 1 # on later passes, try only once
848 851
849 852 for nTries in range(tries):
850 853 if firstTime_flag:
851 854 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1)
852 855 sleep(self.delay)
853 856 else:
854 857 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
855 858
856 859 fullfilename, filename = checkForRealPath(
857 860 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
858 861 if fullfilename:
859 862 if self.__verifyFile(fullfilename):
860 863 fileOk_flag = True
861 864 break
862 865
863 866 if fileOk_flag:
864 867 break
865 868
866 869 firstTime_flag = False
867 870
868 871 print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
869 872 self.set += 1
870 873
871 874 # if the searched file was not found, move to the next folder and search there
872 875 if nFiles == (self.nFiles - 1):
873 876 self.set = 0
874 877 self.doy += 1
875 878 self.foldercounter = 0
876 879
877 880 if fileOk_flag:
878 881 self.fileSize = os.path.getsize(fullfilename)
879 882 self.filename = fullfilename
880 883 self.flagIsNewFile = 1
881 884 if self.fp != None:
882 885 self.fp.close()
883 886 self.fp = open(fullfilename, 'rb')
884 887 self.flagNoMoreFiles = 0
885 888 # print '[Reading] Setting the file: %s' % fullfilename
886 889 else:
887 890 self.fileSize = 0
888 891 self.filename = None
889 892 self.flagIsNewFile = 0
890 893 self.fp = None
891 894 self.flagNoMoreFiles = 1
892 895 # print '[Reading] No more files to read'
893 896
894 897 return fileOk_flag
895 898
896 899 def setNextFile(self):
897 900 if self.fp != None:
898 901 self.fp.close()
899 902
900 903 if self.online:
901 904 newFile = self.__setNextFileOnline()
902 905 else:
903 906 newFile = self.__setNextFileOffline()
904 907
905 908 if not(newFile):
906 909 print '[Reading] No more files to read'
907 910 return 0
908 911
909 912 if self.verbose:
910 913 print '[Reading] Setting the file: %s' % self.filename
911 914
912 915 self.__readFirstHeader()
913 916 self.nReadBlocks = 0
914 917 return 1
915 918
916 919 def __waitNewBlock(self):
917 920 """
918 921 Return 1 if a new data block was found, 0 otherwise.
919 922 
920 923 If the reading mode is offline, it always returns 0.
921 924 """
922 925 if not self.online:
923 926 return 0
924 927
925 928 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
926 929 return 0
927 930
928 931 currentPointer = self.fp.tell()
929 932
930 933 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
931 934
932 935 for nTries in range(self.nTries):
933 936
934 937 self.fp.close()
935 938 self.fp = open(self.filename, 'rb')
936 939 self.fp.seek(currentPointer)
937 940
938 941 self.fileSize = os.path.getsize(self.filename)
939 942 currentSize = self.fileSize - currentPointer
940 943
941 944 if (currentSize >= neededSize):
942 945 self.basicHeaderObj.read(self.fp)
943 946 return 1
944 947
945 948 if self.fileSize == self.fileSizeByHeader:
946 949 # self.flagEoF = True
947 950 return 0
948 951
949 952 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
950 953 sleep(self.delay)
951 954
952 955 return 0
953 956
954 957 def waitDataBlock(self, pointer_location):
955 958
956 959 currentPointer = pointer_location
957 960
958 961 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
959 962
960 963 for nTries in range(self.nTries):
961 964 self.fp.close()
962 965 self.fp = open(self.filename, 'rb')
963 966 self.fp.seek(currentPointer)
964 967
965 968 self.fileSize = os.path.getsize(self.filename)
966 969 currentSize = self.fileSize - currentPointer
967 970
968 971 if (currentSize >= neededSize):
969 972 return 1
970 973
971 974 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1)
972 975 sleep(self.delay)
973 976
974 977 return 0
975 978
976 979 def __jumpToLastBlock(self):
977 980
978 981 if not(self.__isFirstTimeOnline):
979 982 return
980 983
981 984 csize = self.fileSize - self.fp.tell()
982 985 blocksize = self.processingHeaderObj.blockSize
983 986
984 987 # skip the first data block
985 988 if csize > self.processingHeaderObj.blockSize:
986 989 self.fp.seek(self.fp.tell() + blocksize)
987 990 else:
988 991 return
989 992
990 993 csize = self.fileSize - self.fp.tell()
991 994 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
992 995 while True:
993 996
994 997 if self.fp.tell() < self.fileSize:
995 998 self.fp.seek(self.fp.tell() + neededsize)
996 999 else:
997 1000 self.fp.seek(self.fp.tell() - neededsize)
998 1001 break
999 1002
1000 1003 # csize = self.fileSize - self.fp.tell()
1001 1004 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1002 1005 # factor = int(csize/neededsize)
1003 1006 # if factor > 0:
1004 1007 # self.fp.seek(self.fp.tell() + factor*neededsize)
1005 1008
1006 1009 self.flagIsNewFile = 0
1007 1010 self.__isFirstTimeOnline = 0
1008 1011
1009 1012 def __setNewBlock(self):
1010 1013 # if self.server is None:
1011 1014 if self.fp == None:
1012 1015 return 0
1013 1016
1014 1017 # if self.online:
1015 1018 # self.__jumpToLastBlock()
1016 1019
1017 1020 if self.flagIsNewFile:
1018 1021 self.lastUTTime = self.basicHeaderObj.utc
1019 1022 return 1
1020 1023
1021 1024 if self.realtime:
1022 1025 self.flagDiscontinuousBlock = 1
1023 1026 if not(self.setNextFile()):
1024 1027 return 0
1025 1028 else:
1026 1029 return 1
1027 1030 # if self.server is None:
1028 1031 currentSize = self.fileSize - self.fp.tell()
1029 1032 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1030 1033 if (currentSize >= neededSize):
1031 1034 self.basicHeaderObj.read(self.fp)
1032 1035 self.lastUTTime = self.basicHeaderObj.utc
1033 1036 return 1
1034 1037 # else:
1035 1038 # self.basicHeaderObj.read(self.zHeader)
1036 1039 # self.lastUTTime = self.basicHeaderObj.utc
1037 1040 # return 1
1038 1041 if self.__waitNewBlock():
1039 1042 self.lastUTTime = self.basicHeaderObj.utc
1040 1043 return 1
1041 1044 # if self.server is None:
1042 1045 if not(self.setNextFile()):
1043 1046 return 0
1044 1047
1045 1048 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
1046 1049 self.lastUTTime = self.basicHeaderObj.utc
1047 1050
1048 1051 self.flagDiscontinuousBlock = 0
1049 1052
1050 1053 if deltaTime > self.maxTimeStep:
1051 1054 self.flagDiscontinuousBlock = 1
1052 1055
1053 1056 return 1
1054 1057
1055 1058 def readNextBlock(self):
1056 1059
1057 1060 # Skip block out of startTime and endTime
1058 1061 while True:
1059 1062 if not(self.__setNewBlock()):
1060 1063 return 0
1061 1064
1062 1065 if not(self.readBlock()):
1063 1066 return 0
1064 1067
1065 1068 self.getBasicHeader()
1066 1069 if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
1067 1070 print "[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
1068 1071 self.processingHeaderObj.dataBlocksPerFile,
1069 1072 self.dataOut.datatime.ctime())
1070 1073 continue
1071 1074
1072 1075 break
1073 1076
1074 1077 if self.verbose:
1075 1078 print "[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
1076 1079 self.processingHeaderObj.dataBlocksPerFile,
1077 1080 self.dataOut.datatime.ctime())
1078 1081 return 1
1079 1082
1080 1083 def __readFirstHeader(self):
1081 1084
1082 1085 self.basicHeaderObj.read(self.fp)
1083 1086 self.systemHeaderObj.read(self.fp)
1084 1087 self.radarControllerHeaderObj.read(self.fp)
1085 1088 self.processingHeaderObj.read(self.fp)
1086 1089
1087 1090 self.firstHeaderSize = self.basicHeaderObj.size
1088 1091
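# The data type index is recovered from the processing flags: assuming the DATATYPE_* bits
# are successive powers of two, log2(flags & DATATYPE_MASK) - log2(DATATYPE_CHAR) yields
# 0..5, mapped below to int8, int16, int32, int64, float32 and float64 real/imag pairs.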
1089 1092 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
1090 1093 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
1091 1094 if datatype == 0:
1092 1095 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
1093 1096 elif datatype == 1:
1094 1097 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
1095 1098 elif datatype == 2:
1096 1099 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
1097 1100 elif datatype == 3:
1098 1101 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
1099 1102 elif datatype == 4:
1100 1103 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
1101 1104 elif datatype == 5:
1102 1105 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
1103 1106 else:
1104 1107 raise ValueError, 'Data type was not defined'
1105 1108
1106 1109 self.dtype = datatype_str
1107 1110 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1108 1111 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
1109 1112 self.firstHeaderSize + self.basicHeaderSize * \
1110 1113 (self.processingHeaderObj.dataBlocksPerFile - 1)
1111 1114 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1112 1115 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1113 1116 self.getBlockDimension()
1114 1117
1115 1118 def __verifyFile(self, filename, msgFlag=True):
1116 1119
1117 1120 msg = None
1118 1121
1119 1122 try:
1120 1123 fp = open(filename, 'rb')
1121 1124 except IOError:
1122 1125
1123 1126 if msgFlag:
1124 1127 print "[Reading] File %s can't be opened" % (filename)
1125 1128
1126 1129 return False
1127 1130
1128 1131 currentPosition = fp.tell()
1129 1132 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1130 1133
1131 1134 if neededSize == 0:
1132 1135 basicHeaderObj = BasicHeader(LOCALTIME)
1133 1136 systemHeaderObj = SystemHeader()
1134 1137 radarControllerHeaderObj = RadarControllerHeader()
1135 1138 processingHeaderObj = ProcessingHeader()
1136 1139
1137 1140 if not(basicHeaderObj.read(fp)):
1138 1141 fp.close()
1139 1142 return False
1140 1143
1141 1144 if not(systemHeaderObj.read(fp)):
1142 1145 fp.close()
1143 1146 return False
1144 1147
1145 1148 if not(radarControllerHeaderObj.read(fp)):
1146 1149 fp.close()
1147 1150 return False
1148 1151
1149 1152 if not(processingHeaderObj.read(fp)):
1150 1153 fp.close()
1151 1154 return False
1152 1155
1153 1156 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1154 1157 else:
1155 1158 msg = "[Reading] Skipping the file %s due to it hasn't enough data" % filename
1156 1159
1157 1160 fp.close()
1158 1161
1159 1162 fileSize = os.path.getsize(filename)
1160 1163 currentSize = fileSize - currentPosition
1161 1164
1162 1165 if currentSize < neededSize:
1163 1166 if msgFlag and (msg != None):
1164 1167 print msg
1165 1168 return False
1166 1169
1167 1170 return True
1168 1171
1169 1172 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1170 1173
1171 1174 path_empty = True
1172 1175
1173 1176 dateList = []
1174 1177 pathList = []
1175 1178
1176 1179 multi_path = path.split(',')
1177 1180
1178 1181 if not walk:
1179 1182
1180 1183 for single_path in multi_path:
1181 1184
1182 1185 if not os.path.isdir(single_path):
1183 1186 continue
1184 1187
1185 1188 fileList = glob.glob1(single_path, "*" + ext)
1186 1189
1187 1190 if not fileList:
1188 1191 continue
1189 1192
1190 1193 path_empty = False
1191 1194
1192 1195 fileList.sort()
1193 1196
1194 1197 for thisFile in fileList:
1195 1198
1196 1199 if not os.path.isfile(os.path.join(single_path, thisFile)):
1197 1200 continue
1198 1201
1199 1202 if not isRadarFile(thisFile):
1200 1203 continue
1201 1204
1202 1205 if not isFileInDateRange(thisFile, startDate, endDate):
1203 1206 continue
1204 1207
1205 1208 thisDate = getDateFromRadarFile(thisFile)
1206 1209
1207 1210 if thisDate in dateList:
1208 1211 continue
1209 1212
1210 1213 dateList.append(thisDate)
1211 1214 pathList.append(single_path)
1212 1215
1213 1216 else:
1214 1217 for single_path in multi_path:
1215 1218
1216 1219 if not os.path.isdir(single_path):
1217 1220 continue
1218 1221
1219 1222 dirList = []
1220 1223
1221 1224 for thisPath in os.listdir(single_path):
1222 1225
1223 1226 if not os.path.isdir(os.path.join(single_path, thisPath)):
1224 1227 continue
1225 1228
1226 1229 if not isRadarFolder(thisPath):
1227 1230 continue
1228 1231
1229 1232 if not isFolderInDateRange(thisPath, startDate, endDate):
1230 1233 continue
1231 1234
1232 1235 dirList.append(thisPath)
1233 1236
1234 1237 if not dirList:
1235 1238 continue
1236 1239
1237 1240 dirList.sort()
1238 1241
1239 1242 for thisDir in dirList:
1240 1243
1241 1244 datapath = os.path.join(single_path, thisDir, expLabel)
1242 1245 fileList = glob.glob1(datapath, "*" + ext)
1243 1246
1244 1247 if not fileList:
1245 1248 continue
1246 1249
1247 1250 path_empty = False
1248 1251
1249 1252 thisDate = getDateFromRadarFolder(thisDir)
1250 1253
1251 1254 pathList.append(datapath)
1252 1255 dateList.append(thisDate)
1253 1256
1254 1257 dateList.sort()
1255 1258
1256 1259 if walk:
1257 1260 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1258 1261 else:
1259 1262 pattern_path = multi_path[0]
1260 1263
1261 1264 if path_empty:
1262 1265 print "[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate)
1263 1266 else:
1264 1267 if not dateList:
1265 1268 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path)
1266 1269
1267 1270 if include_path:
1268 1271 return dateList, pathList
1269 1272
1270 1273 return dateList
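# Hypothetical usage sketch (illustrative path and dates):
# dates = reader.findDatafiles('/data/jro', startDate=datetime.date(2017, 1, 1),
# endDate=datetime.date(2017, 1, 31), ext='.r', walk=True)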
1271 1274
1272 1275 def setup(self,
1273 1276 path=None,
1274 1277 startDate=None,
1275 1278 endDate=None,
1276 1279 startTime=datetime.time(0, 0, 0),
1277 1280 endTime=datetime.time(23, 59, 59),
1278 1281 set=None,
1279 1282 expLabel="",
1280 1283 ext=None,
1281 1284 online=False,
1282 1285 delay=60,
1283 1286 walk=True,
1284 1287 getblock=False,
1285 1288 nTxs=1,
1286 1289 realtime=False,
1287 1290 blocksize=None,
1288 1291 blocktime=None,
1289 1292 skip=None,
1290 1293 cursor=None,
1291 1294 warnings=True,
1292 1295 verbose=True,
1293 1296 server=None,
1294 1297 format=None,
1295 1298 oneDDict=None,
1296 1299 twoDDict=None,
1297 1300 ind2DList=None):
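# When a server is given, data is pulled from a ZMQ socket instead of files: addresses
# containing 'tcp://' are used as-is, any other name is assumed to be an ipc socket
# under /tmp (see below).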
1298 1301 if server is not None:
1299 1302 if 'tcp://' in server:
1300 1303 address = server
1301 1304 else:
1302 1305 address = 'ipc:///tmp/%s' % server
1303 1306 self.server = address
1304 1307 self.context = zmq.Context()
1305 1308 self.receiver = self.context.socket(zmq.PULL)
1306 1309 self.receiver.connect(self.server)
1307 1310 time.sleep(0.5)
1308 1311 print '[Starting] ReceiverData from {}'.format(self.server)
1309 1312 else:
1310 1313 self.server = None
1311 1314 if path == None:
1312 1315 raise ValueError, "[Reading] The path is not valid"
1313 1316
1314 1317 if ext == None:
1315 1318 ext = self.ext
1316 1319
1317 1320 if online:
1318 1321 print "[Reading] Searching files in online mode..."
1319 1322
1320 1323 for nTries in range(self.nTries):
1321 1324 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(
1322 1325 path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1323 1326
1324 1327 if fullpath:
1325 1328 break
1326 1329
1327 1330 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries + 1)
1328 1331 sleep(self.delay)
1329 1332
1330 1333 if not(fullpath):
1331 1334 print "[Reading] There 'isn't any valid file in %s" % path
1332 1335 return
1333 1336
1334 1337 self.year = year
1335 1338 self.doy = doy
1336 1339 self.set = set - 1
1337 1340 self.path = path
1338 1341 self.foldercounter = foldercounter
1339 1342 last_set = None
1340 1343 else:
1341 1344 print "[Reading] Searching files in offline mode ..."
1342 1345 pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1343 1346 startTime=startTime, endTime=endTime,
1344 1347 set=set, expLabel=expLabel, ext=ext,
1345 1348 walk=walk, cursor=cursor,
1346 1349 skip=skip)
1347 1350
1348 1351 if not(pathList):
1349 1352 self.fileIndex = -1
1350 1353 self.pathList = []
1351 1354 self.filenameList = []
1352 1355 return
1353 1356
1354 1357 self.fileIndex = -1
1355 1358 self.pathList = pathList
1356 1359 self.filenameList = filenameList
1357 1360 file_name = os.path.basename(filenameList[-1])
1358 1361 basename, ext = os.path.splitext(file_name)
1359 1362 last_set = int(basename[-3:])
1360 1363
1361 1364 self.online = online
1362 1365 self.realtime = realtime
1363 1366 self.delay = delay
1364 1367 ext = ext.lower()
1365 1368 self.ext = ext
1366 1369 self.getByBlock = getblock
1367 1370 self.nTxs = nTxs
1368 1371 self.startTime = startTime
1369 1372 self.endTime = endTime
1370 1373 self.endDate = endDate
1371 1374 self.startDate = startDate
1372 1375 # Added-----------------
1373 1376 self.selBlocksize = blocksize
1374 1377 self.selBlocktime = blocktime
1375 1378
1376 1379 # Verbose-----------
1377 1380 self.verbose = verbose
1378 1381 self.warnings = warnings
1379 1382
1380 1383 if not(self.setNextFile()):
1381 1384 if (startDate != None) and (endDate != None):
1382 1385 print "[Reading] No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime())
1383 1386 elif startDate != None:
1384 1387 print "[Reading] No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime())
1385 1388 else:
1386 1389 print "[Reading] No files"
1387 1390
1388 1391 self.fileIndex = -1
1389 1392 self.pathList = []
1390 1393 self.filenameList = []
1391 1394 return
1392 1395
1393 1396 # self.getBasicHeader()
1394 1397
1395 1398 if last_set != None:
1396 1399 self.dataOut.last_block = last_set * \
1397 1400 self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1398 1401 return
1399 1402
1400 1403 def getBasicHeader(self):
1401 1404
1402 1405 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1403 1406 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1404 1407
1405 1408 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1406 1409
1407 1410 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1408 1411
1409 1412 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1410 1413
1411 1414 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1412 1415
1413 1416 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1414 1417
1415 1418 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1416 1419
1417 1420 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1418 1421
1419 1422 def getFirstHeader(self):
1420 1423
1421 1424 raise NotImplementedError
1422 1425
1423 1426 def getData(self):
1424 1427
1425 1428 raise NotImplementedError
1426 1429
1427 1430 def hasNotDataInBuffer(self):
1428 1431
1429 1432 raise NotImplementedError
1430 1433
1431 1434 def readBlock(self):
1432 1435
1433 1436 raise NotImplementedError
1434 1437
1435 1438 def isEndProcess(self):
1436 1439
1437 1440 return self.flagNoMoreFiles
1438 1441
1439 1442 def printReadBlocks(self):
1440 1443
1441 1444 print "[Reading] Number of read blocks per file %04d" % self.nReadBlocks
1442 1445
1443 1446 def printTotalBlocks(self):
1444 1447
1445 1448 print "[Reading] Number of read blocks %04d" % self.nTotalBlocks
1446 1449
1447 1450 def printNumberOfBlock(self):
1448 1451 'SPAM!'
1449 1452
1450 1453 # if self.flagIsNewBlock:
1451 1454 # print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1452 1455 # self.processingHeaderObj.dataBlocksPerFile,
1453 1456 # self.dataOut.datatime.ctime())
1454 1457
1455 1458 def printInfo(self):
1456 1459
1457 1460 if self.__printInfo == False:
1458 1461 return
1459 1462
1460 1463 self.basicHeaderObj.printInfo()
1461 1464 self.systemHeaderObj.printInfo()
1462 1465 self.radarControllerHeaderObj.printInfo()
1463 1466 self.processingHeaderObj.printInfo()
1464 1467
1465 1468 self.__printInfo = False
1466 1469
1467 1470 def run(self,
1468 1471 path=None,
1469 1472 startDate=None,
1470 1473 endDate=None,
1471 1474 startTime=datetime.time(0, 0, 0),
1472 1475 endTime=datetime.time(23, 59, 59),
1473 1476 set=None,
1474 1477 expLabel="",
1475 1478 ext=None,
1476 1479 online=False,
1477 1480 delay=60,
1478 1481 walk=True,
1479 1482 getblock=False,
1480 1483 nTxs=1,
1481 1484 realtime=False,
1482 1485 blocksize=None,
1483 1486 blocktime=None,
1484 1487 skip=None,
1485 1488 cursor=None,
1486 1489 warnings=True,
1487 1490 server=None,
1488 1491 verbose=True,
1489 1492 format=None,
1490 1493 oneDDict=None,
1491 1494 twoDDict=None,
1492 1495 ind2DList=None, **kwargs):
1493 1496
1494 1497 if not(self.isConfig):
1495 1498 self.setup(path=path,
1496 1499 startDate=startDate,
1497 1500 endDate=endDate,
1498 1501 startTime=startTime,
1499 1502 endTime=endTime,
1500 1503 set=set,
1501 1504 expLabel=expLabel,
1502 1505 ext=ext,
1503 1506 online=online,
1504 1507 delay=delay,
1505 1508 walk=walk,
1506 1509 getblock=getblock,
1507 1510 nTxs=nTxs,
1508 1511 realtime=realtime,
1509 1512 blocksize=blocksize,
1510 1513 blocktime=blocktime,
1511 1514 skip=skip,
1512 1515 cursor=cursor,
1513 1516 warnings=warnings,
1514 1517 server=server,
1515 1518 verbose=verbose,
1516 1519 format=format,
1517 1520 oneDDict=oneDDict,
1518 1521 twoDDict=twoDDict,
1519 1522 ind2DList=ind2DList)
1520 1523 self.isConfig = True
1521 1524 if server is None:
1522 1525 self.getData()
1523 1526 else:
1524 1527 self.getFromServer()
1525 1528
1526 1529
1527 1530 class JRODataWriter(JRODataIO):
1528 1531
1529 1532 """
1530 1533 This class writes data to processed files (.r or .pdata). Data is always
1531 1534 written in blocks.
1532 1535 """
1533 1536
1534 1537 blockIndex = 0
1535 1538
1536 1539 path = None
1537 1540
1538 1541 setFile = None
1539 1542
1540 1543 profilesPerBlock = None
1541 1544
1542 1545 blocksPerFile = None
1543 1546
1544 1547 nWriteBlocks = 0
1545 1548
1546 1549 fileDate = None
1547 1550
1548 1551 def __init__(self, dataOut=None):
1549 1552 raise NotImplementedError
1550 1553
1551 1554 def hasAllDataInBuffer(self):
1552 1555 raise NotImplementedError
1553 1556
1554 1557 def setBlockDimension(self):
1555 1558 raise NotImplementedError
1556 1559
1557 1560 def writeBlock(self):
1558 1561 raise NotImplementedError
1559 1562
1560 1563 def putData(self):
1561 1564 raise NotImplementedError
1562 1565
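# Sketch of the flag layout, assuming each PROCFLAG option is a single bit: the dtype flag
# is ORed with DECODE_DATA, DEFLIP_DATA, coherent/incoherent integration flags, etc., and
# the resulting mask is stored in the processing header so a reader can reconstruct the steps.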
1563 1566 def getProcessFlags(self):
1564 1567
1565 1568 processFlags = 0
1566 1569
1567 1570 dtype_index = get_dtype_index(self.dtype)
1568 1571 procflag_dtype = get_procflag_dtype(dtype_index)
1569 1572
1570 1573 processFlags += procflag_dtype
1571 1574
1572 1575 if self.dataOut.flagDecodeData:
1573 1576 processFlags += PROCFLAG.DECODE_DATA
1574 1577
1575 1578 if self.dataOut.flagDeflipData:
1576 1579 processFlags += PROCFLAG.DEFLIP_DATA
1577 1580
1578 1581 if self.dataOut.code is not None:
1579 1582 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1580 1583
1581 1584 if self.dataOut.nCohInt > 1:
1582 1585 processFlags += PROCFLAG.COHERENT_INTEGRATION
1583 1586
1584 1587 if self.dataOut.type == "Spectra":
1585 1588 if self.dataOut.nIncohInt > 1:
1586 1589 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1587 1590
1588 1591 if self.dataOut.data_dc is not None:
1589 1592 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1590 1593
1591 1594 if self.dataOut.flagShiftFFT:
1592 1595 processFlags += PROCFLAG.SHIFT_FFT_DATA
1593 1596
1594 1597 return processFlags
1595 1598
1596 1599 def setBasicHeader(self):
1597 1600
1598 1601 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1599 1602 self.basicHeaderObj.version = self.versionFile
1600 1603 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1601 1604
1602 1605 utc = numpy.floor(self.dataOut.utctime)
1603 1606 milisecond = (self.dataOut.utctime - utc) * 1000.0
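# e.g. utctime = 1500000000.25 -> utc = 1500000000, milisecond = 250 (illustrative values)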
1604 1607
1605 1608 self.basicHeaderObj.utc = utc
1606 1609 self.basicHeaderObj.miliSecond = milisecond
1607 1610 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1608 1611 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1609 1612 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1610 1613
1611 1614 def setFirstHeader(self):
1612 1615 """
1613 1616 Gets a copy of the First Header
1614 1617
1615 1618 Affected:
1616 1619
1617 1620 self.basicHeaderObj
1618 1621 self.systemHeaderObj
1619 1622 self.radarControllerHeaderObj
1620 1623 self.processingHeaderObj
1621 1624
1622 1625 Return:
1623 1626 None
1624 1627 """
1625 1628
1626 1629 raise NotImplementedError
1627 1630
1628 1631 def __writeFirstHeader(self):
1629 1632 """
1630 1633 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1631 1634
1632 1635 Affected:
1633 1636 __dataType
1634 1637
1635 1638 Return:
1636 1639 None
1637 1640 """
1638 1641
1639 1642 # COMPUTE PARAMETERS
1640 1643
1641 1644 sizeLongHeader = self.systemHeaderObj.size + \
1642 1645 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1643 1646 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1644 1647
1645 1648 self.basicHeaderObj.write(self.fp)
1646 1649 self.systemHeaderObj.write(self.fp)
1647 1650 self.radarControllerHeaderObj.write(self.fp)
1648 1651 self.processingHeaderObj.write(self.fp)
1649 1652
1650 1653 def __setNewBlock(self):
1651 1654 """
1652 1655 If this is a new file, writes the First Header; otherwise writes only the Basic Header
1653 1656 
1654 1657 Return:
1655 1658 0 : if nothing could be written
1656 1659 1 : if the Basic or the First Header was written
1657 1660 """
1658 1661 if self.fp == None:
1659 1662 self.setNextFile()
1660 1663
1661 1664 if self.flagIsNewFile:
1662 1665 return 1
1663 1666
1664 1667 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1665 1668 self.basicHeaderObj.write(self.fp)
1666 1669 return 1
1667 1670
1668 1671 if not(self.setNextFile()):
1669 1672 return 0
1670 1673
1671 1674 return 1
1672 1675
1673 1676 def writeNextBlock(self):
1674 1677 """
1675 1678 Selects the next data block and writes it to a file
1676 1679 
1677 1680 Return:
1678 1681 0 : if the data block could not be written
1679 1682 1 : if the data block was written successfully
1680 1683 """
1681 1684 if not(self.__setNewBlock()):
1682 1685 return 0
1683 1686
1684 1687 self.writeBlock()
1685 1688
1686 1689 print "[Writing] Block No. %d/%d" % (self.blockIndex,
1687 1690 self.processingHeaderObj.dataBlocksPerFile)
1688 1691
1689 1692 return 1
1690 1693
1691 1694 def setNextFile(self):
1692 1695 """
1693 1696 Determines the next file to be written
1694 1697
1695 1698 Affected:
1696 1699 self.filename
1697 1700 self.subfolder
1698 1701 self.fp
1699 1702 self.setFile
1700 1703 self.flagIsNewFile
1701 1704
1702 1705 Return:
1703 1706 0 : if the file cannot be written
1704 1707 1 : if the file is ready to be written
1705 1708 """
1706 1709 ext = self.ext
1707 1710 path = self.path
1708 1711
1709 1712 if self.fp != None:
1710 1713 self.fp.close()
1711 1714
1712 1715 timeTuple = time.localtime(self.dataOut.utctime)
1713 1716 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1714 1717
1715 1718 fullpath = os.path.join(path, subfolder)
1716 1719 setFile = self.setFile
1717 1720
1718 1721 if not(os.path.exists(fullpath)):
1719 1722 os.mkdir(fullpath)
1720 1723 setFile = -1 # initialize the set counter
1721 1724 else:
1722 1725 filesList = os.listdir(fullpath)
1723 1726 if len(filesList) > 0:
1724 1727 filesList = sorted(filesList, key=str.lower)
1725 1728 filen = filesList[-1]
1726 1729 # the filename must have the following format
1727 1730 # 0 1234 567 89A BCDE (hex)
1728 1731 # x YYYY DDD SSS .ext
1729 1732 if isNumber(filen[8:11]):
1730 1733 # initialize the set counter with the set number of the last file
1731 1734 setFile = int(filen[8:11])
1732 1735 else:
1733 1736 setFile = -1
1734 1737 else:
1735 1738 setFile = -1 # initialize the set counter
1736 1739
1737 1740 setFile += 1
1738 1741
1739 1742 # If this is a new day it resets some values
1740 1743 if self.dataOut.datatime.date() > self.fileDate:
1741 1744 setFile = 0
1742 1745 self.nTotalBlocks = 0
1743 1746
1744 1747 filen = '%s%4.4d%3.3d%3.3d%s' % (
1745 1748 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
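# e.g. optchar 'D', year 2017, day-of-year 123, set 2 and ext '.r' give 'D2017123002.r'
# (illustrative values)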
1746 1749
1747 1750 filename = os.path.join(path, subfolder, filen)
1748 1751
1749 1752 fp = open(filename, 'wb')
1750 1753
1751 1754 self.blockIndex = 0
1752 1755
1753 1756 # save attributes
1754 1757 self.filename = filename
1755 1758 self.subfolder = subfolder
1756 1759 self.fp = fp
1757 1760 self.setFile = setFile
1758 1761 self.flagIsNewFile = 1
1759 1762 self.fileDate = self.dataOut.datatime.date()
1760 1763
1761 1764 self.setFirstHeader()
1762 1765
1763 1766 print '[Writing] Opening file: %s' % self.filename
1764 1767
1765 1768 self.__writeFirstHeader()
1766 1769
1767 1770 return 1
1768 1771
1769 1772 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1770 1773 """
1771 1774 Sets the format in which the data will be saved and writes the First Header
1772 1775
1773 1776 Inputs:
1774 1777 path : directory where data will be saved
1775 1778 profilesPerBlock : number of profiles per block
1776 1779 set : initial file set
1777 1780 datatype : An integer number that defines data type:
1778 1781 0 : int8 (1 byte)
1779 1782 1 : int16 (2 bytes)
1780 1783 2 : int32 (4 bytes)
1781 1784 3 : int64 (8 bytes)
1782 1785 4 : float32 (4 bytes)
1783 1786 5 : double64 (8 bytes)
1784 1787
1785 1788 Return:
1786 1789 0 : if the setup was not successful
1787 1790 1 : if the setup was successful
1788 1791 """
1789 1792
1790 1793 if ext == None:
1791 1794 ext = self.ext
1792 1795
1793 1796 self.ext = ext.lower()
1794 1797
1795 1798 self.path = path
1796 1799
1797 1800 if set is None:
1798 1801 self.setFile = -1
1799 1802 else:
1800 1803 self.setFile = set - 1
1801 1804
1802 1805 self.blocksPerFile = blocksPerFile
1803 1806
1804 1807 self.profilesPerBlock = profilesPerBlock
1805 1808
1806 1809 self.dataOut = dataOut
1807 1810 self.fileDate = self.dataOut.datatime.date()
1808 1811 # By default
1809 1812 self.dtype = self.dataOut.dtype
1810 1813
1811 1814 if datatype is not None:
1812 1815 self.dtype = get_numpy_dtype(datatype)
1813 1816
1814 1817 if not(self.setNextFile()):
1815 1818 print "[Writing] There isn't a next file"
1816 1819 return 0
1817 1820
1818 1821 self.setBlockDimension()
1819 1822
1820 1823 return 1
1821 1824
1822 1825 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1823 1826
1824 1827 if not(self.isConfig):
1825 1828
1826 1829 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1827 1830 set=set, ext=ext, datatype=datatype, **kwargs)
1828 1831 self.isConfig = True
1829 1832
1830 1833 self.putData()
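# Minimal usage sketch (hypothetical path and parameters), assuming a concrete subclass
# that implements writeBlock()/putData():
# writer.run(dataOut, path='/data/out', blocksPerFile=100, profilesPerBlock=64, datatype=4)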
@@ -1,1182 +1,1180
1 1 import os
2 2 import sys
3 3 import glob
4 4 import fnmatch
5 5 import datetime
6 6 import time
7 7 import re
8 8 import h5py
9 9 import numpy
10 10
11 11 import pylab as plb
12 12 from scipy.optimize import curve_fit
13 13 from scipy import asarray as ar, exp
14 14 from scipy import stats
15 15
16 16 from numpy.ma.core import getdata
17 17
18 18 SPEED_OF_LIGHT = 299792458
19 19 SPEED_OF_LIGHT = 3e8
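# Note that this approximation overrides the exact value defined above; all ranges and
# velocities below are computed with 3e8 m/s.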
20 20
21 21 try:
22 22 from gevent import sleep
23 23 except:
24 24 from time import sleep
25 25
26 26 from schainpy.model.data.jrodata import Spectra
27 27 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
28 28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 29 #from schainpy.model.io.jroIO_bltr import BLTRReader
30 30 from numpy import imag, shape, NaN
31 31
32 32 from jroIO_base import JRODataReader
33 33
34 34
35 35 class Header(object):
36 36
37 37 def __init__(self):
38 38 raise NotImplementedError
39 39
40 40 def read(self):
41 41
42 42 raise NotImplementedError
43 43
44 44 def write(self):
45 45
46 46 raise NotImplementedError
47 47
48 48 def printInfo(self):
49 49
50 50 message = "#" * 50 + "\n"
51 51 message += self.__class__.__name__.upper() + "\n"
52 52 message += "#" * 50 + "\n"
53 53
54 54 keyList = self.__dict__.keys()
55 55 keyList.sort()
56 56
57 57 for key in keyList:
58 58 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
59 59
60 60 if "size" not in keyList:
61 61 attr = getattr(self, "size")
62 62
63 63 if attr:
64 64 message += "%s = %s" % ("size", attr) + "\n"
65 65
66 66 # print message
67 67
68 68
69 69 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
70 70 ('FileMgcNumber', '<u4'), # 0x23020100
71 71 # No Of FDT data records in this file (0 or more)
72 72 ('nFDTdataRecors', '<u4'),
73 73 ('OffsetStartHeader', '<u4'),
74 74 ('RadarUnitId', '<u4'),
75 75 ('SiteName', numpy.str_, 32), # Null terminated
76 76 ])
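# Header layout: 4 x 4-byte unsigned integers plus a 32-byte site name = 48 bytes,
# matching FileHeaderBLTR.size below.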
77 77
78 78
79 79 class FileHeaderBLTR(Header):
80 80
81 81 def __init__(self):
82 82
83 83 self.FileMgcNumber = 0 # 0x23020100
84 84 # No Of FDT data records in this file (0 or more)
85 85 self.nFDTdataRecors = 0
86 86 self.RadarUnitId = 0
87 87 self.OffsetStartHeader = 0
88 88 self.SiteName = ""
89 89 self.size = 48
90 90
91 91 def FHread(self, fp):
92 92 # try:
93 93 startFp = open(fp, "rb")
94 94
95 95 header = numpy.fromfile(startFp, FILE_STRUCTURE, 1)
96 96
97 97 print ' '
98 98 print 'file header pointer', startFp.tell()
99 99 print ' '
100 100
101 101 ''' numpy.fromfile(file, dtype, count, sep='')
102 102 file : file or str
103 103 Open file object or filename.
104 104
105 105 dtype : data-type
106 106 Data type of the returned array. For binary files, it is used to determine
107 107 the size and byte-order of the items in the file.
108 108
109 109 count : int
110 110 Number of items to read. -1 means all items (i.e., the complete file).
111 111
112 112 sep : str
113 113 Separator between items if file is a text file. Empty ("") separator means
114 114 the file should be treated as binary. Spaces (" ") in the separator match zero
115 115 or more whitespace characters. A separator consisting only of spaces must match
116 116 at least one whitespace.
117 117
118 118 '''
119 119
120 120 self.FileMgcNumber = hex(header['FileMgcNumber'][0])
121 121 # No Of FDT data records in this file (0 or more)
122 122 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
123 123 self.RadarUnitId = int(header['RadarUnitId'][0])
124 124 self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
125 125 self.SiteName = str(header['SiteName'][0])
126 126
127 127 # print 'Number of blocks', self.nFDTdataRecors
128 128
129 129 if self.size < 48:
130 130 return 0
131 131
132 132 return 1
133 133
134 134 def write(self, fp):
135 135
136 136 headerTuple = (self.FileMgcNumber,
137 137 self.nFDTdataRecors,
138 138 self.RadarUnitId,
139 139 self.SiteName,
140 140 self.size)
141 141
142 142 header = numpy.array(headerTuple, FILE_STRUCTURE)
143 143 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
144 144 header.tofile(fp)
145 145 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
146 146
147 147 fid : file or str
148 148 An open file object, or a string containing a filename.
149 149
150 150 sep : str
151 151 Separator between array items for text output. If "" (empty), a binary file is written,
152 152 equivalent to file.write(a.tobytes()).
153 153
154 154 format : str
155 155 Format string for text file output. Each entry in the array is formatted to text by
156 156 first converting it to the closest Python type, and then using "format" % item.
157 157
158 158 '''
159 159
160 160 return 1
161 161
162 162
163 163 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
164 164 ('RecMgcNumber', '<u4'), # 0x23030001
165 165 ('RecCounter', '<u4'), # Record counter(0,1, ...)
166 166 # Offset to start of next record form start of this record
167 167 ('Off2StartNxtRec', '<u4'),
168 168 # Offset to start of data from start of this record
169 169 ('Off2StartData', '<u4'),
170 170 # Epoch time stamp of start of acquisition (seconds)
171 171 ('nUtime', '<i4'),
172 172 # Millisecond component of time stamp (0,...,999)
173 173 ('nMilisec', '<u4'),
174 174 # Experiment tag name (null terminated)
175 175 ('ExpTagName', numpy.str_, 32),
176 176 # Experiment comment (null terminated)
177 177 ('ExpComment', numpy.str_, 32),
178 178 # Site latitude (from GPS) in degrees (positive implies North)
179 179 ('SiteLatDegrees', '<f4'),
180 180 # Site longitude (from GPS) in degrees (positive implies East)
181 181 ('SiteLongDegrees', '<f4'),
182 182 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
183 183 ('RTCgpsStatus', '<u4'),
184 184 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
185 185 ('ReceiveFrec', '<u4'), # Receive frequency
186 186 # First local oscillator frequency (Hz)
187 187 ('FirstOsciFrec', '<u4'),
188 188 # (0="O", 1="E", 2="linear 1", 3="linear2")
189 189 ('Polarisation', '<u4'),
190 190 # Receiver filter settings (0,1,2,3)
191 191 ('ReceiverFiltSett', '<u4'),
192 192 # Number of modes in use (1 or 2)
193 193 ('nModesInUse', '<u4'),
194 194 # Dual Mode index number for these data (0 or 1)
195 195 ('DualModeIndex', '<u4'),
196 196 # Dual Mode range correction for these data (m)
197 197 ('DualModeRange', '<u4'),
198 198 # Number of digital channels acquired (2*N)
199 199 ('nDigChannels', '<u4'),
200 200 # Sampling resolution (meters)
201 201 ('SampResolution', '<u4'),
202 202 # Number of range gates sampled
203 203 ('nHeights', '<u4'),
204 204 # Start range of sampling (meters)
205 205 ('StartRangeSamp', '<u4'),
206 206 ('PRFhz', '<u4'), # PRF (Hz)
207 207 ('nCohInt', '<u4'), # Integrations
208 208 # Number of data points transformed
209 209 ('nProfiles', '<u4'),
210 210 # Number of receive beams stored in file (1 or N)
211 211 ('nChannels', '<u4'),
212 212 ('nIncohInt', '<u4'), # Number of spectral averages
213 213 # FFT windowing index (0 = no window)
214 214 ('FFTwindowingInd', '<u4'),
215 215 # Beam steer angle (azimuth) in degrees (clockwise from true North)
216 216 ('BeamAngleAzim', '<f4'),
217 217 # Beam steer angle (zenith) in degrees (0=> vertical)
218 218 ('BeamAngleZen', '<f4'),
219 219 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
220 220 ('AntennaCoord0', '<f4'),
221 221 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
222 222 ('AntennaAngl0', '<f4'),
223 223 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
224 224 ('AntennaCoord1', '<f4'),
225 225 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
226 226 ('AntennaAngl1', '<f4'),
227 227 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
228 228 ('AntennaCoord2', '<f4'),
229 229 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
230 230 ('AntennaAngl2', '<f4'),
231 231 # Receiver phase calibration (degrees) - N values
232 232 ('RecPhaseCalibr0', '<f4'),
233 233 # Receiver phase calibration (degrees) - N values
234 234 ('RecPhaseCalibr1', '<f4'),
235 235 # Receiver phase calibration (degrees) - N values
236 236 ('RecPhaseCalibr2', '<f4'),
237 237 # Receiver amplitude calibration (ratio relative to receiver one) - N values
238 238 ('RecAmpCalibr0', '<f4'),
239 239 # Receiver amplitude calibration (ratio relative to receiver one) - N values
240 240 ('RecAmpCalibr1', '<f4'),
241 241 # Receiver amplitude calibration (ratio relative to receiver one) - N values
242 242 ('RecAmpCalibr2', '<f4'),
243 243 # Receiver gains in dB - N values
244 244 ('ReceiverGaindB0', '<i4'),
245 245 # Receiver gains in dB - N values
246 246 ('ReceiverGaindB1', '<i4'),
247 247 # Receiver gains in dB - N values
248 248 ('ReceiverGaindB2', '<i4'),
249 249 ])
250 250
251 251
252 252 class RecordHeaderBLTR(Header):
253 253
254 254 def __init__(self, RecMgcNumber=None, RecCounter=0, Off2StartNxtRec=811248,
255 255 nUtime=0, nMilisec=0, ExpTagName=None,
256 256 ExpComment=None, SiteLatDegrees=0, SiteLongDegrees=0,
257 257 RTCgpsStatus=0, TransmitFrec=0, ReceiveFrec=0,
258 258 FirstOsciFrec=0, Polarisation=0, ReceiverFiltSett=0,
259 259 nModesInUse=0, DualModeIndex=0, DualModeRange=0,
260 260 nDigChannels=0, SampResolution=0, nHeights=0,
261 261 StartRangeSamp=0, PRFhz=0, nCohInt=0,
262 262 nProfiles=0, nChannels=0, nIncohInt=0,
263 263 FFTwindowingInd=0, BeamAngleAzim=0, BeamAngleZen=0,
264 264 AntennaCoord0=0, AntennaCoord1=0, AntennaCoord2=0,
265 265 RecPhaseCalibr0=0, RecPhaseCalibr1=0, RecPhaseCalibr2=0,
266 266 RecAmpCalibr0=0, RecAmpCalibr1=0, RecAmpCalibr2=0,
267 267 AntennaAngl0=0, AntennaAngl1=0, AntennaAngl2=0,
268 268 ReceiverGaindB0=0, ReceiverGaindB1=0, ReceiverGaindB2=0, Off2StartData=0, OffsetStartHeader=0):
269 269
270 270 self.RecMgcNumber = RecMgcNumber # 0x23030001
271 271 self.RecCounter = RecCounter
272 272 self.Off2StartNxtRec = Off2StartNxtRec
273 273 self.Off2StartData = Off2StartData
274 274 self.nUtime = nUtime
275 275 self.nMilisec = nMilisec
276 276 self.ExpTagName = ExpTagName
277 277 self.ExpComment = ExpComment
278 278 self.SiteLatDegrees = SiteLatDegrees
279 279 self.SiteLongDegrees = SiteLongDegrees
280 280 self.RTCgpsStatus = RTCgpsStatus
281 281 self.TransmitFrec = TransmitFrec
282 282 self.ReceiveFrec = ReceiveFrec
283 283 self.FirstOsciFrec = FirstOsciFrec
284 284 self.Polarisation = Polarisation
285 285 self.ReceiverFiltSett = ReceiverFiltSett
286 286 self.nModesInUse = nModesInUse
287 287 self.DualModeIndex = DualModeIndex
288 288 self.DualModeRange = DualModeRange
289 289 self.nDigChannels = nDigChannels
290 290 self.SampResolution = SampResolution
291 291 self.nHeights = nHeights
292 292 self.StartRangeSamp = StartRangeSamp
293 293 self.PRFhz = PRFhz
294 294 self.nCohInt = nCohInt
295 295 self.nProfiles = nProfiles
296 296 self.nChannels = nChannels
297 297 self.nIncohInt = nIncohInt
298 298 self.FFTwindowingInd = FFTwindowingInd
299 299 self.BeamAngleAzim = BeamAngleAzim
300 300 self.BeamAngleZen = BeamAngleZen
301 301 self.AntennaCoord0 = AntennaCoord0
302 302 self.AntennaAngl0 = AntennaAngl0
303 303 self.AntennaAngl1 = AntennaAngl1
304 304 self.AntennaAngl2 = AntennaAngl2
305 305 self.AntennaCoord1 = AntennaCoord1
306 306 self.AntennaCoord2 = AntennaCoord2
307 307 self.RecPhaseCalibr0 = RecPhaseCalibr0
308 308 self.RecPhaseCalibr1 = RecPhaseCalibr1
309 309 self.RecPhaseCalibr2 = RecPhaseCalibr2
310 310 self.RecAmpCalibr0 = RecAmpCalibr0
311 311 self.RecAmpCalibr1 = RecAmpCalibr1
312 312 self.RecAmpCalibr2 = RecAmpCalibr2
313 313 self.ReceiverGaindB0 = ReceiverGaindB0
314 314 self.ReceiverGaindB1 = ReceiverGaindB1
315 315 self.ReceiverGaindB2 = ReceiverGaindB2
316 316 self.OffsetStartHeader = 48
317 317
318 318 def RHread(self, fp):
319 319 # print fp
320 320 # startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb")
321 321 # The method tell() returns the current position of the file read/write pointer within the file.
322 322 startFp = open(fp, "rb")
323 323 # RecCounter=0
324 324 # Off2StartNxtRec=811248
325 325 OffRHeader = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
326 326 print ' '
327 327 print 'Record Header pointer', startFp.tell()
328 328 print ' '
329 329
330 330 startFp.seek(OffRHeader, os.SEEK_SET)
331 331
332 332 print ' '
333 333 print 'Record Header pointer after seek', startFp.tell()
334 334 print ' '
335 335
336 336 # print 'Block position: ', OffRHeader
337 337
338 338 header = numpy.fromfile(startFp, RECORD_STRUCTURE, 1)
339 339
340 340 print ' '
341 341 print 'Record Header pointer after seek', startFp.tell()
342 342 print ' '
343 343
344 344 print ' '
345 345 #
346 346 # print 'Record Header pointer after seek', header.tell()
347 347 print ' '
348 348
349 349 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
350 350 self.RecCounter = int(header['RecCounter'][0])
351 351 self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
352 352 self.Off2StartData = int(header['Off2StartData'][0])
353 353 self.nUtime = header['nUtime'][0]
354 354 self.nMilisec = header['nMilisec'][0]
355 355 self.ExpTagName = str(header['ExpTagName'][0])
356 356 self.ExpComment = str(header['ExpComment'][0])
357 357 self.SiteLatDegrees = header['SiteLatDegrees'][0]
358 358 self.SiteLongDegrees = header['SiteLongDegrees'][0]
359 359 self.RTCgpsStatus = header['RTCgpsStatus'][0]
360 360 self.TransmitFrec = header['TransmitFrec'][0]
361 361 self.ReceiveFrec = header['ReceiveFrec'][0]
362 362 self.FirstOsciFrec = header['FirstOsciFrec'][0]
363 363 self.Polarisation = header['Polarisation'][0]
364 364 self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
365 365 self.nModesInUse = header['nModesInUse'][0]
366 366 self.DualModeIndex = header['DualModeIndex'][0]
367 367 self.DualModeRange = header['DualModeRange'][0]
368 368 self.nDigChannels = header['nDigChannels'][0]
369 369 self.SampResolution = header['SampResolution'][0]
370 370 self.nHeights = header['nHeights'][0]
371 371 self.StartRangeSamp = header['StartRangeSamp'][0]
372 372 self.PRFhz = header['PRFhz'][0]
373 373 self.nCohInt = header['nCohInt'][0]
374 374 self.nProfiles = header['nProfiles'][0]
375 375 self.nChannels = header['nChannels'][0]
376 376 self.nIncohInt = header['nIncohInt'][0]
377 377 self.FFTwindowingInd = header['FFTwindowingInd'][0]
378 378 self.BeamAngleAzim = header['BeamAngleAzim'][0]
379 379 self.BeamAngleZen = header['BeamAngleZen'][0]
380 380 self.AntennaCoord0 = header['AntennaCoord0'][0]
381 381 self.AntennaAngl0 = header['AntennaAngl0'][0]
382 382 self.AntennaCoord1 = header['AntennaCoord1'][0]
383 383 self.AntennaAngl1 = header['AntennaAngl1'][0]
384 384 self.AntennaCoord2 = header['AntennaCoord2'][0]
385 385 self.AntennaAngl2 = header['AntennaAngl2'][0]
386 386 self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
387 387 self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
388 388 self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
389 389 self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
390 390 self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
391 391 self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
392 392 self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
393 393 self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
394 394 self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
395 395
396 396 self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
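# Inter-pulse distance c / (2 * PRF); e.g. a PRF of 1000 Hz gives roughly 1.5e5 m
# (illustrative value).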
397 397
398 398 self.RHsize = 180 + 20 * self.nChannels
399 399 self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
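# Record header: 180 fixed bytes plus 20 bytes per channel (e.g. 240 bytes for 3 channels);
# the data block stores complex samples as two 4-byte floats each.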
400 400 # print 'Datasize',self.Datasize
401 401 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
402 402
403 403 print '=============================================='
404 404 print 'RecMgcNumber ', self.RecMgcNumber
405 405 print 'RecCounter ', self.RecCounter
406 406 print 'Off2StartNxtRec ', self.Off2StartNxtRec
407 407 print 'Off2StartData ', self.Off2StartData
408 408 print 'Range Resolution ', self.SampResolution
409 409 print 'First Height ', self.StartRangeSamp
410 410 print 'PRF (Hz) ', self.PRFhz
411 411 print 'Heights (K) ', self.nHeights
412 412 print 'Channels (N) ', self.nChannels
413 413 print 'Profiles (J) ', self.nProfiles
414 414 print 'iCoh ', self.nCohInt
415 415 print 'iInCoh ', self.nIncohInt
416 416 print 'BeamAngleAzim ', self.BeamAngleAzim
417 417 print 'BeamAngleZen ', self.BeamAngleZen
418 418
419 419 # print 'ModoEnUso ',self.DualModeIndex
420 420 # print 'UtcTime ',self.nUtime
421 421 # print 'MiliSec ',self.nMilisec
422 422 # print 'Exp TagName ',self.ExpTagName
423 423 # print 'Exp Comment ',self.ExpComment
424 424 # print 'FFT Window Index ',self.FFTwindowingInd
425 425 # print 'N Dig. Channels ',self.nDigChannels
426 426 print 'Block size ', self.RHsize
427 427 print 'DataSize ', self.Datasize
428 428 print 'BeamAngleAzim ', self.BeamAngleAzim
429 429 # print 'AntennaCoord0 ',self.AntennaCoord0
430 430 # print 'AntennaAngl0 ',self.AntennaAngl0
431 431 # print 'AntennaCoord1 ',self.AntennaCoord1
432 432 # print 'AntennaAngl1 ',self.AntennaAngl1
433 433 # print 'AntennaCoord2 ',self.AntennaCoord2
434 434 # print 'AntennaAngl2 ',self.AntennaAngl2
435 435 print 'RecPhaseCalibr0 ', self.RecPhaseCalibr0
436 436 print 'RecPhaseCalibr1 ', self.RecPhaseCalibr1
437 437 print 'RecPhaseCalibr2 ', self.RecPhaseCalibr2
438 438 print 'RecAmpCalibr0 ', self.RecAmpCalibr0
439 439 print 'RecAmpCalibr1 ', self.RecAmpCalibr1
440 440 print 'RecAmpCalibr2 ', self.RecAmpCalibr2
441 441 print 'ReceiverGaindB0 ', self.ReceiverGaindB0
442 442 print 'ReceiverGaindB1 ', self.ReceiverGaindB1
443 443 print 'ReceiverGaindB2 ', self.ReceiverGaindB2
444 444 print '=============================================='
445 445
446 446 if OffRHeader > endFp:
447 447 sys.stderr.write(
448 448 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp)
449 449 return 0
450 450
451 451 if OffRHeader < endFp:
452 452 sys.stderr.write(
453 453 "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp)
454 454 return 0
455 455
456 456 return 1
457 457
458 458
459 459 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader):
460 460
461 461 path = None
462 462 startDate = None
463 463 endDate = None
464 464 startTime = None
465 465 endTime = None
466 466 walk = None
467 467 isConfig = False
468 468
469 469 fileList = None
470 470
471 471 # metadata
472 472 TimeZone = None
473 473 Interval = None
474 474 heightList = None
475 475
476 476 # data
477 477 data = None
478 478 utctime = None
479 479
480 480 def __init__(self, **kwargs):
481 481
482 482 # Remove the inheritance from the base class
483 483 ProcessingUnit.__init__(self, **kwargs)
484 484
485 485 #self.isConfig = False
486 486
487 487 #self.pts2read_SelfSpectra = 0
488 488 #self.pts2read_CrossSpectra = 0
489 489 #self.pts2read_DCchannels = 0
490 490 #self.datablock = None
491 491 self.utc = None
492 492 self.ext = ".fdt"
493 493 self.optchar = "P"
494 494 self.fpFile = None
495 495 self.fp = None
496 496 self.BlockCounter = 0
497 497 self.dtype = None
498 498 self.fileSizeByHeader = None
499 499 self.filenameList = []
500 500 self.fileSelector = 0
501 501 self.Off2StartNxtRec = 0
502 502 self.RecCounter = 0
503 503 self.flagNoMoreFiles = 0
504 504 self.data_spc = None
505 505 self.data_cspc = None
506 506 self.data_output = None
507 507 self.path = None
508 508 self.OffsetStartHeader = 0
509 509 self.Off2StartData = 0
510 510 self.ipp = 0
511 511 self.nFDTdataRecors = 0
512 512 self.blocksize = 0
513 513 self.dataOut = Spectra()
514 514 self.profileIndex = 1 # Always
515 515 self.dataOut.flagNoData = False
516 self.dataOut.nRdPairs = 0
517 self.dataOut.pairsList = []
518 self.dataOut.data_spc = None
519 self.dataOut.noise = []
516 self.dataOut.nRdPairs = 0
517 self.dataOut.data_spc = None
520 518 self.dataOut.velocityX = []
521 519 self.dataOut.velocityY = []
522 520 self.dataOut.velocityV = []
523 521
524 522 def Files2Read(self, fp):
525 523 '''
526 524 Function that indicates the number of .fdt files that exist in the folder to be read.
527 525 It also creates an organized list with the names of the files to read.
528 526 '''
529 527 # self.__checkPath()
530 528
531 529 # Gets the list of files within the fp address
532 530 ListaData = os.listdir(fp)
533 531 # Sort the list of files alphabetically by name
534 532 ListaData = sorted(ListaData)
535 533 nFiles = 0 # File Counter
536 534 FileList = [] # A list is created that will contain the .fdt files
537 535 for IndexFile in ListaData:
538 536 if '.fdt' in IndexFile:
539 537 FileList.append(IndexFile)
540 538 nFiles += 1
541 539
542 540 # print 'Files2Read'
543 541 # print 'There are '+str(nFiles)+' .fdt files'
544 542
545 543 self.filenameList = FileList # List of files sorted alphabetically by name
546 544
547 545 def run(self, **kwargs):
548 546 '''
549 547 This method starts the data intake and is called continuously.
550 548 It first verifies that setup() has been run and then continues acquiring
551 549 the data to be processed with getData().
552 550 '''
553 551 if not self.isConfig:
554 552 self.setup(**kwargs)
555 553 self.isConfig = True
556 554
557 555 self.getData()
558 556 # print 'running'
559 557
560 558 def setup(self, path=None,
561 559 startDate=None,
562 560 endDate=None,
563 561 startTime=None,
564 562 endTime=None,
565 563 walk=True,
566 564 timezone='utc',
567 565 code=None,
568 566 online=False,
569 567 ReadMode=None,
570 568 **kwargs):
571 569
572 570 self.isConfig = True
573 571
574 572 self.path = path
575 573 self.startDate = startDate
576 574 self.endDate = endDate
577 575 self.startTime = startTime
578 576 self.endTime = endTime
579 577 self.walk = walk
580 578 self.ReadMode = int(ReadMode)
581 579
582 580 pass
583 581
584 582 def getData(self):
585 583 '''
586 584 Before starting this function, you should check that there is still an unread file,
587 585 that there are still blocks to read, or whether the data block is empty.
588 586 
589 587 Then the file reader ("readFile") should be called.
590 588
591 589 '''
592 590
593 591 if self.flagNoMoreFiles:
594 592 self.dataOut.flagNoData = True
595 593 print 'flagNoData becomes True'
596 594 return 0
597 595
598 596 self.fp = self.path
599 597 self.Files2Read(self.fp)
600 598 self.readFile(self.fp)
601 599 self.dataOut.data_spc = self.data_spc
602 600 self.dataOut.data_cspc = self.data_cspc
603 601 self.dataOut.data_output = self.data_output
604 602
605 603 print 'self.dataOut.data_output', shape(self.dataOut.data_output)
606 604
607 605 # self.removeDC()
608 606 return self.dataOut.data_spc
609 607
610 608 def readFile(self, fp):
611 609 '''
612 610 You must indicate whether you are reading in Online or Offline mode and load
613 611 the parameters for that reading mode.
614 612
615 613 Then you must do 2 actions:
616 614
617 615 1. Get the BLTR FileHeader.
618 616 2. Start reading the first block.
619 617 '''
620 618
621 619 # Build the full path of the .fdt file to be read from the folder address
622 620 print "File: ", self.fileSelector + 1
623 621
624 622 if self.fileSelector < len(self.filenameList):
625 623
626 624 self.fpFile = str(fp) + '/' + \
627 625 str(self.filenameList[self.fileSelector])
628 626 # print self.fpFile
629 627 fheader = FileHeaderBLTR()
630 628 fheader.FHread(self.fpFile) # Bltr FileHeader Reading
631 629 self.nFDTdataRecors = fheader.nFDTdataRecors
632 630
633 631 self.readBlock() # Block reading
634 632 else:
635 633 print 'readFile FlagNoData becomes true'
636 634 self.flagNoMoreFiles = True
637 635 self.dataOut.flagNoData = True
638 636 return 0
639 637
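# getVelRange() builds the Doppler velocity axis: Lambda = c / 50 MHz (a ~6 m wavelength
# is hard-coded here), the Nyquist velocity scales with PRF / nCohInt, and nProfiles bins
# span the range symmetrically around zero.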
640 638 def getVelRange(self, extrapoints=0):
641 639 Lambda = SPEED_OF_LIGHT / 50000000
642 640 # 1./(self.dataOut.ippSeconds * self.dataOut.nCohInt)
643 641 PRF = self.dataOut.PRF
644 642 Vmax = -Lambda / (4. * (1. / PRF) * self.dataOut.nCohInt * 2.)
645 643 deltafreq = PRF / (self.nProfiles)
646 644 deltavel = (Vmax * 2) / (self.nProfiles)
647 645 freqrange = deltafreq * \
648 646 (numpy.arange(self.nProfiles) - self.nProfiles / 2.) - deltafreq / 2
649 647 velrange = deltavel * \
650 648 (numpy.arange(self.nProfiles) - self.nProfiles / 2.)
651 649 return velrange
652 650
653 651 def readBlock(self):
654 652 '''
655 653 Check whether the block has data; if it does not, move on to the next file.
656 654
657 655 Then the following is done:
658 656
659 657 1. Read the RecordHeader
660 658 2. Fill the buffer with the current block number.
661 659
662 660 '''
663 661
664 662 if self.BlockCounter < self.nFDTdataRecors - 2:
665 663 print self.nFDTdataRecors, 'CONDITION'
666 664 if self.ReadMode == 1:
667 665 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter + 1)
668 666 elif self.ReadMode == 0:
669 667 rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter)
670 668
671 669 rheader.RHread(self.fpFile) # Bltr FileHeader Reading
672 670
673 671 self.OffsetStartHeader = rheader.OffsetStartHeader
674 672 self.RecCounter = rheader.RecCounter
675 673 self.Off2StartNxtRec = rheader.Off2StartNxtRec
676 674 self.Off2StartData = rheader.Off2StartData
677 675 self.nProfiles = rheader.nProfiles
678 676 self.nChannels = rheader.nChannels
679 677 self.nHeights = rheader.nHeights
680 678 self.frequency = rheader.TransmitFrec
681 679 self.DualModeIndex = rheader.DualModeIndex
682 680
683 681 self.pairsList = [(0, 1), (0, 2), (1, 2)]
684 682 self.dataOut.pairsList = self.pairsList
685 683
686 684 self.nRdPairs = len(self.dataOut.pairsList)
687 685 self.dataOut.nRdPairs = self.nRdPairs
688 686
689 687 self.__firstHeigth = rheader.StartRangeSamp
690 688 self.__deltaHeigth = rheader.SampResolution
691 689 self.dataOut.heightList = self.__firstHeigth + \
692 690 numpy.array(range(self.nHeights)) * self.__deltaHeigth
693 691 self.dataOut.channelList = range(self.nChannels)
694 692 self.dataOut.nProfiles = rheader.nProfiles
695 693 self.dataOut.nIncohInt = rheader.nIncohInt
696 694 self.dataOut.nCohInt = rheader.nCohInt
697 695 self.dataOut.ippSeconds = 1 / float(rheader.PRFhz)
698 696 self.dataOut.PRF = rheader.PRFhz
699 697 self.dataOut.nFFTPoints = rheader.nProfiles
700 698 self.dataOut.utctime = rheader.nUtime
701 699 self.dataOut.timeZone = 0
702 700 self.dataOut.normFactor = self.dataOut.nProfiles * \
703 701 self.dataOut.nIncohInt * self.dataOut.nCohInt
704 702 self.dataOut.outputInterval = self.dataOut.ippSeconds * \
705 703 self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
706 704
707 705 self.data_output = numpy.ones([3, rheader.nHeights]) * numpy.NaN
708 706 print 'self.data_output', shape(self.data_output)
709 707 self.dataOut.velocityX = []
710 708 self.dataOut.velocityY = []
711 709 self.dataOut.velocityV = []
712 710
713 711 '''Block reading: the block data is read and reshaped into
714 712 (channels, profiles, heights).
715 713 '''
716 714
717 715 # Move the file pointer to where the data block starts
718 716 startDATA = open(self.fpFile, "rb")
719 717 OffDATA = self.OffsetStartHeader + self.RecCounter * \
720 718 self.Off2StartNxtRec + self.Off2StartData
721 719 startDATA.seek(OffDATA, os.SEEK_SET)
722 720
723 721 def moving_average(x, N=2):
724 722 return numpy.convolve(x, numpy.ones((N,)) / N)[(N - 1):]
725 723
726 724 def gaus(xSamples, a, x0, sigma):
727 725 return a * exp(-(xSamples - x0)**2 / (2 * sigma**2))
728 726
729 727 def Find(x, value):
730 728 for index in range(len(x)):
731 729 if x[index] == value:
732 730 return index
733 731
734 732 def pol2cart(rho, phi):
735 733 x = rho * numpy.cos(phi)
736 734 y = rho * numpy.sin(phi)
737 735 return(x, y)
738 736
739 737 if self.DualModeIndex == self.ReadMode:
740 738
741 739 self.data_fft = numpy.fromfile(
742 740 startDATA, [('complex', '<c8')], self.nProfiles * self.nChannels * self.nHeights)
743 741
744 742 self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
745 743
746 744 self.data_block = numpy.reshape(
747 745 self.data_fft, (self.nHeights, self.nChannels, self.nProfiles))
748 746
749 747 self.data_block = numpy.transpose(self.data_block, (1, 2, 0))
750 748
751 749 copy = self.data_block.copy()
752 750 spc = copy * numpy.conjugate(copy)
753 751
754 752 self.data_spc = numpy.absolute(
755 753 spc) # absolute value (magnitude)
756 754
757 755 factor = self.dataOut.normFactor
758 756
759 757 z = self.data_spc.copy() # /factor
760 758 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
761 759 #zdB = 10*numpy.log10(z)
762 760 print ' '
763 761 print 'Z: '
764 762 print shape(z)
765 763 print ' '
766 764 print ' '
767 765
768 766 self.dataOut.data_spc = self.data_spc
769 767
770 768 self.noise = self.dataOut.getNoise(
771 769 ymin_index=80, ymax_index=132) # /factor
772 770 #noisedB = 10*numpy.log10(self.noise)
773 771
774 772 ySamples = numpy.ones([3, self.nProfiles])
775 773 phase = numpy.ones([3, self.nProfiles])
776 774 CSPCSamples = numpy.ones(
777 775 [3, self.nProfiles], dtype=numpy.complex_)
778 776 coherence = numpy.ones([3, self.nProfiles])
779 777 PhaseSlope = numpy.ones(3)
780 778 PhaseInter = numpy.ones(3)
781 779
782 780 '''****** Getting CrossSpectra ******'''
783 781 cspc = self.data_block.copy()
784 782 self.data_cspc = self.data_block.copy()
785 783
786 784 xFrec = self.getVelRange(1)
787 785 VelRange = self.getVelRange(1)
788 786 self.dataOut.VelRange = VelRange
789 787 # print ' '
790 788 # print ' '
791 789 # print 'xFrec',xFrec
792 790 # print ' '
793 791 # print ' '
794 792 # Height=35
795 793 for i in range(self.nRdPairs):
796 794
797 795 chan_index0 = self.dataOut.pairsList[i][0]
798 796 chan_index1 = self.dataOut.pairsList[i][1]
799 797
800 798 self.data_cspc[i, :, :] = cspc[chan_index0, :,
801 799 :] * numpy.conjugate(cspc[chan_index1, :, :])
802 800
803 801 '''Getting Eij and Nij'''
804 802 (AntennaX0, AntennaY0) = pol2cart(
805 803 rheader.AntennaCoord0, rheader.AntennaAngl0 * numpy.pi / 180)
806 804 (AntennaX1, AntennaY1) = pol2cart(
807 805 rheader.AntennaCoord1, rheader.AntennaAngl1 * numpy.pi / 180)
808 806 (AntennaX2, AntennaY2) = pol2cart(
809 807 rheader.AntennaCoord2, rheader.AntennaAngl2 * numpy.pi / 180)
810 808
811 809 E01 = AntennaX0 - AntennaX1
812 810 N01 = AntennaY0 - AntennaY1
813 811
814 812 E02 = AntennaX0 - AntennaX2
815 813 N02 = AntennaY0 - AntennaY2
816 814
817 815 E12 = AntennaX1 - AntennaX2
818 816 N12 = AntennaY1 - AntennaY2
819 817
820 818 self.ChanDist = numpy.array(
821 819 [[E01, N01], [E02, N02], [E12, N12]])
822 820
823 821 self.dataOut.ChanDist = self.ChanDist
824 822
825 823
826 824 # for Height in range(self.nHeights):
827 825 #
828 826 # for i in range(self.nRdPairs):
829 827 #
830 828 # '''****** Line of Data SPC ******'''
831 829 # zline=z[i,:,Height]
832 830 #
833 831 # '''****** DC is removed ******'''
834 832 # DC=Find(zline,numpy.amax(zline))
835 833 # zline[DC]=(zline[DC-1]+zline[DC+1])/2
836 834 #
837 835 #
838 836 # '''****** SPC is normalized ******'''
839 837 # FactNorm= zline.copy() / numpy.sum(zline.copy())
840 838 # FactNorm= FactNorm/numpy.sum(FactNorm)
841 839 #
842 840 # SmoothSPC=moving_average(FactNorm,N=3)
843 841 #
844 842 # xSamples = ar(range(len(SmoothSPC)))
845 843 # ySamples[i] = SmoothSPC-self.noise[i]
846 844 #
847 845 # for i in range(self.nRdPairs):
848 846 #
849 847 # '''****** Line of Data CSPC ******'''
850 848 # cspcLine=self.data_cspc[i,:,Height].copy()
851 849 #
852 850 #
853 851 #
854 852 # '''****** CSPC is normalized ******'''
855 853 # chan_index0 = self.dataOut.pairsList[i][0]
856 854 # chan_index1 = self.dataOut.pairsList[i][1]
857 855 # CSPCFactor= numpy.sum(ySamples[chan_index0]) * numpy.sum(ySamples[chan_index1])
858 856 #
859 857 #
860 858 # CSPCNorm= cspcLine.copy() / numpy.sqrt(CSPCFactor)
861 859 #
862 860 #
863 861 # CSPCSamples[i] = CSPCNorm-self.noise[i]
864 862 # coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
865 863 #
866 864 # '''****** DC is removed ******'''
867 865 # DC=Find(coherence[i],numpy.amax(coherence[i]))
868 866 # coherence[i][DC]=(coherence[i][DC-1]+coherence[i][DC+1])/2
869 867 # coherence[i]= moving_average(coherence[i],N=2)
870 868 #
871 869 # phase[i] = moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
872 870 #
873 871 #
874 872 # '''****** Getting fij width ******'''
875 873 #
876 874 # yMean=[]
877 875 # yMean2=[]
878 876 #
879 877 # for j in range(len(ySamples[1])):
880 878 # yMean=numpy.append(yMean,numpy.average([ySamples[0,j],ySamples[1,j],ySamples[2,j]]))
881 879 #
882 880 # '''******* Getting fitting Gaussian ******'''
883 881 # meanGauss=sum(xSamples*yMean) / len(xSamples)
884 882 # sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples)
885 883 # #print 'Height',Height,'SNR', meanGauss/sigma**2
886 884 #
887 885 # if (abs(meanGauss/sigma**2) > 0.0001) :
888 886 #
889 887 # try:
890 888 # popt,pcov = curve_fit(gaus,xSamples,yMean,p0=[1,meanGauss,sigma])
891 889 #
892 890 # if numpy.amax(popt)>numpy.amax(yMean)*0.3:
893 891 # FitGauss=gaus(xSamples,*popt)
894 892 #
895 893 # else:
896 894 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
897 895 # print 'Verificador: Dentro', Height
898 896 # except RuntimeError:
899 897 #
900 898 # try:
901 899 # for j in range(len(ySamples[1])):
902 900 # yMean2=numpy.append(yMean2,numpy.average([ySamples[1,j],ySamples[2,j]]))
903 901 # popt,pcov = curve_fit(gaus,xSamples,yMean2,p0=[1,meanGauss,sigma])
904 902 # FitGauss=gaus(xSamples,*popt)
905 903 # print 'Verificador: Exepcion1', Height
906 904 # except RuntimeError:
907 905 #
908 906 # try:
909 907 # popt,pcov = curve_fit(gaus,xSamples,ySamples[1],p0=[1,meanGauss,sigma])
910 908 # FitGauss=gaus(xSamples,*popt)
911 909 # print 'Verificador: Exepcion2', Height
912 910 # except RuntimeError:
913 911 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
914 912 # print 'Verificador: Exepcion3', Height
915 913 # else:
916 914 # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
917 915 # #print 'Verificador: Fuera', Height
918 916 #
919 917 #
920 918 #
921 919 # Maximun=numpy.amax(yMean)
922 920 # eMinus1=Maximun*numpy.exp(-1)
923 921 #
924 922 # HWpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-eMinus1)))
925 923 # HalfWidth= xFrec[HWpos]
926 924 # GCpos=Find(FitGauss, numpy.amax(FitGauss))
927 925 # Vpos=Find(FactNorm, numpy.amax(FactNorm))
928 926 # #Vpos=numpy.sum(FactNorm)/len(FactNorm)
929 927 # #Vpos=Find(FactNorm, min(FactNorm, key=lambda value:abs(value- numpy.mean(FactNorm) )))
930 928 # #print 'GCpos',GCpos, numpy.amax(FitGauss), 'HWpos',HWpos
931 929 # '''****** Getting Fij ******'''
932 930 #
933 931 # GaussCenter=xFrec[GCpos]
934 932 # if (GaussCenter<0 and HalfWidth>0) or (GaussCenter>0 and HalfWidth<0):
935 933 # Fij=abs(GaussCenter)+abs(HalfWidth)+0.0000001
936 934 # else:
937 935 # Fij=abs(GaussCenter-HalfWidth)+0.0000001
938 936 #
939 937 # '''****** Getting Frecuency range of significant data ******'''
940 938 #
941 939 # Rangpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.10)))
942 940 #
943 941 # if Rangpos<GCpos:
944 942 # Range=numpy.array([Rangpos,2*GCpos-Rangpos])
945 943 # else:
946 944 # Range=numpy.array([2*GCpos-Rangpos,Rangpos])
947 945 #
948 946 # FrecRange=xFrec[Range[0]:Range[1]]
949 947 #
950 948 # #print 'FrecRange', FrecRange
951 949 # '''****** Getting SCPC Slope ******'''
952 950 #
953 951 # for i in range(self.nRdPairs):
954 952 #
955 953 # if len(FrecRange)>5 and len(FrecRange)<self.nProfiles*0.5:
956 954 # PhaseRange=moving_average(phase[i,Range[0]:Range[1]],N=3)
957 955 #
958 956 # slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange)
959 957 # PhaseSlope[i]=slope
960 958 # PhaseInter[i]=intercept
961 959 # else:
962 960 # PhaseSlope[i]=0
963 961 # PhaseInter[i]=0
964 962 #
965 963 # # plt.figure(i+15)
966 964 # # plt.title('FASE ( CH%s*CH%s )' %(self.dataOut.pairsList[i][0],self.dataOut.pairsList[i][1]))
967 965 # # plt.xlabel('Frecuencia (KHz)')
968 966 # # plt.ylabel('Magnitud')
969 967 # # #plt.subplot(311+i)
970 968 # # plt.plot(FrecRange,PhaseRange,'b')
971 969 # # plt.plot(FrecRange,FrecRange*PhaseSlope[i]+PhaseInter[i],'r')
972 970 #
973 971 # #plt.axis([-0.6, 0.2, -3.2, 3.2])
974 972 #
975 973 #
976 974 # '''Getting constant C'''
977 975 # cC=(Fij*numpy.pi)**2
978 976 #
979 977 # # '''Getting Eij and Nij'''
980 978 # # (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180)
981 979 # # (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180)
982 980 # # (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180)
983 981 # #
984 982 # # E01=AntennaX0-AntennaX1
985 983 # # N01=AntennaY0-AntennaY1
986 984 # #
987 985 # # E02=AntennaX0-AntennaX2
988 986 # # N02=AntennaY0-AntennaY2
989 987 # #
990 988 # # E12=AntennaX1-AntennaX2
991 989 # # N12=AntennaY1-AntennaY2
992 990 #
993 991 # '''****** Getting constants F and G ******'''
994 992 # MijEijNij=numpy.array([[E02,N02], [E12,N12]])
995 993 # MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi)
996 994 # MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
997 995 # MijResults=numpy.array([MijResult0,MijResult1])
998 996 # (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
999 997 #
1000 998 # '''****** Getting constants A, B and H ******'''
1001 999 # W01=numpy.amax(coherence[0])
1002 1000 # W02=numpy.amax(coherence[1])
1003 1001 # W12=numpy.amax(coherence[2])
1004 1002 #
1005 1003 # WijResult0=((cF*E01+cG*N01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC))
1006 1004 # WijResult1=((cF*E02+cG*N02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC))
1007 1005 # WijResult2=((cF*E12+cG*N12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC))
1008 1006 #
1009 1007 # WijResults=numpy.array([WijResult0, WijResult1, WijResult2])
1010 1008 #
1011 1009 # WijEijNij=numpy.array([ [E01**2, N01**2, 2*E01*N01] , [E02**2, N02**2, 2*E02*N02] , [E12**2, N12**2, 2*E12*N12] ])
1012 1010 # (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1013 1011 #
1014 1012 # VxVy=numpy.array([[cA,cH],[cH,cB]])
1015 1013 #
1016 1014 # VxVyResults=numpy.array([-cF,-cG])
1017 1015 # (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
1018 1016 # Vzon = Vy
1019 1017 # Vmer = Vx
1020 1018 # Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1021 1019 # Vang=numpy.arctan2(Vmer,Vzon)
1022 1020 #
1023 1021 # if abs(Vy)<100 and abs(Vy)> 0.:
1024 1022 # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, Vzon) #Vmag
1025 1023 # #print 'Vmag',Vmag
1026 1024 # else:
1027 1025 # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, NaN)
1028 1026 #
1029 1027 # if abs(Vx)<100 and abs(Vx) > 0.:
1030 1028 # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, Vmer) #Vang
1031 1029 # #print 'Vang',Vang
1032 1030 # else:
1033 1031 # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, NaN)
1034 1032 #
1035 1033 # if abs(GaussCenter)<2:
1036 1034 # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, xFrec[Vpos])
1037 1035 #
1038 1036 # else:
1039 1037 # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, NaN)
1040 1038 #
1041 1039 #
1042 1040 # # print '********************************************'
1043 1041 # # print 'HalfWidth ', HalfWidth
1044 1042 # # print 'Maximun ', Maximun
1045 1043 # # print 'eMinus1 ', eMinus1
1046 1044 # # print 'Rangpos ', Rangpos
1047 1045 # # print 'GaussCenter ',GaussCenter
1048 1046 # # print 'E01 ',E01
1049 1047 # # print 'N01 ',N01
1050 1048 # # print 'E02 ',E02
1051 1049 # # print 'N02 ',N02
1052 1050 # # print 'E12 ',E12
1053 1051 # # print 'N12 ',N12
1054 1052 # #print 'self.dataOut.velocityX ', self.dataOut.velocityX
1055 1053 # # print 'Fij ', Fij
1056 1054 # # print 'cC ', cC
1057 1055 # # print 'cF ', cF
1058 1056 # # print 'cG ', cG
1059 1057 # # print 'cA ', cA
1060 1058 # # print 'cB ', cB
1061 1059 # # print 'cH ', cH
1062 1060 # # print 'Vx ', Vx
1063 1061 # # print 'Vy ', Vy
1064 1062 # # print 'Vmag ', Vmag
1065 1063 # # print 'Vang ', Vang*180/numpy.pi
1066 1064 # # print 'PhaseSlope ',PhaseSlope[0]
1067 1065 # # print 'PhaseSlope ',PhaseSlope[1]
1068 1066 # # print 'PhaseSlope ',PhaseSlope[2]
1069 1067 # # print '********************************************'
1070 1068 # #print 'data_output',shape(self.dataOut.velocityX), shape(self.dataOut.velocityY)
1071 1069 #
1072 1070 # #print 'self.dataOut.velocityX', len(self.dataOut.velocityX)
1073 1071 # #print 'self.dataOut.velocityY', len(self.dataOut.velocityY)
1074 1072 # #print 'self.dataOut.velocityV', self.dataOut.velocityV
1075 1073 #
1076 1074 # self.data_output[0]=numpy.array(self.dataOut.velocityX)
1077 1075 # self.data_output[1]=numpy.array(self.dataOut.velocityY)
1078 1076 # self.data_output[2]=numpy.array(self.dataOut.velocityV)
1079 1077 #
1080 1078 # prin= self.data_output[0][~numpy.isnan(self.data_output[0])]
1081 1079 # print ' '
1082 1080 # print 'VmagAverage',numpy.mean(prin)
1083 1081 # print ' '
1084 1082 # # plt.figure(5)
1085 1083 # # plt.subplot(211)
1086 1084 # # plt.plot(self.dataOut.velocityX,'yo:')
1087 1085 # # plt.subplot(212)
1088 1086 # # plt.plot(self.dataOut.velocityY,'yo:')
1089 1087 #
1090 1088 # # plt.figure(1)
1091 1089 # # # plt.subplot(121)
1092 1090 # # # plt.plot(xFrec,ySamples[0],'k',label='Ch0')
1093 1091 # # # plt.plot(xFrec,ySamples[1],'g',label='Ch1')
1094 1092 # # # plt.plot(xFrec,ySamples[2],'r',label='Ch2')
1095 1093 # # # plt.plot(xFrec,FitGauss,'yo:',label='fit')
1096 1094 # # # plt.legend()
1097 1095 # # plt.title('DATOS A ALTURA DE 2850 METROS')
1098 1096 # #
1099 1097 # # plt.xlabel('Frecuencia (KHz)')
1100 1098 # # plt.ylabel('Magnitud')
1101 1099 # # # plt.subplot(122)
1102 1100 # # # plt.title('Fit for Time Constant')
1103 1101 # # #plt.plot(xFrec,zline)
1104 1102 # # #plt.plot(xFrec,SmoothSPC,'g')
1105 1103 # # plt.plot(xFrec,FactNorm)
1106 1104 # # plt.axis([-4, 4, 0, 0.15])
1107 1105 # # # plt.xlabel('SelfSpectra KHz')
1108 1106 # #
1109 1107 # # plt.figure(10)
1110 1108 # # # plt.subplot(121)
1111 1109 # # plt.plot(xFrec,ySamples[0],'b',label='Ch0')
1112 1110 # # plt.plot(xFrec,ySamples[1],'y',label='Ch1')
1113 1111 # # plt.plot(xFrec,ySamples[2],'r',label='Ch2')
1114 1112 # # # plt.plot(xFrec,FitGauss,'yo:',label='fit')
1115 1113 # # plt.legend()
1116 1114 # # plt.title('SELFSPECTRA EN CANALES')
1117 1115 # #
1118 1116 # # plt.xlabel('Frecuencia (KHz)')
1119 1117 # # plt.ylabel('Magnitud')
1120 1118 # # # plt.subplot(122)
1121 1119 # # # plt.title('Fit for Time Constant')
1122 1120 # # #plt.plot(xFrec,zline)
1123 1121 # # #plt.plot(xFrec,SmoothSPC,'g')
1124 1122 # # # plt.plot(xFrec,FactNorm)
1125 1123 # # # plt.axis([-4, 4, 0, 0.15])
1126 1124 # # # plt.xlabel('SelfSpectra KHz')
1127 1125 # #
1128 1126 # # plt.figure(9)
1129 1127 # #
1130 1128 # #
1131 1129 # # plt.title('DATOS SUAVIZADOS')
1132 1130 # # plt.xlabel('Frecuencia (KHz)')
1133 1131 # # plt.ylabel('Magnitud')
1134 1132 # # plt.plot(xFrec,SmoothSPC,'g')
1135 1133 # #
1136 1134 # # #plt.plot(xFrec,FactNorm)
1137 1135 # # plt.axis([-4, 4, 0, 0.15])
1138 1136 # # # plt.xlabel('SelfSpectra KHz')
1139 1137 # # #
1140 1138 # # plt.figure(2)
1141 1139 # # # #plt.subplot(121)
1142 1140 # # plt.plot(xFrec,yMean,'r',label='Mean SelfSpectra')
1143 1141 # # plt.plot(xFrec,FitGauss,'yo:',label='Ajuste Gaussiano')
1144 1142 # # # plt.plot(xFrec[Rangpos],FitGauss[Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.1)))],'bo')
1145 1143 # # # #plt.plot(xFrec,phase)
1146 1144 # # # plt.xlabel('Suavizado, promediado KHz')
1147 1145 # # plt.title('SELFSPECTRA PROMEDIADO')
1148 1146 # # # #plt.subplot(122)
1149 1147 # # # #plt.plot(xSamples,zline)
1150 1148 # # plt.xlabel('Frecuencia (KHz)')
1151 1149 # # plt.ylabel('Magnitud')
1152 1150 # # plt.legend()
1153 1151 # # #
1154 1152 # # # plt.figure(3)
1155 1153 # # # plt.subplot(311)
1156 1154 # # # #plt.plot(xFrec,phase[0])
1157 1155 # # # plt.plot(xFrec,phase[0],'g')
1158 1156 # # # plt.subplot(312)
1159 1157 # # # plt.plot(xFrec,phase[1],'g')
1160 1158 # # # plt.subplot(313)
1161 1159 # # # plt.plot(xFrec,phase[2],'g')
1162 1160 # # # #plt.plot(xFrec,phase[2])
1163 1161 # # #
1164 1162 # # # plt.figure(4)
1165 1163 # # #
1166 1164 # # # plt.plot(xSamples,coherence[0],'b')
1167 1165 # # # plt.plot(xSamples,coherence[1],'r')
1168 1166 # # # plt.plot(xSamples,coherence[2],'g')
1169 1167 # # plt.show()
1170 1168 # # #
1171 1169 # # # plt.clf()
1172 1170 # # # plt.cla()
1173 1171 # # # plt.close()
1174 1172 #
1175 1173 # print ' '
1176 1174
1177 1175 self.BlockCounter += 2
1178 1176
1179 1177 else:
1180 1178 self.fileSelector += 1
1181 1179 self.BlockCounter = 0
1182 1180 print "Next File"
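Earlier in readBlock, each antenna's polar coordinates are converted to Cartesian east/north components with pol2cart and the pairwise baseline differences are stacked into ChanDist. That geometry step can be reproduced on its own; a short sketch with made-up antenna distances and azimuth angles (not values from a real BLTR record header):

import numpy

def pol2cart(rho, phi):
    # polar (distance, angle in radians) -> Cartesian east/north components
    return rho * numpy.cos(phi), rho * numpy.sin(phi)

# Hypothetical antennas: (distance [m], azimuth [deg])
coords = [(0.0, 0.0), (35.0, 0.0), (35.0, 120.0)]
xy = [pol2cart(rho, angl * numpy.pi / 180) for rho, angl in coords]

(x0, y0), (x1, y1), (x2, y2) = xy
ChanDist = numpy.array([[x0 - x1, y0 - y1],    # baseline for pair (0, 1)
                        [x0 - x2, y0 - y2],    # baseline for pair (0, 2)
                        [x1 - x2, y1 - y2]])   # baseline for pair (1, 2)
print ChanDist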
@@ -1,802 +1,800
1 1 import os
2 2 import sys
3 3 import glob
4 4 import fnmatch
5 5 import datetime
6 6 import time
7 7 import re
8 8 import h5py
9 9 import numpy
10 10
11 11 from scipy.optimize import curve_fit
12 12 from scipy import asarray as ar, exp
13 13 from scipy import stats
14 14
15 15 from numpy.ma.core import getdata
16 16
17 17 SPEED_OF_LIGHT = 299792458
18 18 SPEED_OF_LIGHT = 3e8
19 19
20 20 try:
21 21 from gevent import sleep
22 22 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jrodata import Spectra
26 26 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 28 #from schainpy.model.io.jroIO_bltr import BLTRReader
29 29 from numpy import imag, shape, NaN, empty
30 30
31 31
32 32 class Header(object):
33 33
34 34 def __init__(self):
35 35 raise NotImplementedError
36 36
37 37 def read(self):
38 38
39 39 raise NotImplementedError
40 40
41 41 def write(self):
42 42
43 43 raise NotImplementedError
44 44
45 45 def printInfo(self):
46 46
47 47 message = "#" * 50 + "\n"
48 48 message += self.__class__.__name__.upper() + "\n"
49 49 message += "#" * 50 + "\n"
50 50
51 51 keyList = self.__dict__.keys()
52 52 keyList.sort()
53 53
54 54 for key in keyList:
55 55 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
56 56
57 57 if "size" not in keyList:
58 58 attr = getattr(self, "size", None)
59 59
60 60 if attr:
61 61 message += "%s = %s" % ("size", attr) + "\n"
62 62
63 63 # print message
64 64
65 65
66 66 FILE_HEADER = numpy.dtype([ # HEADER 1024bytes
67 67 ('Hname', 'a32'), # Original file name
68 68 # Date and time when the file was created
69 69 ('Htime', numpy.str_, 32),
70 70 # Name of operator who created the file
71 71 ('Hoper', numpy.str_, 64),
72 72 # Place where the measurements was carried out
73 73 ('Hplace', numpy.str_, 128),
74 74 # Description of measurements
75 75 ('Hdescr', numpy.str_, 256),
76 76 ('Hdummy', numpy.str_, 512), # Reserved space
77 77 # Main chunk 8bytes
78 78 # Main chunk signature FZKF or NUIG
79 79 ('Msign', numpy.str_, 4),
80 80 ('MsizeData', '<i4'), # Size of data block main chunk
81 81 # Processing DSP parameters 36bytes
82 82 ('PPARsign', numpy.str_, 4), # PPAR signature
83 83 ('PPARsize', '<i4'), # PPAR size of block
84 84 ('PPARprf', '<i4'), # Pulse repetition frequency
85 85 ('PPARpdr', '<i4'), # Pulse duration
86 86 ('PPARsft', '<i4'), # FFT length
87 87 # Number of spectral (incoherent) averages
88 88 ('PPARavc', '<i4'),
89 89 # Number of lowest range gate for moment estimation
90 90 ('PPARihp', '<i4'),
91 91 # Count for gates for moment estimation
92 92 ('PPARchg', '<i4'),
93 93 # switch on/off polarimetric measurements. Should be 1.
94 94 ('PPARpol', '<i4'),
95 95 # Service DSP parameters 112bytes
96 96 # STC attenuation on the lowest ranges on/off
97 97 ('SPARatt', '<i4'),
98 98 ('SPARtx', '<i4'), # OBSOLETE
99 99 ('SPARaddGain0', '<f4'), # OBSOLETE
100 100 ('SPARaddGain1', '<f4'), # OBSOLETE
101 101 # Debug only. In normal mode it is 0.
102 102 ('SPARwnd', '<i4'),
103 103 # Delay between sync pulse and tx pulse for phase corr, ns
104 104 ('SPARpos', '<i4'),
105 105 # "add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
106 106 ('SPARadd', '<i4'),
107 107 # Time for measuring txn pulse phase. OBSOLETE
108 108 ('SPARlen', '<i4'),
109 109 ('SPARcal', '<i4'), # OBSOLETE
110 110 ('SPARnos', '<i4'), # OBSOLETE
111 111 ('SPARof0', '<i4'), # detection threshold
112 112 ('SPARof1', '<i4'), # OBSOLETE
113 113 ('SPARswt', '<i4'), # 2nd moment estimation threshold
114 114 ('SPARsum', '<i4'), # OBSOLETE
115 115 ('SPARosc', '<i4'), # flag Oscillogram mode
116 116 ('SPARtst', '<i4'), # OBSOLETE
117 117 ('SPARcor', '<i4'), # OBSOLETE
118 118 ('SPARofs', '<i4'), # OBSOLETE
119 119 # Hildebrand div noise detection on noise gate
120 120 ('SPARhsn', '<i4'),
121 121 # Hildebrand div noise detection on all gates
122 122 ('SPARhsa', '<f4'),
123 123 ('SPARcalibPow_M', '<f4'), # OBSOLETE
124 124 ('SPARcalibSNR_M', '<f4'), # OBSOLETE
125 125 ('SPARcalibPow_S', '<f4'), # OBSOLETE
126 126 ('SPARcalibSNR_S', '<f4'), # OBSOLETE
127 127 # Lowest range gate for spectra saving Raw_Gate1 >=5
128 128 ('SPARrawGate1', '<i4'),
129 129 # Number of range gates with atmospheric signal
130 130 ('SPARrawGate2', '<i4'),
131 131 # flag - IQ or spectra saving on/off
132 132 ('SPARraw', '<i4'),
133 133 ('SPARprc', '<i4'), ]) # flag - Moment estimation switched on/off
134 134
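FILE_HEADER above is a byte-exact map of the fixed file header: 1024 bytes of text fields, an 8-byte main chunk, 36 bytes of PPAR and 112 bytes of SPAR parameters, 1180 bytes in total. A single numpy.fromfile call is therefore enough to parse it, which is what FHread does field by field further down. A minimal sketch, assuming 'example.zspca' is the path of a valid data file:

import numpy

print FILE_HEADER.itemsize                        # 1180, matching FHsize below

fp = open('example.zspca', 'rb')                  # hypothetical file path
header = numpy.fromfile(fp, FILE_HEADER, 1)       # one record = the whole file header
print header['Hname'][0]                          # original file name
print header['PPARprf'][0], header['PPARsft'][0]  # PRF and FFT length
fp.close()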
135 135
136 136 class FileHeaderMIRA35c(Header):
137 137
138 138 def __init__(self):
139 139
140 140 self.Hname = None
141 141 self.Htime = None
142 142 self.Hoper = None
143 143 self.Hplace = None
144 144 self.Hdescr = None
145 145 self.Hdummy = None
146 146
147 147 self.Msign = None
148 148 self.MsizeData = None
149 149
150 150 self.PPARsign = None
151 151 self.PPARsize = None
152 152 self.PPARprf = None
153 153 self.PPARpdr = None
154 154 self.PPARsft = None
155 155 self.PPARavc = None
156 156 self.PPARihp = None
157 157 self.PPARchg = None
158 158 self.PPARpol = None
159 159 # Service DSP parameters
160 160 self.SPARatt = None
161 161 self.SPARtx = None
162 162 self.SPARaddGain0 = None
163 163 self.SPARaddGain1 = None
164 164 self.SPARwnd = None
165 165 self.SPARpos = None
166 166 self.SPARadd = None
167 167 self.SPARlen = None
168 168 self.SPARcal = None
169 169 self.SPARnos = None
170 170 self.SPARof0 = None
171 171 self.SPARof1 = None
172 172 self.SPARswt = None
173 173 self.SPARsum = None
174 174 self.SPARosc = None
175 175 self.SPARtst = None
176 176 self.SPARcor = None
177 177 self.SPARofs = None
178 178 self.SPARhsn = None
179 179 self.SPARhsa = None
180 180 self.SPARcalibPow_M = None
181 181 self.SPARcalibSNR_M = None
182 182 self.SPARcalibPow_S = None
183 183 self.SPARcalibSNR_S = None
184 184 self.SPARrawGate1 = None
185 185 self.SPARrawGate2 = None
186 186 self.SPARraw = None
187 187 self.SPARprc = None
188 188
189 189 self.FHsize = 1180
190 190
191 191 def FHread(self, fp):
192 192
193 193 header = numpy.fromfile(fp, FILE_HEADER, 1)
194 194 ''' numpy.fromfile(file, dtype, count, sep='')
195 195 file : file or str
196 196 Open file object or filename.
197 197
198 198 dtype : data-type
199 199 Data type of the returned array. For binary files, it is used to determine
200 200 the size and byte-order of the items in the file.
201 201
202 202 count : int
203 203 Number of items to read. -1 means all items (i.e., the complete file).
204 204
205 205 sep : str
206 206 Separator between items if file is a text file. Empty ("") separator means
207 207 the file should be treated as binary. Spaces (" ") in the separator match zero
208 208 or more whitespace characters. A separator consisting only of spaces must match
209 209 at least one whitespace.
210 210
211 211 '''
212 212
213 213 self.Hname = str(header['Hname'][0])
214 214 self.Htime = str(header['Htime'][0])
215 215 self.Hoper = str(header['Hoper'][0])
216 216 self.Hplace = str(header['Hplace'][0])
217 217 self.Hdescr = str(header['Hdescr'][0])
218 218 self.Hdummy = str(header['Hdummy'][0])
219 219 # 1024
220 220
221 221 self.Msign = str(header['Msign'][0])
222 222 self.MsizeData = header['MsizeData'][0]
223 223 # 8
224 224
225 225 self.PPARsign = str(header['PPARsign'][0])
226 226 self.PPARsize = header['PPARsize'][0]
227 227 self.PPARprf = header['PPARprf'][0]
228 228 self.PPARpdr = header['PPARpdr'][0]
229 229 self.PPARsft = header['PPARsft'][0]
230 230 self.PPARavc = header['PPARavc'][0]
231 231 self.PPARihp = header['PPARihp'][0]
232 232 self.PPARchg = header['PPARchg'][0]
233 233 self.PPARpol = header['PPARpol'][0]
234 234 # Service DSP parameters
235 235 # 36
236 236
237 237 self.SPARatt = header['SPARatt'][0]
238 238 self.SPARtx = header['SPARtx'][0]
239 239 self.SPARaddGain0 = header['SPARaddGain0'][0]
240 240 self.SPARaddGain1 = header['SPARaddGain1'][0]
241 241 self.SPARwnd = header['SPARwnd'][0]
242 242 self.SPARpos = header['SPARpos'][0]
243 243 self.SPARadd = header['SPARadd'][0]
244 244 self.SPARlen = header['SPARlen'][0]
245 245 self.SPARcal = header['SPARcal'][0]
246 246 self.SPARnos = header['SPARnos'][0]
247 247 self.SPARof0 = header['SPARof0'][0]
248 248 self.SPARof1 = header['SPARof1'][0]
249 249 self.SPARswt = header['SPARswt'][0]
250 250 self.SPARsum = header['SPARsum'][0]
251 251 self.SPARosc = header['SPARosc'][0]
252 252 self.SPARtst = header['SPARtst'][0]
253 253 self.SPARcor = header['SPARcor'][0]
254 254 self.SPARofs = header['SPARofs'][0]
255 255 self.SPARhsn = header['SPARhsn'][0]
256 256 self.SPARhsa = header['SPARhsa'][0]
257 257 self.SPARcalibPow_M = header['SPARcalibPow_M'][0]
258 258 self.SPARcalibSNR_M = header['SPARcalibSNR_M'][0]
259 259 self.SPARcalibPow_S = header['SPARcalibPow_S'][0]
260 260 self.SPARcalibSNR_S = header['SPARcalibSNR_S'][0]
261 261 self.SPARrawGate1 = header['SPARrawGate1'][0]
262 262 self.SPARrawGate2 = header['SPARrawGate2'][0]
263 263 self.SPARraw = header['SPARraw'][0]
264 264 self.SPARprc = header['SPARprc'][0]
265 265 # 112
266 266 # 1180
267 267 # print 'Pointer fp header', fp.tell()
268 268 # print ' '
269 269 # print 'SPARrawGate'
270 270 # print self.SPARrawGate2 - self.SPARrawGate1
271 271
272 272 # print ' '
273 273 # print 'Hname'
274 274 # print self.Hname
275 275
276 276 # print ' '
277 277 # print 'Msign'
278 278 # print self.Msign
279 279
280 280 def write(self, fp):
281 281
282 282 headerTuple = (self.Hname,
283 283 self.Htime,
284 284 self.Hoper,
285 285 self.Hplace,
286 286 self.Hdescr,
287 287 self.Hdummy)
288 288
289 289 header = numpy.array(headerTuple, FILE_HEADER)
290 290 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
291 291 header.tofile(fp)
292 292 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
293 293
294 294 fid : file or str
295 295 An open file object, or a string containing a filename.
296 296
297 297 sep : str
298 298 Separator between array items for text output. If "" (empty), a binary file is written,
299 299 equivalent to file.write(a.tobytes()).
300 300
301 301 format : str
302 302 Format string for text file output. Each entry in the array is formatted to text by
303 303 first converting it to the closest Python type, and then using "format" % item.
304 304
305 305 '''
306 306
307 307 return 1
308 308
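As the docstring above notes, tofile with the default empty separator produces a raw binary dump, so whatever structured array is written can be read back with numpy.fromfile using the same dtype. A small round-trip sketch with a zeroed header record (the scratch file name and field values are illustrative only; this is not the library's own write path):

import numpy

record = numpy.zeros(1, dtype=FILE_HEADER)   # blank header, all fields zeroed
record['Hname'] = 'example.zspca'            # hypothetical original file name

with open('header.bin', 'wb') as fp:         # scratch file for the example
    record.tofile(fp)                        # binary dump (sep defaults to '')

with open('header.bin', 'rb') as fp:
    back = numpy.fromfile(fp, FILE_HEADER, 1)
print back['Hname'][0]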
309 309
310 310 SRVI_HEADER = numpy.dtype([
311 311 ('SignatureSRVI1', numpy.str_, 4),
312 312 ('SizeOfDataBlock1', '<i4'),
313 313 ('DataBlockTitleSRVI1', numpy.str_, 4),
314 314 ('SizeOfSRVI1', '<i4'), ])
315 315
316 316
317 317 class SRVIHeader(Header):
318 318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
319 319
320 320 self.SignatureSRVI1 = SignatureSRVI1
321 321 self.SizeOfDataBlock1 = SizeOfDataBlock1
322 322 self.DataBlockTitleSRVI1 = DataBlockTitleSRVI1
323 323 self.SizeOfSRVI1 = SizeOfSRVI1
324 324
325 325 self.SRVIHsize = 16
326 326
327 327 def SRVIread(self, fp):
328 328
329 329 header = numpy.fromfile(fp, SRVI_HEADER, 1)
330 330
331 331 self.SignatureSRVI1 = str(header['SignatureSRVI1'][0])
332 332 self.SizeOfDataBlock1 = header['SizeOfDataBlock1'][0]
333 333 self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0])
334 334 self.SizeOfSRVI1 = header['SizeOfSRVI1'][0]
335 335 # 16
336 336 print 'Pointer fp SRVIheader', fp.tell()
337 337
338 338
339 339 SRVI_STRUCTURE = numpy.dtype([
340 340 ('frame_cnt', '<u4'),
341 341 ('time_t', '<u4'), #
342 342 ('tpow', '<f4'), #
343 343 ('npw1', '<f4'), #
344 344 ('npw2', '<f4'), #
345 345 ('cpw1', '<f4'), #
346 346 ('pcw2', '<f4'), #
347 347 ('ps_err', '<u4'), #
348 348 ('te_err', '<u4'), #
349 349 ('rc_err', '<u4'), #
350 350 ('grs1', '<u4'), #
351 351 ('grs2', '<u4'), #
352 352 ('azipos', '<f4'), #
353 353 ('azivel', '<f4'), #
354 354 ('elvpos', '<f4'), #
355 355 ('elvvel', '<f4'), #
356 356 ('northAngle', '<f4'),
357 357 ('microsec', '<u4'), #
358 358 ('azisetvel', '<f4'), #
359 359 ('elvsetpos', '<f4'), #
360 360 ('RadarConst', '<f4'), ]) #
361 361
362 362
363 363 class RecordHeader(Header):
364 364
365 365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
369 369
370 370 self.frame_cnt = frame_cnt
371 371 self.dwell = time_t
372 372 self.tpow = tpow
373 373 self.npw1 = npw1
374 374 self.npw2 = npw2
375 375 self.cpw1 = cpw1
376 376 self.pcw2 = pcw2
377 377 self.ps_err = ps_err
378 378 self.te_err = te_err
379 379 self.rc_err = rc_err
380 380 self.grs1 = grs1
381 381 self.grs2 = grs2
382 382 self.azipos = azipos
383 383 self.azivel = azivel
384 384 self.elvpos = elvpos
385 385 self.elvvel = elvvel
386 386 self.northAngle = northangle
387 387 self.microsec = microsec
388 388 self.azisetvel = azisetvel
389 389 self.elvsetpos = elvsetpos
390 390 self.RadarConst = RadarConst
391 391 self.RHsize = 84
392 392 self.RecCounter = RecCounter
393 393 self.Off2StartNxtRec = Off2StartNxtRec
394 394
395 395 def RHread(self, fp):
396 396
397 397 # startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
398 398
399 399 #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 400 #startFp.seek(OffRHeader, os.SEEK_SET)
401 401
402 402 # print 'Block position: ', OffRHeader
403 403
404 404 header = numpy.fromfile(fp, SRVI_STRUCTURE, 1)
405 405
406 406 self.frame_cnt = header['frame_cnt'][0]
407 407 self.time_t = header['time_t'][0] #
408 408 self.tpow = header['tpow'][0] #
409 409 self.npw1 = header['npw1'][0] #
410 410 self.npw2 = header['npw2'][0] #
411 411 self.cpw1 = header['cpw1'][0] #
412 412 self.pcw2 = header['pcw2'][0] #
413 413 self.ps_err = header['ps_err'][0] #
414 414 self.te_err = header['te_err'][0] #
415 415 self.rc_err = header['rc_err'][0] #
416 416 self.grs1 = header['grs1'][0] #
417 417 self.grs2 = header['grs2'][0] #
418 418 self.azipos = header['azipos'][0] #
419 419 self.azivel = header['azivel'][0] #
420 420 self.elvpos = header['elvpos'][0] #
421 421 self.elvvel = header['elvvel'][0] #
422 422 self.northAngle = header['northAngle'][0] #
423 423 self.microsec = header['microsec'][0] #
424 424 self.azisetvel = header['azisetvel'][0] #
425 425 self.elvsetpos = header['elvsetpos'][0] #
426 426 self.RadarConst = header['RadarConst'][0] #
427 427 # 84
428 428
429 429 # print 'Pointer fp RECheader', fp.tell()
430 430
431 431 #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
432 432
433 433 #self.RHsize = 180+20*self.nChannels
434 434 #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
435 435 # print 'Datasize',self.Datasize
436 436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
437 437
438 438 print '=============================================='
439 439
440 440 print '=============================================='
441 441
442 442 return 1
443 443
444 444
445 445 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader):
446 446
447 447 path = None
448 448 startDate = None
449 449 endDate = None
450 450 startTime = None
451 451 endTime = None
452 452 walk = None
453 453 isConfig = False
454 454
455 455 fileList = None
456 456
457 457 # metadata
458 458 TimeZone = None
459 459 Interval = None
460 460 heightList = None
461 461
462 462 # data
463 463 data = None
464 464 utctime = None
465 465
466 466 def __init__(self, **kwargs):
467 467
468 468 # Remove the inheritance from the base class
469 469 ProcessingUnit.__init__(self, **kwargs)
470 470 self.PointerReader = 0
471 471 self.FileHeaderFlag = False
472 472 self.utc = None
473 473 self.ext = ".zspca"
474 474 self.optchar = "P"
475 475 self.fpFile = None
476 476 self.fp = None
477 477 self.BlockCounter = 0
478 478 self.dtype = None
479 479 self.fileSizeByHeader = None
480 480 self.filenameList = []
481 481 self.fileSelector = 0
482 482 self.Off2StartNxtRec = 0
483 483 self.RecCounter = 0
484 484 self.flagNoMoreFiles = 0
485 485 self.data_spc = None
486 486 # self.data_cspc=None
487 487 self.data_output = None
488 488 self.path = None
489 489 self.OffsetStartHeader = 0
490 490 self.Off2StartData = 0
491 491 self.ipp = 0
492 492 self.nFDTdataRecors = 0
493 493 self.blocksize = 0
494 494 self.dataOut = Spectra()
495 495 self.profileIndex = 1 # Always
496 496 self.dataOut.flagNoData = False
497 self.dataOut.nRdPairs = 0
498 self.dataOut.pairsList = []
499 self.dataOut.data_spc = None
500
501 self.dataOut.normFactor = 1
497 self.dataOut.nRdPairs = 0
498 self.dataOut.data_spc = None
502 499 self.nextfileflag = True
503 500 self.dataOut.RadarConst = 0
504 501 self.dataOut.HSDV = []
505 502 self.dataOut.NPW = []
506 503 self.dataOut.COFA = []
507 self.dataOut.noise = 0
504 # self.dataOut.noise = 0
508 505
509 506 def Files2Read(self, fp):
510 507 '''
511 508 Function that indicates the number of .fdt files that exist in the folder to be read.
512 509 It also creates an organized list with the names of the files to read.
513 510 '''
514 511 # self.__checkPath()
515 512
516 513 # Gets the list of files within the fp address
517 514 ListaData = os.listdir(fp)
518 515 # Sort the list of file names alphabetically
519 516 ListaData = sorted(ListaData)
520 517 nFiles = 0 # File Counter
521 518 FileList = [] # List that will hold the .zspca data files
522 519 for IndexFile in ListaData:
523 520 if '.zspca' in IndexFile and '.gz' not in IndexFile:
524 521 FileList.append(IndexFile)
525 522 nFiles += 1
526 523
527 524 # print 'Files2Read'
528 525 # print 'There are ' + str(nFiles) + ' data files'
529 526
530 527 self.filenameList = FileList # Sorted list of file names
531 528
532 529 def run(self, **kwargs):
533 530 '''
534 531 This method starts the data input and is called repeatedly.
535 532 It first verifies that setup() has been configured and then acquires
536 533 the data to be processed with getData().
537 534 '''
538 535 if not self.isConfig:
539 536 self.setup(**kwargs)
540 537 self.isConfig = True
541 538
542 539 self.getData()
543 540
544 541 def setup(self, path=None,
545 542 startDate=None,
546 543 endDate=None,
547 544 startTime=None,
548 545 endTime=None,
549 546 walk=True,
550 547 timezone='utc',
551 548 code=None,
552 549 online=False,
553 550 ReadMode=None, **kwargs):
554 551
555 552 self.isConfig = True
556 553
557 554 self.path = path
558 555 self.startDate = startDate
559 556 self.endDate = endDate
560 557 self.startTime = startTime
561 558 self.endTime = endTime
562 559 self.walk = walk
563 560 # self.ReadMode=int(ReadMode)
564 561
565 562 pass
566 563
567 564 def getData(self):
568 565 '''
569 566 Before running this function, check that there is still an unread file and
570 567 whether there are blocks left to read or the data block is empty.
571 568
572 569 Then call readFile().
573 570
574 571 '''
575 572
576 573 if self.flagNoMoreFiles:
577 574 self.dataOut.flagNoData = True
578 575 print 'flagNoData becomes True'
579 576 return 0
580 577
581 578 self.fp = self.path
582 579 self.Files2Read(self.fp)
583 580 self.readFile(self.fp)
584 581
585 582 self.dataOut.data_spc = self.dataOut_spc # self.data_spc.copy()
586 583 self.dataOut.RadarConst = self.RadarConst
587 584 self.dataOut.data_output = self.data_output
588 585 self.dataOut.noise = self.dataOut.getNoise()
589 586 # print 'ACAAAAAA', self.dataOut.noise
590 587 self.dataOut.data_spc = self.dataOut.data_spc + self.dataOut.noise
588 self.dataOut.normFactor = 1
591 589 # print 'self.dataOut.noise',self.dataOut.noise
592 590
593 591 return self.dataOut.data_spc
594 592
595 593 def readFile(self, fp):
596 594 '''
597 595 Indicate whether you are reading in online or offline mode and load the
598 596 parameters for that file reading mode.
599 597
600 598 Then you must do 2 actions:
601 599
602 600 1. Get the MIRA35c file header.
603 601 2. Start reading the first block.
604 602 '''
605 603
606 604 # Build the full path of the file to be read from the folder address and the file name
607 605 print "File: ", self.fileSelector + 1
608 606
609 607 if self.fileSelector < len(self.filenameList):
610 608
611 609 self.fpFile = str(fp) + '/' + \
612 610 str(self.filenameList[self.fileSelector])
613 611
614 612 if self.nextfileflag == True:
615 613 self.fp = open(self.fpFile, "rb")
616 614 self.nextfileflag = False
617 615
618 616 '''FILE READING STARTS HERE'''
619 617
620 618 self.fheader = FileHeaderMIRA35c()
621 619 self.fheader.FHread(self.fp) # MIRA35c file header reading
622 620
623 621 self.SPARrawGate1 = self.fheader.SPARrawGate1
624 622 self.SPARrawGate2 = self.fheader.SPARrawGate2
625 623 self.Num_Hei = self.SPARrawGate2 - self.SPARrawGate1
626 624 self.Num_Bins = self.fheader.PPARsft
627 625 self.dataOut.nFFTPoints = self.fheader.PPARsft
628 626
629 627 self.Num_inCoh = self.fheader.PPARavc
630 628 self.dataOut.PRF = self.fheader.PPARprf
631 629 self.dataOut.frequency = 34.85 * 10**9
632 630 self.Lambda = SPEED_OF_LIGHT / self.dataOut.frequency
633 631 self.dataOut.ippSeconds = 1. / float(self.dataOut.PRF)
634 632
635 633 pulse_width = self.fheader.PPARpdr * 10**-9
636 634 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
637 635
638 636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins, 2))
639 637 self.dataOut.HSDV = numpy.zeros((self.Num_Hei, 2))
640 638
641 639 self.Ze = numpy.zeros(self.Num_Hei)
642 640 self.ETA = numpy.zeros(([2, self.Num_Hei]))
643 641
644 642 self.readBlock() # Block reading
645 643
646 644 else:
647 645 print 'readFile FlagNoData becomes true'
648 646 self.flagNoMoreFiles = True
649 647 self.dataOut.flagNoData = True
650 648 self.FileHeaderFlag = True
651 649 return 0
652 650
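readFile reduces the header fields to a handful of derived quantities: the wavelength from the fixed 34.85 GHz carrier, the inter-pulse period from the PRF, and the range resolution from the pulse duration (delta_h = c * tau / 2). A worked example with assumed values (PRF = 5000 Hz, 200 ns pulse), not taken from an actual .zspca header:

SPEED_OF_LIGHT = 299792458.

PPARprf = 5000        # pulse repetition frequency [Hz] (assumed)
PPARpdr = 200         # pulse duration [ns] (assumed)
frequency = 34.85e9   # Ka-band transmit frequency [Hz]

Lambda = SPEED_OF_LIGHT / frequency                # ~8.6 mm wavelength
ippSeconds = 1. / PPARprf                          # inter-pulse period [s]
pulse_width = PPARpdr * 1e-9                       # [s]
deltaHeight = 0.5 * SPEED_OF_LIGHT * pulse_width   # range resolution, ~30 m here

print Lambda, ippSeconds, deltaHeight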
653 651 def readBlock(self):
654 652 '''
655 653 Check whether the block still has data; if it does not, move on to the next file.
656 654
657 655 Then the following is done:
658 656
659 657 1. Read the RecordHeader
660 658 2. Fill the buffer with the current block number.
661 659
662 660 '''
663 661
664 662 if self.PointerReader > 1180:
665 663 self.fp.seek(self.PointerReader, os.SEEK_SET)
666 664 self.FirstPoint = self.PointerReader
667 665
668 666 else:
669 667 self.FirstPoint = 1180
670 668
671 669 self.srviHeader = SRVIHeader()
672 670
673 671 self.srviHeader.SRVIread(self.fp) # Read the SRVI header
674 672
675 673 self.blocksize = self.srviHeader.SizeOfDataBlock1 # Get the data block size
676 674
677 675 if self.blocksize == 148:
678 676 print 'blocksize == 148 bug'
679 677 jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1)
680 678
681 679 # Read the SRVI header again
682 680 self.srviHeader.SRVIread(self.fp)
683 681
684 682 if not self.srviHeader.SizeOfSRVI1:
685 683 self.fileSelector += 1
686 684 self.nextfileflag = True
687 685 self.FileHeaderFlag = True
688 686
689 687 self.recordheader = RecordHeader()
690 688 self.recordheader.RHread(self.fp)
691 689 self.RadarConst = self.recordheader.RadarConst
692 690 dwell = self.recordheader.time_t
693 691 npw1 = self.recordheader.npw1
694 692 npw2 = self.recordheader.npw2
695 693
696 694 self.dataOut.channelList = range(1)
697 695 self.dataOut.nIncohInt = self.Num_inCoh
698 696 self.dataOut.nProfiles = self.Num_Bins
699 697 self.dataOut.nCohInt = 1
700 698 self.dataOut.windowOfFilter = 1
701 699 self.dataOut.utctime = dwell
702 700 self.dataOut.timeZone = 0
703 701
704 702 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
705 703 self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \
706 704 numpy.array(range(self.Num_Hei)) * self.__deltaHeigth
707 705
708 706 self.HSDVsign = numpy.fromfile(self.fp, [('HSDV', numpy.str_, 4)], 1)
709 707 self.SizeHSDV = numpy.fromfile(self.fp, [('SizeHSDV', '<i4')], 1)
710 708 self.HSDV_Co = numpy.fromfile(
711 709 self.fp, [('HSDV_Co', '<f4')], self.Num_Hei)
712 710 self.HSDV_Cx = numpy.fromfile(
713 711 self.fp, [('HSDV_Cx', '<f4')], self.Num_Hei)
714 712
715 713 self.COFAsign = numpy.fromfile(self.fp, [('COFA', numpy.str_, 4)], 1)
716 714 self.SizeCOFA = numpy.fromfile(self.fp, [('SizeCOFA', '<i4')], 1)
717 715 self.COFA_Co = numpy.fromfile(
718 716 self.fp, [('COFA_Co', '<f4')], self.Num_Hei)
719 717 self.COFA_Cx = numpy.fromfile(
720 718 self.fp, [('COFA_Cx', '<f4')], self.Num_Hei)
721 719
722 720 self.ZSPCsign = numpy.fromfile(
723 721 self.fp, [('ZSPCsign', numpy.str_, 4)], 1)
724 722 self.SizeZSPC = numpy.fromfile(self.fp, [('SizeZSPC', '<i4')], 1)
725 723
726 724 self.dataOut.HSDV[0] = self.HSDV_Co[:][0]
727 725 self.dataOut.HSDV[1] = self.HSDV_Cx[:][0]
728 726
729 727 for irg in range(self.Num_Hei):
730 728 # Number of spectral sub pieces containing significant power
731 729 nspc = numpy.fromfile(self.fp, [('nspc', 'int16')], 1)[0][0]
732 730
733 731 for k in range(nspc):
734 732 # Index of the spectral bin where the piece is beginning
735 733 binIndex = numpy.fromfile(
736 734 self.fp, [('binIndex', 'int16')], 1)[0][0]
737 735 nbins = numpy.fromfile(self.fp, [('nbins', 'int16')], 1)[
738 736 0][0] # Number of bins of the piece
739 737
740 738 # Co_Channel
741 739 jbin = numpy.fromfile(self.fp, [('jbin', 'uint16')], nbins)[
742 740 0][0] # Spectrum piece to be normalized
743 741 jmax = numpy.fromfile(self.fp, [('jmax', 'float32')], 1)[
744 742 0][0] # Maximum used to scale (normalize) the piece
745 743
746 744 self.data_spc[irg, binIndex:binIndex + nbins, 0] = self.data_spc[irg,
747 745 binIndex:binIndex + nbins, 0] + jbin / 65530. * jmax
748 746
749 747 # Cx_Channel
750 748 jbin = numpy.fromfile(
751 749 self.fp, [('jbin', 'uint16')], nbins)[0][0]
752 750 jmax = numpy.fromfile(self.fp, [('jmax', 'float32')], 1)[0][0]
753 751
754 752 self.data_spc[irg, binIndex:binIndex + nbins, 1] = self.data_spc[irg,
755 753 binIndex:binIndex + nbins, 1] + jbin / 65530. * jmax
756 754
757 755 for bin in range(self.Num_Bins):
758 756
759 757 self.data_spc[:, bin, 0] = self.data_spc[:,
760 758 bin, 0] - self.dataOut.HSDV[:, 0]
761 759
762 760 self.data_spc[:, bin, 1] = self.data_spc[:,
763 761 bin, 1] - self.dataOut.HSDV[:, 1]
764 762
765 763 numpy.set_printoptions(threshold='nan')
766 764
767 765 self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0)
768 766
769 767 self.dataOut.COFA = numpy.array([self.COFA_Co, self.COFA_Cx])
770 768
771 769 print ' '
772 770 print 'SPC', numpy.shape(self.dataOut.data_spc)
773 771 # print 'SPC',self.dataOut.data_spc
774 772
775 773 noinor1 = 713031680
776 774 noinor2 = 30
777 775
778 776 npw1 = 1 # 0**(npw1/10) * noinor1 * noinor2
779 777 npw2 = 1 # 0**(npw2/10) * noinor1 * noinor2
780 778 self.dataOut.NPW = numpy.array([npw1, npw2])
781 779
782 780 print ' '
783 781
784 782 self.data_spc = numpy.transpose(self.data_spc, (2, 1, 0))
785 783 self.data_spc = numpy.fft.fftshift(self.data_spc, axes=1)
786 784
787 785 self.data_spc = numpy.fliplr(self.data_spc)
788 786
789 787 self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0)
790 788 self.dataOut_spc = numpy.ones([1, self.Num_Bins, self.Num_Hei])
791 789 self.dataOut_spc[0, :, :] = self.data_spc[0, :, :]
792 790 # print 'SHAPE', self.dataOut_spc.shape
793 791 # For nyquist correction:
794 792 # fix = 20 # ~3m/s
795 793 #shift = self.Num_Bins/2 + fix
796 794 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
797 795
798 796 '''Block reading: the block data is read and then reshaped into its
799 797 expected dimensions.
800 798 '''
801 799
802 800 self.PointerReader = self.fp.tell()
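The spectra payload decoded above is stored piecewise: for every range gate a 16-bit count of pieces, and for each piece a 16-bit start bin, a 16-bit bin count, nbins unsigned 16-bit samples and a float32 scale, each sample being reconstructed as jbin / 65530. * jmax. The real files carry a co-channel and a cross-channel block per piece; the sketch below decodes a single channel only, with the field layout assumed from the reader above, so treat it as an illustration rather than a drop-in parser:

import numpy

def read_spectra_pieces(fp, num_hei, num_bins):
    # Minimal single-channel sketch of the piecewise spectrum decoding
    spc = numpy.zeros((num_hei, num_bins), dtype='float32')
    for irg in range(num_hei):
        nspc = numpy.fromfile(fp, numpy.int16, 1)[0]         # pieces with significant power
        for _ in range(nspc):
            binIndex = numpy.fromfile(fp, numpy.int16, 1)[0]
            nbins = numpy.fromfile(fp, numpy.int16, 1)[0]
            jbin = numpy.fromfile(fp, numpy.uint16, nbins)   # scaled samples
            jmax = numpy.fromfile(fp, numpy.float32, 1)[0]   # piece maximum
            spc[irg, binIndex:binIndex + nbins] += jbin / 65530. * jmax
    return spc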
@@ -1,348 +1,360
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
5 5 '''
6 6 import inspect
7 7 from fuzzywuzzy import process
8 8
9 9 def checkKwargs(method, kwargs):
10 10 currentKwargs = kwargs
11 11 choices = inspect.getargspec(method).args
12 12 try:
13 13 choices.remove('self')
14 14 except Exception as e:
15 15 pass
16 16
17 17 try:
18 18 choices.remove('dataOut')
19 19 except Exception as e:
20 20 pass
21 21
22 22 for kwarg in kwargs:
23 23 fuzz = process.extractOne(kwarg, choices)
24 24 if fuzz is None:
25 25 continue
26 26 if fuzz[1] < 100:
27 27 raise Exception('\x1b[0;32;40mDid you mean {} instead of {} in {}? \x1b[0m'.
28 28 format(fuzz[0], kwarg, method.__self__.__class__.__name__))
29 29
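checkKwargs walks the target method's signature and uses fuzzywuzzy to flag keyword arguments that look like typos of a valid name, instead of letting them be silently ignored. A small illustration with a hypothetical class (DummyUnit and its run() signature exist only for this example):

class DummyUnit(object):
    # hypothetical operation used only to exercise checkKwargs
    def run(self, dataOut, wintitle=None, colormap=None):
        return dataOut

obj = DummyUnit()
checkKwargs(obj.run, {'colormap': 'jet'})      # exact match: passes silently
try:
    checkKwargs(obj.run, {'colormpa': 'jet'})  # typo: raises with a suggestion
except Exception as e:
    print e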
30 30 class ProcessingUnit(object):
31 31
32 32 """
33 33 This is the base class for data processing.
34 34
35 35 It provides the "call" method to invoke operations. Operations can be:
36 36 - Internal methods (callMethod)
37 37 - Objects of the Operation type (callObject). Before being called, these objects
38 38 must be added with the "add" method.
39 39
40 40 """
41 41 # input data object (Voltage, Spectra or Correlation)
42 42 dataIn = None
43 43 dataInList = []
44 44
45 45 # output data object (Voltage, Spectra or Correlation)
46 46 dataOut = None
47 47
48 48 operations2RunDict = None
49 49
50 50 isConfig = False
51 51
52 52
53 53 def __init__(self, *args, **kwargs):
54 54
55 55 self.dataIn = None
56 56 self.dataInList = []
57 57
58 58 self.dataOut = None
59 59
60 60 self.operations2RunDict = {}
61 61 self.operationKwargs = {}
62 62
63 63 self.isConfig = False
64 64
65 65 self.args = args
66 66 self.kwargs = kwargs
67
68 if not hasattr(self, 'name'):
69 self.name = self.__class__.__name__
70
67 71 checkKwargs(self.run, kwargs)
68 72
69 73 def getAllowedArgs(self):
70 return inspect.getargspec(self.run).args
74 if hasattr(self, '__attrs__'):
75 return self.__attrs__
76 else:
77 return inspect.getargspec(self.run).args
71 78
72 79 def addOperationKwargs(self, objId, **kwargs):
73 80 '''
74 81 '''
75 82
76 83 self.operationKwargs[objId] = kwargs
77 84
78 85
79 86 def addOperation(self, opObj, objId):
80 87
81 88 """
82 89 Adds an object of type "Operation" (opObj) to the object list "self.objectList" and returns the
83 90 identifier associated with this object.
84 91
85 92 Input:
86 93
87 94 object : object of the "Operation" class
88 95
89 96 Return:
90 97
91 98 objId : identifier of the object, needed to execute the operation
92 99 """
93 100
94 101 self.operations2RunDict[objId] = opObj
95 102
96 103 return objId
97 104
98 105 def getOperationObj(self, objId):
99 106
100 107 if objId not in self.operations2RunDict.keys():
101 108 return None
102 109
103 110 return self.operations2RunDict[objId]
104 111
105 112 def operation(self, **kwargs):
106 113
107 114 """
108 115 Direct operation on the data (dataOut.data). The attribute values of the
109 116 dataOut object must be updated.
110 117
111 118 Input:
112 119
113 120 **kwargs : dictionary of arguments for the function to execute
114 121 """
115 122
116 123 raise NotImplementedError
117 124
118 125 def callMethod(self, name, opId):
119 126
120 127 """
121 128 Executes the method named "name" with the **kwargs arguments of this class.
122 129
123 130 Input:
124 131 name : name of the method to execute
125 132
126 133 **kwargs : dictionary with the names and values for the function to execute.
127 134
128 135 """
129 136
130 137 #Checking the inputs
131 138 if name == 'run':
132 139
133 140 if not self.checkInputs():
134 141 self.dataOut.flagNoData = True
135 142 return False
136 143 else:
137 144 #If it is not a RUN method, the input is the same (internal) dataOut
138 145 if self.dataOut is not None and self.dataOut.isEmpty():
139 146 return False
140 147
141 148 #Getting the pointer to method
142 149 methodToCall = getattr(self, name)
143 150
144 151 #Executing the self method
145 152
146 153 if hasattr(self, 'mp'):
147 154 if name=='run':
148 155 if self.mp is False:
149 156 self.mp = True
150 157 self.start()
151 158 else:
152 159 self.operationKwargs[opId]['parent'] = self.kwargs
153 160 methodToCall(**self.operationKwargs[opId])
154 161 else:
155 162 if name=='run':
156 163 methodToCall(**self.kwargs)
157 164 else:
158 165 methodToCall(**self.operationKwargs[opId])
159 166
160 167 if self.dataOut is None:
161 168 return False
162 169
163 170 if self.dataOut.isEmpty():
164 171 return False
165 172
166 173 return True
167 174
168 175 def callObject(self, objId):
169 176
170 177 """
171 178 Executes the operation associated with the object identifier "objId"
172 179
173 180 Input:
174 181
175 182 objId : identifier of the object to execute
176 183
177 184 **kwargs : dictionary with the names and values for the function to execute.
178 185
179 186 Return:
180 187
181 188 None
182 189 """
183 190
184 191 if self.dataOut is not None and self.dataOut.isEmpty():
185 192 return False
186 193
187 194 externalProcObj = self.operations2RunDict[objId]
188 195
189 196 if hasattr(externalProcObj, 'mp'):
190 197 if externalProcObj.mp is False:
191 198 externalProcObj.kwargs['parent'] = self.kwargs
192 199 self.operationKwargs[objId] = externalProcObj.kwargs
193 200 externalProcObj.mp = True
194 201 externalProcObj.start()
195 202 else:
196 203 externalProcObj.run(self.dataOut, **externalProcObj.kwargs)
197 204 self.operationKwargs[objId] = externalProcObj.kwargs
198 205
199 206
200 207 return True
201 208
202 209 def call(self, opType, opName=None, opId=None):
203 210 """
204 211 Returns True if it executes the internal operation named "opName" or the external
205 212 operation identified with the id "opId", with the arguments "**kwargs".
206 213
207 214 Returns False if the operation has not been executed.
208 215
209 216 Input:
210 217
211 218 opType : Can be "self" or "external"
212 219
213 220 Depending on the operation type it calls callMethod or callObject:
214 221
215 222 1. If opType = "self": calls a method of this class itself:
216 223
217 224 name_method = getattr(self, name)
218 225 name_method(**kwargs)
219 226
220 227
221 228 2. If opType = "other" or "external": calls the "run()" method of an instance of the
222 229 "Operation" class or of one of its subclasses:
223 230
224 231 instanceName = self.operationList[opId]
225 232 instanceName.run(**kwargs)
226 233
227 234 opName : If the operation is internal (opType = 'self'), then "opName" is used
228 235 to call an internal method of the Processing class
229 236
230 237 opId : If the operation is external (opType = 'other' or 'external'), then
231 238 "opId" is used to call the "run" method of the Operation class
232 239 registered previously with that id
233 240
234 241 Exception:
235 242 This Operation object must have been added beforehand with the method
236 243 "addOperation" and identified with the value "opId" = the id of the operation.
237 244 Otherwise a ValueError will be raised
238 245
239 246 """
240 247
241 248 if opType == 'self':
242 249
243 250 if not opName:
244 251 raise ValueError, "opName parameter should be defined"
245 252
246 253 sts = self.callMethod(opName, opId)
247 254
248 255 elif opType == 'other' or opType == 'external' or opType == 'plotter':
249 256
250 257 if not opId:
251 258 raise ValueError, "opId parameter should be defined"
252 259
253 260 if opId not in self.operations2RunDict.keys():
254 261 raise ValueError, "No operation with id=%s has been added" %str(opId)
255 262
256 263 sts = self.callObject(opId)
257 264
258 265 else:
259 266 raise ValueError, "opType should be 'self', 'external' or 'plotter'; and not '%s'" %opType
260 267
261 268 return sts
262 269
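Putting call() together: external operations are Operation instances registered with addOperation and later dispatched through callObject, which invokes their run(dataOut, **kwargs). A minimal wiring sketch; PrintShape and the ids are hypothetical, and the controller lines are left as comments because they need a configured ProcessingUnit with data:

class PrintShape(Operation):
    # Toy external operation: prints the shape of the current data
    def run(self, dataOut):
        print dataOut.data.shape

# As it would appear in controller code (illustrative names and ids):
# procUnit = SomeProcessingUnit()                 # any ProcessingUnit subclass
# opId = procUnit.addOperation(PrintShape(), 1)   # register and keep the id
# procUnit.call(opType='other', opId=opId)        # -> callObject -> PrintShape.run(dataOut)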
263 270 def setInput(self, dataIn):
264 271
265 272 self.dataIn = dataIn
266 273 self.dataInList.append(dataIn)
267 274
268 275 def getOutputObj(self):
269 276
270 277 return self.dataOut
271 278
272 279 def checkInputs(self):
273 280
274 281 for thisDataIn in self.dataInList:
275 282
276 283 if thisDataIn.isEmpty():
277 284 return False
278 285
279 286 return True
280 287
281 288 def setup(self):
282 289
283 290 raise NotImplementedError
284 291
285 292 def run(self):
286 293
287 294 raise NotImplementedError
288 295
289 296 def close(self):
290 297 #Close every thread, queue or any other object here if it is necessary.
291 298 return
292 299
293 300 class Operation(object):
294 301
295 302 """
296 303 Base class for defining additional operations that can be added to the ProcessingUnit class
297 304 and that need to accumulate previous information from the data to be processed. Preferably,
298 305 use an accumulation buffer inside this class.
299 306
300 307 Example: coherent integration, which needs the previous information of the last n profiles (buffer)
301 308
302 309 """
303 310
304 311 __buffer = None
305 312 isConfig = False
306 313
307 314 def __init__(self, **kwargs):
308 315
309 316 self.__buffer = None
310 317 self.isConfig = False
311 318 self.kwargs = kwargs
319 if not hasattr(self, 'name'):
320 self.name = self.__class__.__name__
312 321 checkKwargs(self.run, kwargs)
313 322
314 323 def getAllowedArgs(self):
315 return inspect.getargspec(self.run).args
324 if hasattr(self, '__attrs__'):
325 return self.__attrs__
326 else:
327 return inspect.getargspec(self.run).args
316 328
317 329 def setup(self):
318 330
319 331 self.isConfig = True
320 332
321 333 raise NotImplementedError
322 334
323 335 def run(self, dataIn, **kwargs):
324 336
325 337 """
326 338 Performs the necessary operations on dataIn.data and updates the
327 339 attributes of the dataIn object.
328 340
329 341 Input:
330 342
331 343 dataIn : object of type JROData
332 344
333 345 Return:
334 346
335 347 None
336 348
337 349 Affected:
338 350 __buffer : data reception buffer.
339 351
340 352 """
341 353 if not self.isConfig:
342 354 self.setup(**kwargs)
343 355
344 356 raise NotImplementedError
345 357
346 358 def close(self):
347 359
348 360 pass
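As the class docstring says, subclasses typically keep an accumulation buffer across calls, the canonical case being coherent integration over the last n profiles. A minimal sketch of that pattern; SimpleAverager is a hypothetical example, not an operation shipped with the library, and it assumes dataIn.data is a numpy array:

import numpy

class SimpleAverager(Operation):
    # Accumulates n inputs in a buffer and replaces the data with their average

    def setup(self, n):
        self.n = n
        self.buffer = []
        self.isConfig = True

    def run(self, dataIn, n=2):
        if not self.isConfig:
            self.setup(n)
        self.buffer.append(dataIn.data.copy())
        if len(self.buffer) == self.n:
            dataIn.data = numpy.mean(self.buffer, axis=0)
            self.buffer = []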
@@ -1,4044 +1,4044
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import copy_reg
14 14 import cPickle
15 15 import types
16 16 from functools import partial
17 17 import time
18 18 #from sklearn.cluster import KMeans
19 19
20 20
21 21 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
22 22 from jroproc_base import ProcessingUnit, Operation
23 23 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
24 24 from scipy import asarray as ar,exp
25 25 from scipy.optimize import curve_fit
26 26
27 27 import warnings
28 28 from numpy import NaN
29 29 from scipy.optimize.optimize import OptimizeWarning
30 30 warnings.filterwarnings('ignore')
31 31
32 32
33 33 SPEED_OF_LIGHT = 299792458
34 34
35 35
36 36 '''solving pickling issue'''
37 37
38 38 def _pickle_method(method):
39 39 func_name = method.im_func.__name__
40 40 obj = method.im_self
41 41 cls = method.im_class
42 42 return _unpickle_method, (func_name, obj, cls)
43 43
44 44 def _unpickle_method(func_name, obj, cls):
45 45 for cls in cls.mro():
46 46 try:
47 47 func = cls.__dict__[func_name]
48 48 except KeyError:
49 49 pass
50 50 else:
51 51 break
52 52 return func.__get__(obj, cls)
53 53
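The two helpers above address the classic Python 2 limitation that pickle/cPickle cannot serialize bound methods, which breaks multiprocessing when instance methods are passed as tasks. They are normally paired with a copy_reg registration; a minimal sketch of that registration and a pickle round trip (the Dummy class is illustrative, and the registration call is the standard copy_reg recipe, shown here as an assumption since it is not visible in this excerpt):

import copy_reg
import types
import cPickle

# Register the reduce/reconstruct pair for bound methods
copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)

class Dummy(object):
    def hello(self):
        return 'hello'

method = Dummy().hello
restored = cPickle.loads(cPickle.dumps(method))
print restored()    # -> 'hello'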
54 54 class ParametersProc(ProcessingUnit):
55 55
56 56 nSeconds = None
57 57
58 58 def __init__(self):
59 59 ProcessingUnit.__init__(self)
60 60
61 61 # self.objectDict = {}
62 62 self.buffer = None
63 63 self.firstdatatime = None
64 64 self.profIndex = 0
65 65 self.dataOut = Parameters()
66 66
67 67 def __updateObjFromInput(self):
68 68
69 69 self.dataOut.inputUnit = self.dataIn.type
70 70
71 71 self.dataOut.timeZone = self.dataIn.timeZone
72 72 self.dataOut.dstFlag = self.dataIn.dstFlag
73 73 self.dataOut.errorCount = self.dataIn.errorCount
74 74 self.dataOut.useLocalTime = self.dataIn.useLocalTime
75 75
76 76 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
77 77 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
78 78 self.dataOut.channelList = self.dataIn.channelList
79 79 self.dataOut.heightList = self.dataIn.heightList
80 80 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
81 81 # self.dataOut.nHeights = self.dataIn.nHeights
82 82 # self.dataOut.nChannels = self.dataIn.nChannels
83 83 self.dataOut.nBaud = self.dataIn.nBaud
84 84 self.dataOut.nCode = self.dataIn.nCode
85 85 self.dataOut.code = self.dataIn.code
86 86 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
87 87 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
88 88 # self.dataOut.utctime = self.firstdatatime
89 89 self.dataOut.utctime = self.dataIn.utctime
90 90 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
91 91 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data is already deflipped
92 92 self.dataOut.nCohInt = self.dataIn.nCohInt
93 93 # self.dataOut.nIncohInt = 1
94 94 self.dataOut.ippSeconds = self.dataIn.ippSeconds
95 95 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
96 96 self.dataOut.timeInterval1 = self.dataIn.timeInterval
97 97 self.dataOut.heightList = self.dataIn.getHeiRange()
98 98 self.dataOut.frequency = self.dataIn.frequency
99 99 # self.dataOut.noise = self.dataIn.noise
100 100
101 101 def run(self):
102 102
103 103 #---------------------- Voltage Data ---------------------------
104 104
105 105 if self.dataIn.type == "Voltage":
106 106
107 107 self.__updateObjFromInput()
108 108 self.dataOut.data_pre = self.dataIn.data.copy()
109 109 self.dataOut.flagNoData = False
110 110 self.dataOut.utctimeInit = self.dataIn.utctime
111 111 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
112 112 return
113 113
114 114 #---------------------- Spectra Data ---------------------------
115 115
116 116 if self.dataIn.type == "Spectra":
117 117
118 118 self.dataOut.data_pre = (self.dataIn.data_spc, self.dataIn.data_cspc)
119 119 self.dataOut.data_spc = self.dataIn.data_spc
120 120 self.dataOut.data_cspc = self.dataIn.data_cspc
121 121 self.dataOut.nProfiles = self.dataIn.nProfiles
122 122 self.dataOut.nIncohInt = self.dataIn.nIncohInt
123 123 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
124 124 self.dataOut.ippFactor = self.dataIn.ippFactor
125 125 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
126 126 self.dataOut.spc_noise = self.dataIn.getNoise()
127 127 self.dataOut.spc_range = (self.dataIn.getFreqRange(1)/1000. , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
128 128 self.dataOut.pairsList = self.dataIn.pairsList
129 129 self.dataOut.groupList = self.dataIn.pairsList
130 130 self.dataOut.flagNoData = False
131 131
132 132 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
133 133 self.dataOut.ChanDist = self.dataIn.ChanDist
134 134 else: self.dataOut.ChanDist = None
135 135
136 136 if hasattr(self.dataIn, 'VelRange'): #Velocities range
137 137 self.dataOut.VelRange = self.dataIn.VelRange
138 138 else: self.dataOut.VelRange = None
139 139
140 140 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
141 141 self.dataOut.RadarConst = self.dataIn.RadarConst
142 142
143 143 if hasattr(self.dataIn, 'NPW'): #NPW
144 144 self.dataOut.NPW = self.dataIn.NPW
145 145
146 146 if hasattr(self.dataIn, 'COFA'): #COFA
147 147 self.dataOut.COFA = self.dataIn.COFA
148 148
149 149
150 150
151 151 #---------------------- Correlation Data ---------------------------
152 152
153 153 if self.dataIn.type == "Correlation":
154 154 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
155 155
156 156 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
157 157 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
158 158 self.dataOut.groupList = (acf_pairs, ccf_pairs)
159 159
160 160 self.dataOut.abscissaList = self.dataIn.lagRange
161 161 self.dataOut.noise = self.dataIn.noise
162 162 self.dataOut.data_SNR = self.dataIn.SNR
163 163 self.dataOut.flagNoData = False
164 164 self.dataOut.nAvg = self.dataIn.nAvg
165 165
166 166 #---------------------- Parameters Data ---------------------------
167 167
168 168 if self.dataIn.type == "Parameters":
169 169 self.dataOut.copy(self.dataIn)
170 170 self.dataOut.flagNoData = False
171 171
172 172 return True
173 173
174 174 self.__updateObjFromInput()
175 175 self.dataOut.utctimeInit = self.dataIn.utctime
176 176 self.dataOut.paramInterval = self.dataIn.timeInterval
177 177
178 178 return
179 179
180 180
181 181 def target(tups):
182 182
183 183 obj, args = tups
184 184 #print 'TARGETTT', obj, args
185 185 return obj.FitGau(args)
186 186
187 187 class GaussianFit(Operation):
188 188
189 189 '''
190 190 Function that fits one or two generalized Gaussians (gg) to
191 191 the PSD shape across a "power band" identified from a cumsum of
192 192 the measured spectrum minus noise.
193 193
194 194 Input:
195 195 self.dataOut.data_pre : SelfSpectra
196 196
197 197 Output:
198 198 self.dataOut.GauSPC : SPC_ch1, SPC_ch2
199 199
200 200 '''
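# Model sketch (a restatement of y_model1/y_model2 defined below, no new behaviour):
# each channel/height spectrum is fitted with a generalized Gaussian
#
#   G(x) = A * exp(-0.5 * abs((x - s) / w) ** p)
#
# replicated at x -/+ Num_Bin to handle Nyquist aliasing, so the single-peak model
# is G(x) + G(x - Num_Bin) + G(x + Num_Bin) + noise, and the two-peak model adds a
# second generalized Gaussian (with its own s, w, A, p) before the noise term.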
201 201 def __init__(self, **kwargs):
202 202 Operation.__init__(self, **kwargs)
203 203 self.i=0
204 204
205 205
206 206 def run(self, dataOut, num_intg=7, pnoise=1., vel_arr=None, SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the fft points
207 207 """This routine will fit a pair of generalized Gaussians to a power spectrum
208 208 input: spc
209 209 output:
210 210 Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise
211 211 """
212 212
213 213 self.spc = dataOut.data_pre[0].copy()
214 214
215 215
216 216 print 'SelfSpectra Shape', numpy.asarray(self.spc).shape
217 217
218 218
219 219 #plt.figure(50)
220 220 #plt.subplot(121)
221 221 #plt.plot(self.spc,'k',label='spc(66)')
222 222 #plt.plot(xFrec,ySamples[1],'g',label='Ch1')
223 223 #plt.plot(xFrec,ySamples[2],'r',label='Ch2')
224 224 #plt.plot(xFrec,FitGauss,'yo:',label='fit')
225 225 #plt.legend()
226 226 #plt.title('DATA AT 7500 METERS ALTITUDE')
227 227 #plt.show()
228 228
229 229 self.Num_Hei = self.spc.shape[2]
230 230 #self.Num_Bin = len(self.spc)
231 231 self.Num_Bin = self.spc.shape[1]
232 232 self.Num_Chn = self.spc.shape[0]
233 233
234 234 Vrange = dataOut.abscissaList
235 235
236 236 #print 'self.spc2', numpy.asarray(self.spc).shape
237 237
238 238 GauSPC = numpy.empty([2,self.Num_Bin,self.Num_Hei])
239 239 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
240 240 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
241 241 SPC_ch1[:] = numpy.NaN
242 242 SPC_ch2[:] = numpy.NaN
243 243
244 244
245 245 start_time = time.time()
246 246
247 247 noise_ = dataOut.spc_noise[0].copy()
248 248
249 249
250 250
251 251 pool = Pool(processes=self.Num_Chn)
252 252 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
253 253 objs = [self for __ in range(self.Num_Chn)]
254 254 attrs = zip(objs, args)
255 255 gauSPC = pool.map(target, attrs)
256 256 dataOut.GauSPC = numpy.asarray(gauSPC)
257 257 # ret = []
258 258 # for n in range(self.Num_Chn):
259 259 # self.FitGau(args[n])
260 260 # dataOut.GauSPC = ret
261 261
262 262
263 263
264 264 # for ch in range(self.Num_Chn):
265 265 #
266 266 # for ht in range(self.Num_Hei):
267 267 # #print (numpy.asarray(self.spc).shape)
268 268 # spc = numpy.asarray(self.spc)[ch,:,ht]
269 269 #
270 270 # #############################################
271 271 # # normalizing spc and noise
272 272 # # This part differs from gg1
273 273 # spc_norm_max = max(spc)
274 274 # spc = spc / spc_norm_max
275 275 # pnoise = pnoise / spc_norm_max
276 276 # #############################################
277 277 #
278 278 # if abs(vel_arr[0])<15.0: # this switch is for spectra collected with different length IPP's
279 279 # fatspectra=1.0
280 280 # else:
281 281 # fatspectra=0.5
282 282 #
283 283 # wnoise = noise_ / spc_norm_max
284 284 # #print 'wnoise', noise_, dataOut.spc_noise[0], wnoise
285 285 # #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
286 286 # #if wnoise>1.1*pnoise: # to be tested later
287 287 # # wnoise=pnoise
288 288 # noisebl=wnoise*0.9; noisebh=wnoise*1.1
289 289 # spc=spc-wnoise
290 290 #
291 291 # minx=numpy.argmin(spc)
292 292 # spcs=numpy.roll(spc,-minx)
293 293 # cum=numpy.cumsum(spcs)
294 294 # tot_noise=wnoise * self.Num_Bin #64;
295 295 # #tot_signal=sum(cum[-5:])/5.; ''' How does this line work? '''
296 296 # #snr=tot_signal/tot_noise
297 297 # #snr=cum[-1]/tot_noise
298 298 #
299 299 # #print 'spc' , spcs[5:8] , 'tot_noise', tot_noise
300 300 #
301 301 # snr = sum(spcs)/tot_noise
302 302 # snrdB=10.*numpy.log10(snr)
303 303 #
304 304 # #if snrdB < -9 :
305 305 # # snrdB = numpy.NaN
306 306 # # continue
307 307 #
308 308 # #print 'snr',snrdB # , sum(spcs) , tot_noise
309 309 #
310 310 #
311 311 # #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
312 312 # # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
313 313 #
314 314 # cummax=max(cum); epsi=0.08*fatspectra # cumsum to narrow down the energy region
315 315 # cumlo=cummax*epsi;
316 316 # cumhi=cummax*(1-epsi)
317 317 # powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
318 318 #
319 319 # #if len(powerindex)==1:
320 320 # ##return [numpy.mod(powerindex[0]+minx,64),None,None,None,],[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
321 321 # #return [numpy.mod(powerindex[0]+minx, self.Num_Bin ),None,None,None,],[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
322 322 # #elif len(powerindex)<4*fatspectra:
323 323 # #return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
324 324 #
325 325 # if len(powerindex) < 1:# case for powerindex 0
326 326 # continue
327 327 # powerlo=powerindex[0]
328 328 # powerhi=powerindex[-1]
329 329 # powerwidth=powerhi-powerlo
330 330 #
331 331 # firstpeak=powerlo+powerwidth/10.# first gaussian energy location
332 332 # secondpeak=powerhi-powerwidth/10.#second gaussian energy location
333 333 # midpeak=(firstpeak+secondpeak)/2.
334 334 # firstamp=spcs[int(firstpeak)]
335 335 # secondamp=spcs[int(secondpeak)]
336 336 # midamp=spcs[int(midpeak)]
337 337 # #x=numpy.spc.shape[1]
338 338 #
339 339 # #x=numpy.arange(64)
340 340 # x=numpy.arange( self.Num_Bin )
341 341 # y_data=spc+wnoise
342 342 #
343 343 # # single gaussian
344 344 # #shift0=numpy.mod(midpeak+minx,64)
345 345 # shift0=numpy.mod(midpeak+minx, self.Num_Bin )
346 346 # width0=powerwidth/4.#Initialization entire power of spectrum divided by 4
347 347 # power0=2.
348 348 # amplitude0=midamp
349 349 # state0=[shift0,width0,amplitude0,power0,wnoise]
350 350 # #bnds=((0,63),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
351 351 # bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
352 352 # #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(0.1,0.5))
353 353 # # bnds = range of fft, power width, amplitude, power, noise
354 354 # lsq1=fmin_l_bfgs_b(self.misfit1,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
355 355 #
356 356 # chiSq1=lsq1[1];
357 357 # jack1= self.y_jacobian1(x,lsq1[0])
358 358 #
359 359 #
360 360 # try:
361 361 # sigmas1=numpy.sqrt(chiSq1*numpy.diag(numpy.linalg.inv(numpy.dot(jack1.T,jack1))))
362 362 # except:
363 363 # std1=32.; sigmas1=numpy.ones(5)
364 364 # else:
365 365 # std1=sigmas1[0]
366 366 #
367 367 #
368 368 # if fatspectra<1.0 and powerwidth<4:
369 369 # choice=0
370 370 # Amplitude0=lsq1[0][2]
371 371 # shift0=lsq1[0][0]
372 372 # width0=lsq1[0][1]
373 373 # p0=lsq1[0][3]
374 374 # Amplitude1=0.
375 375 # shift1=0.
376 376 # width1=0.
377 377 # p1=0.
378 378 # noise=lsq1[0][4]
379 379 # #return (numpy.array([shift0,width0,Amplitude0,p0]),
380 380 # # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
381 381 #
382 382 # # two gaussians
383 383 # #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
384 384 # shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
385 385 # shift1=numpy.mod(secondpeak+minx, self.Num_Bin )
386 386 # width0=powerwidth/6.;
387 387 # width1=width0
388 388 # power0=2.;
389 389 # power1=power0
390 390 # amplitude0=firstamp;
391 391 # amplitude1=secondamp
392 392 # state0=[shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
393 393 # #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
394 394 # bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
395 395 # #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
396 396 #
397 397 # lsq2=fmin_l_bfgs_b(self.misfit2,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
398 398 #
399 399 #
400 400 # chiSq2=lsq2[1]; jack2=self.y_jacobian2(x,lsq2[0])
401 401 #
402 402 #
403 403 # try:
404 404 # sigmas2=numpy.sqrt(chiSq2*numpy.diag(numpy.linalg.inv(numpy.dot(jack2.T,jack2))))
405 405 # except:
406 406 # std2a=32.; std2b=32.; sigmas2=numpy.ones(9)
407 407 # else:
408 408 # std2a=sigmas2[0]; std2b=sigmas2[4]
409 409 #
410 410 #
411 411 #
412 412 # oneG=(chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
413 413 #
414 414 # if snrdB>-9: # when SNR is strong pick the peak with least shift (LOS velocity) error
415 415 # if oneG:
416 416 # choice=0
417 417 # else:
418 418 # w1=lsq2[0][1]; w2=lsq2[0][5]
419 419 # a1=lsq2[0][2]; a2=lsq2[0][6]
420 420 # p1=lsq2[0][3]; p2=lsq2[0][7]
421 421 # s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1; s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
422 422 # gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
423 423 #
424 424 # if gp1>gp2:
425 425 # if a1>0.7*a2:
426 426 # choice=1
427 427 # else:
428 428 # choice=2
429 429 # elif gp2>gp1:
430 430 # if a2>0.7*a1:
431 431 # choice=2
432 432 # else:
433 433 # choice=1
434 434 # else:
435 435 # choice=numpy.argmax([a1,a2])+1
436 436 # #else:
437 437 # #choice=argmin([std2a,std2b])+1
438 438 #
439 439 # else: # with low SNR go to the most energetic peak
440 440 # choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
441 441 #
442 442 # #print 'choice',choice
443 443 #
444 444 # if choice==0: # pick the single gaussian fit
445 445 # Amplitude0=lsq1[0][2]
446 446 # shift0=lsq1[0][0]
447 447 # width0=lsq1[0][1]
448 448 # p0=lsq1[0][3]
449 449 # Amplitude1=0.
450 450 # shift1=0.
451 451 # width1=0.
452 452 # p1=0.
453 453 # noise=lsq1[0][4]
454 454 # elif choice==1: # take the first one of the 2 gaussians fitted
455 455 # Amplitude0 = lsq2[0][2]
456 456 # shift0 = lsq2[0][0]
457 457 # width0 = lsq2[0][1]
458 458 # p0 = lsq2[0][3]
459 459 # Amplitude1 = lsq2[0][6] # This is 0 in gg1
460 460 # shift1 = lsq2[0][4] # This is 0 in gg1
461 461 # width1 = lsq2[0][5] # This is 0 in gg1
462 462 # p1 = lsq2[0][7] # This is 0 in gg1
463 463 # noise = lsq2[0][8]
464 464 # else: # the second one
465 465 # Amplitude0 = lsq2[0][6]
466 466 # shift0 = lsq2[0][4]
467 467 # width0 = lsq2[0][5]
468 468 # p0 = lsq2[0][7]
469 469 # Amplitude1 = lsq2[0][2] # This is 0 in gg1
470 470 # shift1 = lsq2[0][0] # This is 0 in gg1
471 471 # width1 = lsq2[0][1] # This is 0 in gg1
472 472 # p1 = lsq2[0][3] # This is 0 in gg1
473 473 # noise = lsq2[0][8]
474 474 #
475 475 # #print len(noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0)
476 476 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0
477 477 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1
478 478 # #print 'SPC_ch1.shape',SPC_ch1.shape
479 479 # #print 'SPC_ch2.shape',SPC_ch2.shape
480 480 # #dataOut.data_param = SPC_ch1
481 481 # GauSPC[0] = SPC_ch1
482 482 # GauSPC[1] = SPC_ch2
483 483
484 484 # #plt.gcf().clear()
485 485 # plt.figure(50+self.i)
486 486 # self.i=self.i+1
487 487 # #plt.subplot(121)
488 488 # plt.plot(self.spc,'k')#,label='spc(66)')
489 489 # plt.plot(SPC_ch1[ch,ht],'b')#,label='gg1')
490 490 # #plt.plot(SPC_ch2,'r')#,label='gg2')
491 491 # #plt.plot(xFrec,ySamples[1],'g',label='Ch1')
492 492 # #plt.plot(xFrec,ySamples[2],'r',label='Ch2')
493 493 # #plt.plot(xFrec,FitGauss,'yo:',label='fit')
494 494 # plt.legend()
495 495 # plt.title('DATA AT 7500 METERS ALTITUDE')
496 496 # plt.show()
497 497 # print 'shift0', shift0
498 498 # print 'Amplitude0', Amplitude0
499 499 # print 'width0', width0
500 500 # print 'p0', p0
501 501 # print '========================'
502 502 # print 'shift1', shift1
503 503 # print 'Amplitude1', Amplitude1
504 504 # print 'width1', width1
505 505 # print 'p1', p1
506 506 # print 'noise', noise
507 507 # print 's_noise', wnoise
508 508
509 509 print '========================================================'
510 510 print 'total_time: ', time.time()-start_time
511 511
512 512 # re-normalizing spc and noise
513 513 # This part differs from gg1
514 514
515 515
516 516
517 517 ''' Parameters:
518 518 1. Amplitude
519 519 2. Shift
520 520 3. Width
521 521 4. Power
522 522 '''
523 523
524 524
525 525 ###############################################################################
526 526 def FitGau(self, X):
527 527
528 528 Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X
529 529 #print 'VARSSSS', ch, pnoise, noise, num_intg
530 530
531 531 #print 'HEIGHTS', self.Num_Hei
532 532
533 533 GauSPC = []
534 534 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
535 535 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
536 536 SPC_ch1[:] = 0#numpy.NaN
537 537 SPC_ch2[:] = 0#numpy.NaN
538 538
539 539
540 540
541 541 for ht in range(self.Num_Hei):
542 542 #print (numpy.asarray(self.spc).shape)
543 543
544 544 #print 'TTTTT', ch , ht
545 545 #print self.spc.shape
546 546
547 547
548 548 spc = numpy.asarray(self.spc)[ch,:,ht]
549 549
550 550 #############################################
551 551 # normalizing spc and noise
552 552 # This part differs from gg1
553 553 spc_norm_max = max(spc)
554 554 spc = spc / spc_norm_max
555 555 pnoise = pnoise / spc_norm_max
556 556 #############################################
557 557
558 558 fatspectra=1.0
559 559
560 560 wnoise = noise_ / spc_norm_max
561 561 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
562 562 #if wnoise>1.1*pnoise: # to be tested later
563 563 # wnoise=pnoise
564 564 noisebl=wnoise*0.9; noisebh=wnoise*1.1
565 565 spc=spc-wnoise
566 566 # print 'wnoise', noise_[0], spc_norm_max, wnoise
567 567 minx=numpy.argmin(spc)
568 568 spcs=numpy.roll(spc,-minx)
569 569 cum=numpy.cumsum(spcs)
570 570 tot_noise=wnoise * self.Num_Bin #64;
571 571 #print 'spc' , spcs[5:8] , 'tot_noise', tot_noise
572 572 #tot_signal=sum(cum[-5:])/5.; ''' How does this line work? '''
573 573 #snr=tot_signal/tot_noise
574 574 #snr=cum[-1]/tot_noise
575 575 snr = sum(spcs)/tot_noise
576 576 snrdB=10.*numpy.log10(snr)
577 577
578 578 if snrdB < SNRlimit :
579 579 snr = numpy.NaN
580 580 SPC_ch1[:,ht] = 0#numpy.NaN
581 581 SPC_ch2[:,ht] = 0#numpy.NaN
582 582 GauSPC = (SPC_ch1,SPC_ch2)
583 583 continue
584 584 #print 'snr',snrdB #, sum(spcs) , tot_noise
585 585
586 586
587 587
588 588 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
589 589 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
590 590
591 591 cummax=max(cum); epsi=0.08*fatspectra # cumsum to narrow down the energy region
592 592 cumlo=cummax*epsi;
593 593 cumhi=cummax*(1-epsi)
594 594 powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
595 595
596 596
597 597 if len(powerindex) < 1:# case for powerindex 0
598 598 continue
599 599 powerlo=powerindex[0]
600 600 powerhi=powerindex[-1]
601 601 powerwidth=powerhi-powerlo
602 602
603 603 firstpeak=powerlo+powerwidth/10.# first gaussian energy location
604 604 secondpeak=powerhi-powerwidth/10.#second gaussian energy location
605 605 midpeak=(firstpeak+secondpeak)/2.
606 606 firstamp=spcs[int(firstpeak)]
607 607 secondamp=spcs[int(secondpeak)]
608 608 midamp=spcs[int(midpeak)]
609 609
610 610 x=numpy.arange( self.Num_Bin )
611 611 y_data=spc+wnoise
612 612
613 613 # single gaussian
614 614 shift0=numpy.mod(midpeak+minx, self.Num_Bin )
615 615 width0=powerwidth/4.#Initialization entire power of spectrum divided by 4
616 616 power0=2.
617 617 amplitude0=midamp
618 618 state0=[shift0,width0,amplitude0,power0,wnoise]
619 619 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
620 620 lsq1=fmin_l_bfgs_b(self.misfit1,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
621 621
622 622 chiSq1=lsq1[1];
623 623 jack1= self.y_jacobian1(x,lsq1[0])
624 624
625 625
626 626 try:
627 627 sigmas1=numpy.sqrt(chiSq1*numpy.diag(numpy.linalg.inv(numpy.dot(jack1.T,jack1))))
628 628 except:
629 629 std1=32.; sigmas1=numpy.ones(5)
630 630 else:
631 631 std1=sigmas1[0]
632 632
633 633
634 634 if fatspectra<1.0 and powerwidth<4:
635 635 choice=0
636 636 Amplitude0=lsq1[0][2]
637 637 shift0=lsq1[0][0]
638 638 width0=lsq1[0][1]
639 639 p0=lsq1[0][3]
640 640 Amplitude1=0.
641 641 shift1=0.
642 642 width1=0.
643 643 p1=0.
644 644 noise=lsq1[0][4]
645 645 #return (numpy.array([shift0,width0,Amplitude0,p0]),
646 646 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
647 647
648 648 # two gaussians
649 649 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
650 650 shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
651 651 shift1=numpy.mod(secondpeak+minx, self.Num_Bin )
652 652 width0=powerwidth/6.;
653 653 width1=width0
654 654 power0=2.;
655 655 power1=power0
656 656 amplitude0=firstamp;
657 657 amplitude1=secondamp
658 658 state0=[shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
659 659 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
660 660 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
661 661 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
662 662
663 663 lsq2=fmin_l_bfgs_b(self.misfit2,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
664 664
665 665
666 666 chiSq2=lsq2[1]; jack2=self.y_jacobian2(x,lsq2[0])
667 667
668 668
669 669 try:
670 670 sigmas2=numpy.sqrt(chiSq2*numpy.diag(numpy.linalg.inv(numpy.dot(jack2.T,jack2))))
671 671 except:
672 672 std2a=32.; std2b=32.; sigmas2=numpy.ones(9)
673 673 else:
674 674 std2a=sigmas2[0]; std2b=sigmas2[4]
675 675
676 676
677 677
678 678 oneG=(chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
679 679
680 680 if snrdB>-6: # when SNR is strong pick the peak with least shift (LOS velocity) error
681 681 if oneG:
682 682 choice=0
683 683 else:
684 684 w1=lsq2[0][1]; w2=lsq2[0][5]
685 685 a1=lsq2[0][2]; a2=lsq2[0][6]
686 686 p1=lsq2[0][3]; p2=lsq2[0][7]
687 687 s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
688 688 s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
689 689 gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
690 690
691 691 if gp1>gp2:
692 692 if a1>0.7*a2:
693 693 choice=1
694 694 else:
695 695 choice=2
696 696 elif gp2>gp1:
697 697 if a2>0.7*a1:
698 698 choice=2
699 699 else:
700 700 choice=1
701 701 else:
702 702 choice=numpy.argmax([a1,a2])+1
703 703 #else:
704 704 #choice=argmin([std2a,std2b])+1
705 705
706 706 else: # with low SNR go to the most energetic peak
707 707 choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
708 708
709 709
710 710 shift0=lsq2[0][0]; vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0])
711 711 shift1=lsq2[0][4]; vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0])
712 712
713 713 max_vel = 20
714 714
715 715 #first peak will be 0, second peak will be 1
716 716 if vel0 > 0 and vel0 < max_vel : #first peak is in the correct range
717 717 shift0=lsq2[0][0]
718 718 width0=lsq2[0][1]
719 719 Amplitude0=lsq2[0][2]
720 720 p0=lsq2[0][3]
721 721
722 722 shift1=lsq2[0][4]
723 723 width1=lsq2[0][5]
724 724 Amplitude1=lsq2[0][6]
725 725 p1=lsq2[0][7]
726 726 noise=lsq2[0][8]
727 727 else:
728 728 shift1=lsq2[0][0]
729 729 width1=lsq2[0][1]
730 730 Amplitude1=lsq2[0][2]
731 731 p1=lsq2[0][3]
732 732
733 733 shift0=lsq2[0][4]
734 734 width0=lsq2[0][5]
735 735 Amplitude0=lsq2[0][6]
736 736 p0=lsq2[0][7]
737 737 noise=lsq2[0][8]
738 738
739 739 if Amplitude0<0.1: # in case the peak is noise
740 740 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
741 741 if Amplitude1<0.1:
742 742 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
743 743
744 744
745 745 # if choice==0: # pick the single gaussian fit
746 746 # Amplitude0=lsq1[0][2]
747 747 # shift0=lsq1[0][0]
748 748 # width0=lsq1[0][1]
749 749 # p0=lsq1[0][3]
750 750 # Amplitude1=0.
751 751 # shift1=0.
752 752 # width1=0.
753 753 # p1=0.
754 754 # noise=lsq1[0][4]
755 755 # elif choice==1: # take the first one of the 2 gaussians fitted
756 756 # Amplitude0 = lsq2[0][2]
757 757 # shift0 = lsq2[0][0]
758 758 # width0 = lsq2[0][1]
759 759 # p0 = lsq2[0][3]
760 760 # Amplitude1 = lsq2[0][6] # This is 0 in gg1
761 761 # shift1 = lsq2[0][4] # This is 0 in gg1
762 762 # width1 = lsq2[0][5] # This is 0 in gg1
763 763 # p1 = lsq2[0][7] # This is 0 in gg1
764 764 # noise = lsq2[0][8]
765 765 # else: # the second one
766 766 # Amplitude0 = lsq2[0][6]
767 767 # shift0 = lsq2[0][4]
768 768 # width0 = lsq2[0][5]
769 769 # p0 = lsq2[0][7]
770 770 # Amplitude1 = lsq2[0][2] # This is 0 in gg1
771 771 # shift1 = lsq2[0][0] # This is 0 in gg1
772 772 # width1 = lsq2[0][1] # This is 0 in gg1
773 773 # p1 = lsq2[0][3] # This is 0 in gg1
774 774 # noise = lsq2[0][8]
775 775
776 776 #print len(noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0)
777 777 SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0
778 778 SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1
779 779 #print 'SPC_ch1.shape',SPC_ch1.shape
780 780 #print 'SPC_ch2.shape',SPC_ch2.shape
781 781 #dataOut.data_param = SPC_ch1
782 782 GauSPC = (SPC_ch1,SPC_ch2)
783 783 #GauSPC[1] = SPC_ch2
784 784
785 785 # print 'shift0', shift0
786 786 # print 'Amplitude0', Amplitude0
787 787 # print 'width0', width0
788 788 # print 'p0', p0
789 789 # print '========================'
790 790 # print 'shift1', shift1
791 791 # print 'Amplitude1', Amplitude1
792 792 # print 'width1', width1
793 793 # print 'p1', p1
794 794 # print 'noise', noise
795 795 # print 's_noise', wnoise
796 796
797 797 return GauSPC
798 798
799 799
800 800 def y_jacobian1(self,x,state): # This function is for further analysis of generalized Gaussians, it is not too important for the signal discrimination.
801 801 y_model=self.y_model1(x,state)
802 802 s0,w0,a0,p0,n=state
803 803 e0=((x-s0)/w0)**2;
804 804
805 805 e0u=((x-s0-self.Num_Bin)/w0)**2;
806 806
807 807 e0d=((x-s0+self.Num_Bin)/w0)**2
808 808 m0=numpy.exp(-0.5*e0**(p0/2.));
809 809 m0u=numpy.exp(-0.5*e0u**(p0/2.));
810 810 m0d=numpy.exp(-0.5*e0d**(p0/2.))
811 811 JA=m0+m0u+m0d
812 812 JP=(-1/4.)*a0*m0*e0**(p0/2.)*numpy.log(e0)+(-1/4.)*a0*m0u*e0u**(p0/2.)*numpy.log(e0u)+(-1/4.)*a0*m0d*e0d**(p0/2.)*numpy.log(e0d)
813 813
814 814 JS=(p0/w0/2.)*a0*m0*e0**(p0/2.-1)*((x-s0)/w0)+(p0/w0/2.)*a0*m0u*e0u**(p0/2.-1)*((x-s0- self.Num_Bin )/w0)+(p0/w0/2.)*a0*m0d*e0d**(p0/2.-1)*((x-s0+ self.Num_Bin )/w0)
815 815
816 816 JW=(p0/w0/2.)*a0*m0*e0**(p0/2.-1)*((x-s0)/w0)**2+(p0/w0/2.)*a0*m0u*e0u**(p0/2.-1)*((x-s0- self.Num_Bin )/w0)**2+(p0/w0/2.)*a0*m0d*e0d**(p0/2.-1)*((x-s0+ self.Num_Bin )/w0)**2
817 817 jack1=numpy.sqrt(7)*numpy.array([JS/y_model,JW/y_model,JA/y_model,JP/y_model,1./y_model])
818 818 return jack1.T
819 819
820 820 def y_jacobian2(self,x,state):
821 821 y_model=self.y_model2(x,state)
822 822 s0,w0,a0,p0,s1,w1,a1,p1,n=state
823 823 e0=((x-s0)/w0)**2;
824 824
825 825 e0u=((x-s0- self.Num_Bin )/w0)**2;
826 826
827 827 e0d=((x-s0+ self.Num_Bin )/w0)**2
828 828 e1=((x-s1)/w1)**2;
829 829
830 830 e1u=((x-s1- self.Num_Bin )/w1)**2;
831 831
832 832 e1d=((x-s1+ self.Num_Bin )/w1)**2
833 833 m0=numpy.exp(-0.5*e0**(p0/2.));
834 834 m0u=numpy.exp(-0.5*e0u**(p0/2.));
835 835 m0d=numpy.exp(-0.5*e0d**(p0/2.))
836 836 m1=numpy.exp(-0.5*e1**(p1/2.));
837 837 m1u=numpy.exp(-0.5*e1u**(p1/2.));
838 838 m1d=numpy.exp(-0.5*e1d**(p1/2.))
839 839 JA=m0+m0u+m0d
840 840 JA1=m1+m1u+m1d
841 841 JP=(-1/4.)*a0*m0*e0**(p0/2.)*numpy.log(e0)+(-1/4.)*a0*m0u*e0u**(p0/2.)*numpy.log(e0u)+(-1/4.)*a0*m0d*e0d**(p0/2.)*numpy.log(e0d)
842 842 JP1=(-1/4.)*a1*m1*e1**(p1/2.)*numpy.log(e1)+(-1/4.)*a1*m1u*e1u**(p1/2.)*numpy.log(e1u)+(-1/4.)*a1*m1d*e1d**(p1/2.)*numpy.log(e1d)
843 843
844 844 JS=(p0/w0/2.)*a0*m0*e0**(p0/2.-1)*((x-s0)/w0)+(p0/w0/2.)*a0*m0u*e0u**(p0/2.-1)*((x-s0- self.Num_Bin )/w0)+(p0/w0/2.)*a0*m0d*e0d**(p0/2.-1)*((x-s0+ self.Num_Bin )/w0)
845 845
846 846 JS1=(p1/w1/2.)*a1*m1*e1**(p1/2.-1)*((x-s1)/w1)+(p1/w1/2.)*a1*m1u*e1u**(p1/2.-1)*((x-s1- self.Num_Bin )/w1)+(p1/w1/2.)*a1*m1d*e1d**(p1/2.-1)*((x-s1+ self.Num_Bin )/w1)
847 847
848 848 JW=(p0/w0/2.)*a0*m0*e0**(p0/2.-1)*((x-s0)/w0)**2+(p0/w0/2.)*a0*m0u*e0u**(p0/2.-1)*((x-s0- self.Num_Bin )/w0)**2+(p0/w0/2.)*a0*m0d*e0d**(p0/2.-1)*((x-s0+ self.Num_Bin )/w0)**2
849 849
850 850 JW1=(p1/w1/2.)*a1*m1*e1**(p1/2.-1)*((x-s1)/w1)**2+(p1/w1/2.)*a1*m1u*e1u**(p1/2.-1)*((x-s1- self.Num_Bin )/w1)**2+(p1/w1/2.)*a1*m1d*e1d**(p1/2.-1)*((x-s1+ self.Num_Bin )/w1)**2
851 851 jack2=numpy.sqrt(7)*numpy.array([JS/y_model,JW/y_model,JA/y_model,JP/y_model,JS1/y_model,JW1/y_model,JA1/y_model,JP1/y_model,1./y_model])
852 852 return jack2.T
853 853
854 854 def y_model1(self,x,state):
855 855 shift0,width0,amplitude0,power0,noise=state
856 856 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
857 857
858 858 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
859 859
860 860 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
861 861 return model0+model0u+model0d+noise
862 862
863 863 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
864 864 shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state
865 865 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
866 866
867 867 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
868 868
869 869 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
870 870 model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1)
871 871
872 872 model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1)
873 873
874 874 model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1)
875 875 return model0+model0u+model0d+model1+model1u+model1d+noise
876 876
877 877 def misfit1(self,state,y_data,x,num_intg): # This function compares how close the real data is to the model data; the closer it is, the better.
878 878
879 879 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
880 880
881 881 def misfit2(self,state,y_data,x,num_intg):
882 882 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
883 883
884 884
885 885 class PrecipitationProc(Operation):
886 886
887 887 '''
888 888 Operator that estimates the reflectivity factor (Z) and the rainfall rate (R)
889 889
890 890 Input:
891 891 self.dataOut.data_pre : SelfSpectra
892 892
893 893 Output:
894 894
895 895 self.dataOut.data_output : Reflectivity factor, rainfall Rate
896 896
897 897
898 898 Parameters affected:
899 899 '''
900 900
901 901
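# Rain-rate sketch (a restatement of the run() code below, no new behaviour):
# the equivalent reflectivity is computed as Ze = ETA * Lambda**4 / (numpy.pi * Km),
# and the rainfall rate follows a Marshall-Palmer-type Z-R relation, Z = 200 * R**1.6,
# inverted as
#
#   RR   = (Ze / 200.) ** (1 / 1.6)
#   dBRR = 10 * numpy.log10(RR)     # stored in data_param[1]
#   dBZe = 10 * numpy.log10(Ze)     # stored in data_param[0]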
902 902 def run(self, dataOut, radar=None, Pt=None, Gt=None, Gr=None, Lambda=None, aL=None,
903 903 tauW=None, ThetaT=None, ThetaR=None, Km = 0.93, Altitude=None):
904 904
905 905 self.spc = dataOut.data_pre[0].copy()
906 906 self.Num_Hei = self.spc.shape[2]
907 907 self.Num_Bin = self.spc.shape[1]
908 908 self.Num_Chn = self.spc.shape[0]
909 909
910 910 Velrange = dataOut.abscissaList
911 911
912 912 if radar == "MIRA35C" :
913 913
914 914 Ze = self.dBZeMODE2(dataOut)
915 915
916 916 else:
917 917
918 918 self.Pt = Pt
919 919 self.Gt = Gt
920 920 self.Gr = Gr
921 921 self.Lambda = Lambda
922 922 self.aL = aL
923 923 self.tauW = tauW
924 924 self.ThetaT = ThetaT
925 925 self.ThetaR = ThetaR
926 926
927 927 RadarConstant = self.GetRadarConstant()
928 928 SPCmean = numpy.mean(self.spc,0)
929 929 ETA = numpy.zeros(self.Num_Hei)
930 930 Pr = numpy.sum(SPCmean,0)
931 931
932 932 #for R in range(self.Num_Hei):
933 933 # ETA[R] = RadarConstant * Pr[R] * R**2 #Reflectivity (ETA)
934 934
935 935 D_range = numpy.zeros(self.Num_Hei)
936 936 EqSec = numpy.zeros(self.Num_Hei)
937 937 del_V = numpy.zeros(self.Num_Hei) ; SIGMA = numpy.zeros(self.Num_Hei) ; N_dist = numpy.zeros(self.Num_Hei) #SIGMA and N_dist are filled in the loop below
938 938
939 939 for R in range(self.Num_Hei):
940 940 ETA[R] = RadarConstant * Pr[R] * R**2 #Reflectivity (ETA)
941 941
942 942 h = R + Altitude #Range from ground to radar pulse altitude
943 943 del_V[R] = 1 + 3.68 * 10**-5 * h + 1.71 * 10**-9 * h**2 #Density change correction for velocity
944 944
945 945 D_range[R] = numpy.log( (9.65 - (Velrange[R]/del_V[R])) / 10.3 ) / -0.6 #Range of Diameter of drops related to velocity
946 946 SIGMA[R] = numpy.pi**5 / Lambda**4 * Km * D_range[R]**6 #Equivalent Section of drops (sigma)
947 947
948 948 N_dist[R] = ETA[R] / SIGMA[R]
949 949
950 950 Ze = (ETA * Lambda**4) / (numpy.pi * Km)
951 951 Z = numpy.sum( N_dist * D_range**6 )
952 952 RR = 6*10**-4*numpy.pi * numpy.sum( D_range**3 * N_dist * Velrange ) #Rainfall rate
953 953
954 954
955 955 RR = (Ze/200)**(1/1.6)
956 956 dBRR = 10*numpy.log10(RR)
957 957
958 958 dBZe = 10*numpy.log10(Ze)
959 959 dataOut.data_output = Ze
960 960 dataOut.data_param = numpy.ones([2,self.Num_Hei])
961 961 dataOut.channelList = [0,1]
962 962 print 'channelList', dataOut.channelList
963 963 dataOut.data_param[0]=dBZe
964 964 dataOut.data_param[1]=dBRR
965 965 print 'RR SHAPE', dBRR.shape
966 966 print 'Ze SHAPE', dBZe.shape
967 967 print 'dataOut.data_param SHAPE', dataOut.data_param.shape
968 968
969 969
970 970 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
971 971
972 972 NPW = dataOut.NPW
973 973 COFA = dataOut.COFA
974 974
975 975 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
976 976 RadarConst = dataOut.RadarConst
977 977 #frequency = 34.85*10**9
978 978
979 979 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
980 980 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
981 981
982 982 ETA = numpy.sum(SNR,1)
983 983 print 'ETA' , ETA
984 984 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
985 985
986 986 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
987 987
988 988 for r in range(self.Num_Hei):
989 989
990 990 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
991 991 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
992 992
993 993 return Ze
994 994
995 995 def GetRadarConstant(self):
996 996
997 997 """
998 998 Constants:
999 999
1000 1000 Pt: Transmission Power dB
1001 1001 Gt: Transmission Gain dB
1002 1002 Gr: Reception Gain dB
1003 1003 Lambda: Wavelength m
1004 1004 aL: Attenuation losses dB
1005 1005 tauW: Width of transmission pulse s
1006 1006 ThetaT: Transmission antenna beam angle rad
1007 1007 ThetaR: Reception antenna beam angle rad
1008 1008
1009 1009 """
1010 1010 Numerator = ( (4*numpy.pi)**3 * self.aL**2 * 16 * numpy.log(2) )
1011 1011 Denominator = ( self.Pt * self.Gt * self.Gr * self.Lambda**2 * SPEED_OF_LIGHT * self.tauW * numpy.pi * self.ThetaT * self.ThetaR)
1012 1012 RadarConstant = Numerator / Denominator
1013 1013
1014 1014 return RadarConstant
1015 1015
1016 1016
1017 1017
1018 1018 class FullSpectralAnalysis(Operation):
1019 1019
1020 1020 """
1021 1021 Function that implements the Full Spectral Analysis technique.
1022 1022
1023 1023 Input:
1024 1024 self.dataOut.data_pre : SelfSpectra and CrossSPectra data
1025 1025 self.dataOut.groupList : Pairlist of channels
1026 1026 self.dataOut.ChanDist : Physical distance between receivers
1027 1027
1028 1028
1029 1029 Output:
1030 1030
1031 1031 self.dataOut.data_output : Zonal wind, Meridional wind and Vertical wind
1032 1032
1033 1033
1034 1034 Parameters affected: Winds, height range, SNR
1035 1035
1036 1036 """
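# Illustrative call sketch (the values are hypothetical, shown only to clarify the
# parameters): when dataOut.ChanDist is not provided upstream, the receiver
# baselines can be passed directly to run(), e.g.
#
#   op.run(dataOut, E01=2.5, N01=0.0, E02=0.0, N02=2.5, E12=-2.5, N12=2.5, SNRlimit=7)
#
# where (Eij, Nij) are the east/north components of the separation between the
# receivers of pair (i, j), in the same units as dataOut.ChanDist, and they are
# stacked as ChanDist = [[E01, N01], [E02, N02], [E12, N12]] in the code below.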
1037 1037 def run(self, dataOut, E01=None, E02=None, E12=None, N01=None, N02=None, N12=None, SNRlimit=7):
1038 1038
1039 1039 spc = dataOut.data_pre[0].copy()
1040 1040 cspc = dataOut.data_pre[1].copy()
1041 1041
1042 1042 nChannel = spc.shape[0]
1043 1043 nProfiles = spc.shape[1]
1044 1044 nHeights = spc.shape[2]
1045 1045
1046 1046 pairsList = dataOut.groupList
1047 1047 if dataOut.ChanDist is not None :
1048 1048 ChanDist = dataOut.ChanDist
1049 1049 else:
1050 1050 ChanDist = numpy.array([[E01, N01],[E02,N02],[E12,N12]])
1051 1051
1052 1052 #print 'ChanDist', ChanDist
1053 1053
1054 1054 if dataOut.VelRange is not None:
1055 1055 VelRange= dataOut.VelRange
1056 1056 else:
1057 1057 VelRange= dataOut.abscissaList
1058 1058
1059 1059 ySamples=numpy.ones([nChannel,nProfiles])
1060 1060 phase=numpy.ones([nChannel,nProfiles])
1061 1061 CSPCSamples=numpy.ones([nChannel,nProfiles],dtype=numpy.complex_)
1062 1062 coherence=numpy.ones([nChannel,nProfiles])
1063 1063 PhaseSlope=numpy.ones(nChannel)
1064 1064 PhaseInter=numpy.ones(nChannel)
1065 1065 dataSNR = dataOut.data_SNR
1066 1066
1067 1067
1068 1068
1069 1069 data = dataOut.data_pre
1070 1070 noise = dataOut.noise
1071 1071 print 'noise',noise
1072 1072 #SNRdB = 10*numpy.log10(dataOut.data_SNR)
1073 1073
1074 1074 FirstMoment = numpy.average(dataOut.data_param[:,1,:],0)
1075 1075 #SNRdBMean = []
1076 1076
1077 1077
1078 1078 #for j in range(nHeights):
1079 1079 # FirstMoment = numpy.append(FirstMoment,numpy.mean([dataOut.data_param[0,1,j],dataOut.data_param[1,1,j],dataOut.data_param[2,1,j]]))
1080 1080 # SNRdBMean = numpy.append(SNRdBMean,numpy.mean([SNRdB[0,j],SNRdB[1,j],SNRdB[2,j]]))
1081 1081
1082 1082 data_output=numpy.ones([3,spc.shape[2]])*numpy.NaN
1083 1083
1084 1084 velocityX=[]
1085 1085 velocityY=[]
1086 1086 velocityV=[]
1087 1087
1088 1088 dbSNR = 10*numpy.log10(dataSNR)
1089 1089 dbSNR = numpy.average(dbSNR,0)
1090 1090 for Height in range(nHeights):
1091 1091
1092 1092 [Vzon,Vmer,Vver, GaussCenter]= self.WindEstimation(spc, cspc, pairsList, ChanDist, Height, noise, VelRange, dbSNR[Height], SNRlimit)
1093 1093
1094 1094 if abs(Vzon)<100. and abs(Vzon)> 0.:
1095 1095 velocityX=numpy.append(velocityX, Vzon)#Vmag
1096 1096
1097 1097 else:
1098 1098 print 'Vzon',Vzon
1099 1099 velocityX=numpy.append(velocityX, numpy.NaN)
1100 1100
1101 1101 if abs(Vmer)<100. and abs(Vmer) > 0.:
1102 1102 velocityY=numpy.append(velocityY, Vmer)#Vang
1103 1103
1104 1104 else:
1105 1105 print 'Vmer',Vmer
1106 1106 velocityY=numpy.append(velocityY, numpy.NaN)
1107 1107
1108 1108 if dbSNR[Height] > SNRlimit:
1109 1109 velocityV=numpy.append(velocityV, FirstMoment[Height])
1110 1110 else:
1111 1111 velocityV=numpy.append(velocityV, numpy.NaN)
1112 1112 #FirstMoment[Height]= numpy.NaN
1113 1113 # if SNRdBMean[Height] <12:
1114 1114 # FirstMoment[Height] = numpy.NaN
1115 1115 # velocityX[Height] = numpy.NaN
1116 1116 # velocityY[Height] = numpy.NaN
1117 1117
1118 1118
1119 1119 data_output[0]=numpy.array(velocityX)
1120 1120 data_output[1]=numpy.array(velocityY)
1121 1121 data_output[2]=-velocityV#FirstMoment
1122 1122
1123 1123 print ' '
1124 1124 #print 'FirstMoment'
1125 1125 #print FirstMoment
1126 1126 print 'velocityX',data_output[0]
1127 1127 print ' '
1128 1128 print 'velocityY',data_output[1]
1129 1129 #print numpy.array(velocityY)
1130 1130 print ' '
1131 1131 #print 'SNR'
1132 1132 #print 10*numpy.log10(dataOut.data_SNR)
1133 1133 #print numpy.shape(10*numpy.log10(dataOut.data_SNR))
1134 1134 print ' '
1135 1135
1136 1136
1137 1137 dataOut.data_output=data_output
1138 1138 return
1139 1139
1140 1140
1141 1141 def moving_average(self,x, N=2):
1142 1142 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
1143 1143
1144 1144 def gaus(self,xSamples,a,x0,sigma):
1145 1145 return a*numpy.exp(-(xSamples-x0)**2/(2*sigma**2))
1146 1146
1147 1147 def Find(self,x,value):
1148 1148 for index in range(len(x)):
1149 1149 if x[index]==value:
1150 1150 return index
1151 1151
1152 1152 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, VelRange, dbSNR, SNRlimit):
1153 1153
1154 1154 ySamples=numpy.ones([spc.shape[0],spc.shape[1]])
1155 1155 phase=numpy.ones([spc.shape[0],spc.shape[1]])
1156 1156 CSPCSamples=numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_)
1157 1157 coherence=numpy.ones([spc.shape[0],spc.shape[1]])
1158 1158 PhaseSlope=numpy.ones(spc.shape[0])
1159 1159 PhaseInter=numpy.ones(spc.shape[0])
1160 1160 xFrec=VelRange
1161 1161
1162 1162 '''Getting Eij and Nij'''
1163 1163
1164 1164 E01=ChanDist[0][0]
1165 1165 N01=ChanDist[0][1]
1166 1166
1167 1167 E02=ChanDist[1][0]
1168 1168 N02=ChanDist[1][1]
1169 1169
1170 1170 E12=ChanDist[2][0]
1171 1171 N12=ChanDist[2][1]
1172 1172
1173 1173 z = spc.copy()
1174 1174 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
1175 1175
1176 1176 for i in range(spc.shape[0]):
1177 1177
1178 1178 '''****** Line of Data SPC ******'''
1179 1179 zline=z[i,:,Height]
1180 1180
1181 1181 '''****** SPC is normalized ******'''
1182 1182 FactNorm= (zline.copy()-noise[i]) / numpy.sum(zline.copy())
1183 1183 FactNorm= FactNorm/numpy.sum(FactNorm)
1184 1184
1185 1185 SmoothSPC=self.moving_average(FactNorm,N=3)
1186 1186
1187 1187 xSamples = ar(range(len(SmoothSPC)))
1188 1188 ySamples[i] = SmoothSPC
1189 1189
1190 1190 #dbSNR=10*numpy.log10(dataSNR)
1191 1191 print ' '
1192 1192 print ' '
1193 1193 print ' '
1194 1194
1195 1195 #print 'dataSNR', dbSNR.shape, dbSNR[0,40:120]
1196 1196 print 'SmoothSPC', SmoothSPC.shape, SmoothSPC[0:20]
1197 1197 print 'noise',noise
1198 1198 print 'zline',zline.shape, zline[0:20]
1199 1199 print 'FactNorm',FactNorm.shape, FactNorm[0:20]
1200 1200 print 'FactNorm suma', numpy.sum(FactNorm)
1201 1201
1202 1202 for i in range(spc.shape[0]):
1203 1203
1204 1204 '''****** Line of Data CSPC ******'''
1205 1205 cspcLine=cspc[i,:,Height].copy()
1206 1206
1207 1207 '''****** CSPC is normalized ******'''
1208 1208 chan_index0 = pairsList[i][0]
1209 1209 chan_index1 = pairsList[i][1]
1210 1210 CSPCFactor= abs(numpy.sum(ySamples[chan_index0]) * numpy.sum(ySamples[chan_index1])) #
1211 1211
1212 1212 CSPCNorm = (cspcLine.copy() -noise[i]) / numpy.sqrt(CSPCFactor)
1213 1213
1214 1214 CSPCSamples[i] = CSPCNorm
1215 1215 coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor)
1216 1216
1217 1217 coherence[i]= self.moving_average(coherence[i],N=2)
1218 1218
1219 1219 phase[i] = self.moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi
1220 1220
1221 1221 print 'cspcLine', cspcLine.shape, cspcLine[0:20]
1222 1222 print 'CSPCFactor', CSPCFactor#, CSPCFactor[0:20]
1223 1223 print numpy.sum(ySamples[chan_index0]), numpy.sum(ySamples[chan_index1]), -noise[i]
1224 1224 print 'CSPCNorm', CSPCNorm.shape, CSPCNorm[0:20]
1225 1225 print 'CSPCNorm suma', numpy.sum(CSPCNorm)
1226 1226 print 'CSPCSamples', CSPCSamples.shape, CSPCSamples[0,0:20]
1227 1227
1228 1228 '''****** Getting fij width ******'''
1229 1229
1230 1230 yMean=[]
1231 1231 yMean2=[]
1232 1232
1233 1233 for j in range(len(ySamples[1])):
1234 1234 yMean=numpy.append(yMean,numpy.mean([ySamples[0,j],ySamples[1,j],ySamples[2,j]]))
1235 1235
1236 1236 '''******* Getting fitting Gaussian ******'''
1237 1237 meanGauss=sum(xSamples*yMean) / len(xSamples)
1238 1238 sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples)
1239 1239
1240 1240 print '****************************'
1241 1241 print 'len(xSamples): ',len(xSamples)
1242 1242 print 'yMean: ', yMean.shape, yMean[0:20]
1243 1243 print 'ySamples', ySamples.shape, ySamples[0,0:20]
1244 1244 print 'xSamples: ',xSamples.shape, xSamples[0:20]
1245 1245
1246 1246 print 'meanGauss',meanGauss
1247 1247 print 'sigma',sigma
1248 1248
1249 1249 #if (abs(meanGauss/sigma**2) > 0.0001) : #0.000000001):
1250 1250 if dbSNR > SNRlimit :
1251 1251 try:
1252 1252 popt,pcov = curve_fit(self.gaus,xSamples,yMean,p0=[1,meanGauss,sigma])
1253 1253
1254 1254 if numpy.amax(popt)>numpy.amax(yMean)*0.3:
1255 1255 FitGauss=self.gaus(xSamples,*popt)
1256 1256
1257 1257 else:
1258 1258 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1259 1259 print 'Verificador: Dentro', Height
1260 1260 except :#RuntimeError:
1261 1261 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1262 1262
1263 1263
1264 1264 else:
1265 1265 FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean)
1266 1266
1267 1267 Maximun=numpy.amax(yMean)
1268 1268 eMinus1=Maximun*numpy.exp(-1)#*0.8
1269 1269
1270 1270 HWpos=self.Find(FitGauss,min(FitGauss, key=lambda value:abs(value-eMinus1)))
1271 1271 HalfWidth= xFrec[HWpos]
1272 1272 GCpos=self.Find(FitGauss, numpy.amax(FitGauss))
1273 1273 Vpos=self.Find(FactNorm, numpy.amax(FactNorm))
1274 1274
1275 1275 #Vpos=FirstMoment[]
1276 1276
1277 1277 '''****** Getting Fij ******'''
1278 1278
1279 1279 GaussCenter=xFrec[GCpos]
1280 1280 if (GaussCenter<0 and HalfWidth>0) or (GaussCenter>0 and HalfWidth<0):
1281 1281 Fij=abs(GaussCenter)+abs(HalfWidth)+0.0000001
1282 1282 else:
1283 1283 Fij=abs(GaussCenter-HalfWidth)+0.0000001
1284 1284
1285 1285 '''****** Getting Frequency range of significant data ******'''
1286 1286
1287 1287 Rangpos=self.Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.10)))
1288 1288
1289 1289 if Rangpos<GCpos:
1290 1290 Range=numpy.array([Rangpos,2*GCpos-Rangpos])
1291 1291 elif Rangpos< ( len(xFrec)- len(xFrec)*0.1):
1292 1292 Range=numpy.array([2*GCpos-Rangpos,Rangpos])
1293 1293 else:
1294 1294 Range = numpy.array([0,0])
1295 1295
1296 1296 print ' '
1297 1297 print 'GCpos',GCpos, ( len(xFrec)- len(xFrec)*0.1)
1298 1298 print 'Rangpos',Rangpos
1299 1299 print 'RANGE: ', Range
1300 1300 FrecRange=xFrec[Range[0]:Range[1]]
1301 1301
1302 1302 '''****** Getting CSPC Slope ******'''
1303 1303
1304 1304 for i in range(spc.shape[0]):
1305 1305
1306 1306 if len(FrecRange)>5 and len(FrecRange)<spc.shape[1]*0.5:
1307 1307 PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=3)
1308 1308
1309 1309 print 'FrecRange', len(FrecRange) , FrecRange
1310 1310 print 'PhaseRange', len(PhaseRange), PhaseRange
1311 1311 print ' '
1312 1312 if len(FrecRange) == len(PhaseRange):
1313 1313 slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange)
1314 1314 PhaseSlope[i]=slope
1315 1315 PhaseInter[i]=intercept
1316 1316 else:
1317 1317 PhaseSlope[i]=0
1318 1318 PhaseInter[i]=0
1319 1319 else:
1320 1320 PhaseSlope[i]=0
1321 1321 PhaseInter[i]=0
1322 1322
1323 1323 '''Getting constant C'''
1324 1324 cC=(Fij*numpy.pi)**2
1325 1325
1326 1326 '''****** Getting constants F and G ******'''
1327 1327 MijEijNij=numpy.array([[E02,N02], [E12,N12]])
1328 1328 MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi)
1329 1329 MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi)
1330 1330 MijResults=numpy.array([MijResult0,MijResult1])
1331 1331 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1332 1332
1333 1333 '''****** Getting constants A, B and H ******'''
1334 1334 W01=numpy.amax(coherence[0])
1335 1335 W02=numpy.amax(coherence[1])
1336 1336 W12=numpy.amax(coherence[2])
1337 1337
1338 1338 WijResult0=((cF*E01+cG*N01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC))
1339 1339 WijResult1=((cF*E02+cG*N02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC))
1340 1340 WijResult2=((cF*E12+cG*N12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC))
1341 1341
1342 1342 WijResults=numpy.array([WijResult0, WijResult1, WijResult2])
1343 1343
1344 1344 WijEijNij=numpy.array([ [E01**2, N01**2, 2*E01*N01] , [E02**2, N02**2, 2*E02*N02] , [E12**2, N12**2, 2*E12*N12] ])
1345 1345 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1346 1346
1347 1347 VxVy=numpy.array([[cA,cH],[cH,cB]])
1348 1348
1349 1349 VxVyResults=numpy.array([-cF,-cG])
1350 1350 (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
1351 1351
1352 1352 Vzon = Vy
1353 1353 Vmer = Vx
1354 1354 Vmag=numpy.sqrt(Vzon**2+Vmer**2)
1355 1355 Vang=numpy.arctan2(Vmer,Vzon)
1356 1356 Vver=xFrec[Vpos]
1357 1357 print 'vzon y vmer', Vzon, Vmer
1358 1358 return Vzon, Vmer, Vver, GaussCenter
1359 1359
1360 1360 class SpectralMoments(Operation):
1361 1361
1362 1362 '''
1363 1363 Function SpectralMoments()
1364 1364
1365 1365 Calculates moments (power, mean, standard deviation) and SNR of the signal
1366 1366
1367 1367 Type of dataIn: Spectra
1368 1368
1369 1369 Configuration Parameters:
1370 1370
1371 1371 dirCosx : Cosine director in X axis
1372 1372 dirCosy : Cosine director in Y axis
1373 1373
1374 1374 elevation :
1375 1375 azimuth :
1376 1376
1377 1377 Input:
1378 1378 channelList : simple channel list to select e.g. [2,3,7]
1379 1379 self.dataOut.data_pre : Spectral data
1380 1380 self.dataOut.abscissaList : List of frequencies
1381 1381 self.dataOut.noise : Noise level per channel
1382 1382
1383 1383 Affected:
1384 1384 self.dataOut.data_param : Parameters per channel
1385 1385 self.dataOut.data_SNR : SNR per channel
1386 1386
1387 1387 '''
1388 1388
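# Moment definitions sketch (a restatement of __calculateMoments() below, for a
# spectrum S(f) with noise level n0 and frequency window w(f) over the valid bins;
# no new behaviour):
#
#   power = sum((S - n0) * w)                               # zeroth moment
#   fd    = sum((S - n0) * f * w) / power                   # first moment (Doppler shift)
#   width = sqrt(sum((S - n0) * w * (f - fd)**2) / power)   # second moment (spectral width)
#   snr   = (S.mean() - n0) / n0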
1389 1389 def run(self, dataOut):
1390 1390
1391 1391 #dataOut.data_pre = dataOut.data_pre[0]
1392 1392 data = dataOut.data_pre[0]
1393 1393 absc = dataOut.abscissaList[:-1]
1394 1394 noise = dataOut.noise
1395 1395 nChannel = data.shape[0]
1396 1396 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1397 1397
1398 1398 for ind in range(nChannel):
1399 1399 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1400 1400
1401 1401 dataOut.data_param = data_param[:,1:,:]
1402 1402 dataOut.data_SNR = data_param[:,0]
1403 1403 dataOut.data_DOP = data_param[:,1]
1404 1404 dataOut.data_MEAN = data_param[:,2]
1405 1405 dataOut.data_STD = data_param[:,3]
1406 1406 return
1407 1407
1408 1408 def __calculateMoments(self, oldspec, oldfreq, n0,
1409 1409 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1410 1410
1411 if (nicoh == None): nicoh = 1
1412 if (graph == None): graph = 0
1413 if (smooth == None): smooth = 0
1411 if (nicoh is None): nicoh = 1
1412 if (graph is None): graph = 0
1413 if (smooth is None): smooth = 0
1414 1414 elif (self.smooth < 3): smooth = 0
1415 1415
1416 if (type1 == None): type1 = 0
1417 if (fwindow == None): fwindow = numpy.zeros(oldfreq.size) + 1
1418 if (snrth == None): snrth = -3
1419 if (dc == None): dc = 0
1420 if (aliasing == None): aliasing = 0
1421 if (oldfd == None): oldfd = 0
1422 if (wwauto == None): wwauto = 0
1416 if (type1 is None): type1 = 0
1417 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1418 if (snrth is None): snrth = -3
1419 if (dc is None): dc = 0
1420 if (aliasing is None): aliasing = 0
1421 if (oldfd is None): oldfd = 0
1422 if (wwauto is None): wwauto = 0
1423 1423
1424 1424 if (n0 < 1.e-20): n0 = 1.e-20
1425 1425
1426 1426 freq = oldfreq
1427 1427 vec_power = numpy.zeros(oldspec.shape[1])
1428 1428 vec_fd = numpy.zeros(oldspec.shape[1])
1429 1429 vec_w = numpy.zeros(oldspec.shape[1])
1430 1430 vec_snr = numpy.zeros(oldspec.shape[1])
1431 1431
1432 1432 for ind in range(oldspec.shape[1]):
1433 1433
1434 1434 spec = oldspec[:,ind]
1435 1435 aux = spec*fwindow
1436 1436 max_spec = aux.max()
1437 1437 m = list(aux).index(max_spec)
1438 1438
1439 1439 #Smooth
1440 1440 if (smooth == 0): spec2 = spec
1441 1441 else: spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1442 1442
1443 1443 # Moment calculation
1444 1444 bb = spec2[range(m,spec2.size)]
1445 1445 bb = (bb<n0).nonzero()
1446 1446 bb = bb[0]
1447 1447
1448 1448 ss = spec2[range(0,m + 1)]
1449 1449 ss = (ss<n0).nonzero()
1450 1450 ss = ss[0]
1451 1451
1452 1452 if (bb.size == 0):
1453 1453 bb0 = spec.size - 1 - m
1454 1454 else:
1455 1455 bb0 = bb[0] - 1
1456 1456 if (bb0 < 0):
1457 1457 bb0 = 0
1458 1458
1459 1459 if (ss.size == 0): ss1 = 1
1460 1460 else: ss1 = max(ss) + 1
1461 1461
1462 1462 if (ss1 > m): ss1 = m
1463 1463
1464 1464 valid = numpy.asarray(range(int(m + bb0 - ss1 + 1))) + ss1
1465 1465 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
1466 1466 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
1467 1467 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
1468 1468 snr = (spec2.mean()-n0)/n0
1469 1469
1470 1470 if (snr < 1.e-20) :
1471 1471 snr = 1.e-20
1472 1472
1473 1473 vec_power[ind] = power
1474 1474 vec_fd[ind] = fd
1475 1475 vec_w[ind] = w
1476 1476 vec_snr[ind] = snr
1477 1477
1478 1478 moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1479 1479 return moments
1480 1480
1481 1481 #------------------ Get SA Parameters --------------------------
1482 1482
1483 1483 def GetSAParameters(self):
1484 1484 #SA en frecuencia
1485 1485 pairslist = self.dataOut.groupList
1486 1486 num_pairs = len(pairslist)
1487 1487
1488 1488 vel = self.dataOut.abscissaList
1489 1489 spectra = self.dataOut.data_pre
1490 1490 cspectra = self.dataIn.data_cspc
1491 1491 delta_v = vel[1] - vel[0]
1492 1492
1493 1493 #Calculating the power spectrum
1494 1494 spc_pow = numpy.sum(spectra, 3)*delta_v
1495 1495 #Normalizing Spectra
1496 1496 norm_spectra = spectra/spc_pow
1497 1497 #Calculating the norm_spectra at peak
1498 1498 max_spectra = numpy.max(norm_spectra, 3)
1499 1499
1500 1500 #Normalizing Cross Spectra
1501 1501 norm_cspectra = numpy.zeros(cspectra.shape)
1502 1502
1503 1503 for i in range(num_chan):
1504 1504 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1505 1505
1506 1506 max_cspectra = numpy.max(norm_cspectra,2)
1507 1507 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1508 1508
1509 1509 for i in range(num_pairs):
1510 1510 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1511 1511 #------------------- Get Lags ----------------------------------
1512 1512
1513 1513 class SALags(Operation):
1514 1514 '''
1515 1515 Function SALags()
1516 1516
1517 1517 Input:
1518 1518 self.dataOut.data_pre
1519 1519 self.dataOut.abscissaList
1520 1520 self.dataOut.noise
1521 1521 self.dataOut.normFactor
1522 1522 self.dataOut.data_SNR
1523 1523 self.dataOut.groupList
1524 1524 self.dataOut.nChannels
1525 1525
1526 1526 Affected:
1527 1527 self.dataOut.data_param
1528 1528
1529 1529 '''
1530 1530 def run(self, dataOut):
1531 1531 data_acf = dataOut.data_pre[0]
1532 1532 data_ccf = dataOut.data_pre[1]
1533 1533 normFactor_acf = dataOut.normFactor[0]
1534 1534 normFactor_ccf = dataOut.normFactor[1]
1535 1535 pairs_acf = dataOut.groupList[0]
1536 1536 pairs_ccf = dataOut.groupList[1]
1537 1537
1538 1538 nHeights = dataOut.nHeights
1539 1539 absc = dataOut.abscissaList
1540 1540 noise = dataOut.noise
1541 1541 SNR = dataOut.data_SNR
1542 1542 nChannels = dataOut.nChannels
1543 1543 # pairsList = dataOut.groupList
1544 1544 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1545 1545
1546 1546 for l in range(len(pairs_acf)):
1547 1547 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1548 1548
1549 1549 for l in range(len(pairs_ccf)):
1550 1550 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1551 1551
1552 1552 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1553 1553 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1554 1554 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1555 1555 return
1556 1556
1557 1557 # def __getPairsAutoCorr(self, pairsList, nChannels):
1558 1558 #
1559 1559 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1560 1560 #
1561 1561 # for l in range(len(pairsList)):
1562 1562 # firstChannel = pairsList[l][0]
1563 1563 # secondChannel = pairsList[l][1]
1564 1564 #
1565 1565 # #Getting autocorrelation pairs
1566 1566 # if firstChannel == secondChannel:
1567 1567 # pairsAutoCorr[firstChannel] = int(l)
1568 1568 #
1569 1569 # pairsAutoCorr = pairsAutoCorr.astype(int)
1570 1570 #
1571 1571 # pairsCrossCorr = range(len(pairsList))
1572 1572 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1573 1573 #
1574 1574 # return pairsAutoCorr, pairsCrossCorr
1575 1575
1576 1576 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1577 1577
1578 1578 lag0 = data_acf.shape[1]/2
1579 1579 #Autocorrelation function
1580 1580 mean_acf = stats.nanmean(data_acf, axis = 0)
1581 1581
1582 1582 #Get TauCross index
1583 1583 ind_ccf = data_ccf.argmax(axis = 1)
1584 1584 #Get TauAuto index
1585 1585 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1586 1586 ccf_lag0 = data_ccf[:,lag0,:]
1587 1587
1588 1588 for i in range(ccf_lag0.shape[0]):
1589 1589 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1590 1590
1591 1591 #Get TauCross and TauAuto
1592 1592 tau_ccf = lagRange[ind_ccf]
1593 1593 tau_acf = lagRange[ind_acf]
1594 1594
1595 1595 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1596 1596
1597 1597 tau_ccf[Nan1,Nan2] = numpy.nan
1598 1598 tau_acf[Nan1,Nan2] = numpy.nan
1599 1599 tau = numpy.vstack((tau_ccf,tau_acf))
1600 1600
1601 1601 return tau
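# Quick numeric check of the index-to-lag mapping above (assumed values): the
# peak index of the (cross-)correlation is converted to a time lag through
# lagRange, and peaks falling on the first lag are flagged as invalid (NaN).
#
#   lagRange = numpy.arange(-3, 4)*0.01     # [-0.03 ... 0.03] s
#   ind_ccf = numpy.array([[5]])            # peak index for one pair, one height
#   tau_ccf = lagRange[ind_ccf]             # -> 0.02 s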
1602 1602
1603 1603 def __calculateLag1Phase(self, data, lagTRange):
1604 1604 data1 = stats.nanmean(data, axis = 0)
1605 1605 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1606 1606
1607 1607 phase = numpy.angle(data1[lag1,:])
1608 1608
1609 1609 return phase
1610 1610
1611 1611 class SpectralFitting(Operation):
1612 1612 '''
1613 1613 Function SpectralFitting()
1614 1614
1615 1615 Input:
1616 1616 Output:
1617 1617 Variables modified:
1618 1618 '''
1619 1619
1620 1620 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1621 1621
1622 1622
1623 1623 if path != None:
1624 1624 sys.path.append(path)
1625 1625 self.dataOut.library = importlib.import_module(file)
1626 1626
1627 1627 #To be inserted as a parameter
1628 1628 groupArray = numpy.array(groupList)
1629 1629 # groupArray = numpy.array([[0,1],[2,3]])
1630 1630 self.dataOut.groupList = groupArray
1631 1631
1632 1632 nGroups = groupArray.shape[0]
1633 1633 nChannels = self.dataIn.nChannels
1634 1634 nHeights=self.dataIn.heightList.size
1635 1635
1636 1636 #Parameters Array
1637 1637 self.dataOut.data_param = None
1638 1638
1639 1639 #Set constants
1640 1640 constants = self.dataOut.library.setConstants(self.dataIn)
1641 1641 self.dataOut.constants = constants
1642 1642 M = self.dataIn.normFactor
1643 1643 N = self.dataIn.nFFTPoints
1644 1644 ippSeconds = self.dataIn.ippSeconds
1645 1645 K = self.dataIn.nIncohInt
1646 1646 pairsArray = numpy.array(self.dataIn.pairsList)
1647 1647
1648 1648 #List of possible combinations
1649 1649 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2))
1650 1650 indCross = numpy.zeros(len(listComb), dtype = 'int')
1651 1651
1652 1652 if getSNR:
1653 1653 listChannels = groupArray.reshape((groupArray.size))
1654 1654 listChannels.sort()
1655 1655 noise = self.dataIn.getNoise()
1656 1656 self.dataOut.data_SNR = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1657 1657
1658 1658 for i in range(nGroups):
1659 1659 coord = groupArray[i,:]
1660 1660
1661 1661 #Input data array
1662 1662 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1663 1663 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1664 1664
1665 1665 #Cross Spectra data array for Covariance Matrices
1666 1666 ind = 0
1667 1667 for pairs in listComb:
1668 1668 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1669 1669 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1670 1670 ind += 1
1671 1671 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1672 1672 dataCross = dataCross**2/K
1673 1673
1674 1674 for h in range(nHeights):
1675 1675 # print self.dataOut.heightList[h]
1676 1676
1677 1677 #Input
1678 1678 d = data[:,h]
1679 1679
1680 1680 #Covariance Matrix
1681 1681 D = numpy.diag(d**2/K)
1682 1682 ind = 0
1683 1683 for pairs in listComb:
1684 1684 #Coordinates in Covariance Matrix
1685 1685 x = pairs[0]
1686 1686 y = pairs[1]
1687 1687 #Channel Index
1688 1688 S12 = dataCross[ind,:,h]
1689 1689 D12 = numpy.diag(S12)
1690 1690 #Completing Covariance Matrix with Cross Spectra
1691 1691 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1692 1692 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1693 1693 ind += 1
1694 1694 Dinv = numpy.linalg.inv(D)
1695 1695 L = numpy.linalg.cholesky(Dinv)
1696 1696 LT = L.T
1697 1697
1698 1698 dp = numpy.dot(LT,d)
1699 1699
1700 1700 #Initial values
1701 1701 data_spc = self.dataIn.data_spc[coord,:,h]
1702 1702
1703 1703 if (h>0)and(error1[3]<5):
1704 1704 p0 = self.dataOut.data_param[i,:,h-1]
1705 1705 else:
1706 1706 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1707 1707
1708 1708 try:
1709 1709 #Least Squares
1710 1710 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1711 1711 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1712 1712 #Chi square error
1713 1713 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1714 1714 #Error with Jacobian
1715 1715 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1716 1716 except:
1717 1717 minp = p0*numpy.nan
1718 1718 error0 = numpy.nan
1719 1719 error1 = p0*numpy.nan
1720 1720
1721 1721 #Save
1722 if self.dataOut.data_param == None:
1722 if self.dataOut.data_param is None:
1723 1723 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1724 1724 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1725 1725
1726 1726 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1727 1727 self.dataOut.data_param[i,:,h] = minp
1728 1728 return
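# Minimal sketch (toy values, not library data) of the whitening used in run():
# with data covariance D, the Cholesky factor L of D^-1 turns the weighted fit
# into an ordinary least-squares one, so the residual handed to optimize.leastsq
# is L.T*(d - model) and the chi-square error is the sum of its squares.
#
#   d = numpy.array([1.0, 2.0])          # data vector
#   D = numpy.diag([0.5, 2.0])           # covariance matrix (diagonal here)
#   Dinv = numpy.linalg.inv(D)
#   L = numpy.linalg.cholesky(Dinv)      # L * L.T = Dinv
#   dp = numpy.dot(L.T, d)               # whitened data
#   fm = numpy.array([0.9, 2.1])         # model prediction f(p)
#   resid = dp - numpy.dot(L.T, fm)      # what __residFunction returns
#   chi2 = numpy.sum(resid**2)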
1729 1729
1730 1730 def __residFunction(self, p, dp, LT, constants):
1731 1731
1732 1732 fm = self.dataOut.library.modelFunction(p, constants)
1733 1733 fmp=numpy.dot(LT,fm)
1734 1734
1735 1735 return dp-fmp
1736 1736
1737 1737 def __getSNR(self, z, noise):
1738 1738
1739 1739 avg = numpy.average(z, axis=1)
1740 1740 SNR = (avg.T-noise)/noise
1741 1741 SNR = SNR.T
1742 1742 return SNR
1743 1743
1744 1744 def __chisq(p,chindex,hindex):
1745 1745 #similar to Resid but calculates CHI**2
1746 1746 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1747 1747 dp=numpy.dot(LT,d)
1748 1748 fmp=numpy.dot(LT,fm)
1749 1749 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1750 1750 return chisq
1751 1751
1752 1752 class WindProfiler(Operation):
1753 1753
1754 1754 __isConfig = False
1755 1755
1756 1756 __initime = None
1757 1757 __lastdatatime = None
1758 1758 __integrationtime = None
1759 1759
1760 1760 __buffer = None
1761 1761
1762 1762 __dataReady = False
1763 1763
1764 1764 __firstdata = None
1765 1765
1766 1766 n = None
1767 1767
1768 1768 def __init__(self, **kwargs):
1769 1769 Operation.__init__(self, **kwargs)
1770 1770
1771 1771 def __calculateCosDir(self, elev, azim):
1772 1772 zen = (90 - elev)*numpy.pi/180
1773 1773 azim = azim*numpy.pi/180
1774 1774 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1775 1775 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1776 1776
1777 1777 signX = numpy.sign(numpy.cos(azim))
1778 1778 signY = numpy.sign(numpy.sin(azim))
1779 1779
1780 1780 cosDirX = numpy.copysign(cosDirX, signX)
1781 1781 cosDirY = numpy.copysign(cosDirY, signY)
1782 1782 return cosDirX, cosDirY
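# Quick check of the geometry above (assumed values): a beam tilted 15 degrees
# from zenith towards the x axis (elev = 75, azim = 0) should give
# cosDirX = sin(15 deg) ~ 0.259 and cosDirY = 0.
#
#   elev = numpy.array([75.0])
#   azim = numpy.array([0.0])
#   cosDirX, cosDirY = self.__calculateCosDir(elev, azim)   # ~ (0.259, 0.0)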
1783 1783
1784 1784 def __calculateAngles(self, theta_x, theta_y, azimuth):
1785 1785
1786 1786 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1787 1787 zenith_arr = numpy.arccos(dir_cosw)
1788 1788 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1789 1789
1790 1790 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1791 1791 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1792 1792
1793 1793 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1794 1794
1795 1795 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1796 1796
1797 1797 #
1798 1798 if horOnly:
1799 1799 A = numpy.c_[dir_cosu,dir_cosv]
1800 1800 else:
1801 1801 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1802 1802 A = numpy.asmatrix(A)
1803 1803 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1804 1804
1805 1805 return A1
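# Sketch of how the matrix above is used (assumed shapes): the rows of A are
# the beam direction cosines, so A1 = inv(A.T*A)*A.T is the least-squares
# operator mapping the radial velocities of the N beams into (u, v[, w]):
#
#   A1 = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, False)   # (3, N)
#   velUVW = numpy.dot(A1, velRadial)                                # (3, nHeights)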
1806 1806
1807 1807 def __correctValues(self, heiRang, phi, velRadial, SNR):
1808 1808 listPhi = phi.tolist()
1809 1809 maxid = listPhi.index(max(listPhi))
1810 1810 minid = listPhi.index(min(listPhi))
1811 1811
1812 1812 rango = range(len(phi))
1813 1813 # rango = numpy.delete(rango,maxid)
1814 1814
1815 1815 heiRang1 = heiRang*math.cos(phi[maxid])
1816 1816 heiRangAux = heiRang*math.cos(phi[minid])
1817 1817 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1818 1818 heiRang1 = numpy.delete(heiRang1,indOut)
1819 1819
1820 1820 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1821 1821 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1822 1822
1823 1823 for i in rango:
1824 1824 x = heiRang*math.cos(phi[i])
1825 1825 y1 = velRadial[i,:]
1826 1826 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1827 1827
1828 1828 x1 = heiRang1
1829 1829 y11 = f1(x1)
1830 1830
1831 1831 y2 = SNR[i,:]
1832 1832 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1833 1833 y21 = f2(x1)
1834 1834
1835 1835 velRadial1[i,:] = y11
1836 1836 SNR1[i,:] = y21
1837 1837
1838 1838 return heiRang1, velRadial1, SNR1
1839 1839
1840 1840 def __calculateVelUVW(self, A, velRadial):
1841 1841
1842 1842 #Matrix operation
1843 1843 # velUVW = numpy.zeros((velRadial.shape[1],3))
1844 1844 # for ind in range(velRadial.shape[1]):
1845 1845 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1846 1846 # velUVW = velUVW.transpose()
1847 1847 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1848 1848 velUVW[:,:] = numpy.dot(A,velRadial)
1849 1849
1850 1850
1851 1851 return velUVW
1852 1852
1853 1853 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1854 1854
1855 1855 def techniqueDBS(self, kwargs):
1856 1856 """
1857 1857 Function that implements Doppler Beam Swinging (DBS) technique.
1858 1858
1859 1859 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1860 1860 Direction correction (if necessary), Ranges and SNR
1861 1861
1862 1862 Output: Winds estimation (Zonal, Meridional and Vertical)
1863 1863
1864 1864 Parameters affected: Winds, height range, SNR
1865 1865 """
1866 1866 velRadial0 = kwargs['velRadial']
1867 1867 heiRang = kwargs['heightList']
1868 1868 SNR0 = kwargs['SNR']
1869 1869
1870 1870 if kwargs.has_key('dirCosx') and kwargs.has_key('dirCosy'):
1871 1871 theta_x = numpy.array(kwargs['dirCosx'])
1872 1872 theta_y = numpy.array(kwargs['dirCosy'])
1873 1873 else:
1874 1874 elev = numpy.array(kwargs['elevation'])
1875 1875 azim = numpy.array(kwargs['azimuth'])
1876 1876 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1877 1877 azimuth = kwargs['correctAzimuth']
1878 1878 if kwargs.has_key('horizontalOnly'):
1879 1879 horizontalOnly = kwargs['horizontalOnly']
1880 1880 else: horizontalOnly = False
1881 1881 if kwargs.has_key('correctFactor'):
1882 1882 correctFactor = kwargs['correctFactor']
1883 1883 else: correctFactor = 1
1884 1884 if kwargs.has_key('channelList'):
1885 1885 channelList = kwargs['channelList']
1886 1886 if len(channelList) == 2:
1887 1887 horizontalOnly = True
1888 1888 arrayChannel = numpy.array(channelList)
1889 1889 velRadial0 = velRadial0[arrayChannel,:]
1890 1890 theta_x = theta_x[arrayChannel]
1891 1891 theta_y = theta_y[arrayChannel]
1892 1892
1893 1893 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1894 1894 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1895 1895 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1896 1896
1897 1897 #Compute the velocity components with DBS
1898 1898 winds = self.__calculateVelUVW(A,velRadial1)
1899 1899
1900 1900 return winds, heiRang1, SNR1
1901 1901
1902 1902 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1903 1903
1904 1904 nPairs = len(pairs_ccf)
1905 1905 posx = numpy.asarray(posx)
1906 1906 posy = numpy.asarray(posy)
1907 1907
1908 1908 #Inverse rotation to align with the azimuth
1909 1909 if azimuth != None:
1910 1910 azimuth = azimuth*math.pi/180
1911 1911 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1912 1912 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1913 1913 else:
1914 1914 posx1 = posx
1915 1915 posy1 = posy
1916 1916
1917 1917 #Distance calculation
1918 1918 distx = numpy.zeros(nPairs)
1919 1919 disty = numpy.zeros(nPairs)
1920 1920 dist = numpy.zeros(nPairs)
1921 1921 ang = numpy.zeros(nPairs)
1922 1922
1923 1923 for i in range(nPairs):
1924 1924 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1925 1925 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1926 1926 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1927 1927 ang[i] = numpy.arctan2(disty[i],distx[i])
1928 1928
1929 1929 return distx, disty, dist, ang
1930 1930 #Matrix calculation
1931 1931 # nPairs = len(pairs)
1932 1932 # ang1 = numpy.zeros((nPairs, 2, 1))
1933 1933 # dist1 = numpy.zeros((nPairs, 2, 1))
1934 1934 #
1935 1935 # for j in range(nPairs):
1936 1936 # dist1[j,0,0] = dist[pairs[j][0]]
1937 1937 # dist1[j,1,0] = dist[pairs[j][1]]
1938 1938 # ang1[j,0,0] = ang[pairs[j][0]]
1939 1939 # ang1[j,1,0] = ang[pairs[j][1]]
1940 1940 #
1941 1941 # return distx,disty, dist1,ang1
1942 1942
1943 1943
1944 1944 def __calculateVelVer(self, phase, lagTRange, _lambda):
1945 1945
1946 1946 Ts = lagTRange[1] - lagTRange[0]
1947 1947 velW = -_lambda*phase/(4*math.pi*Ts)
1948 1948
1949 1949 return velW
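# Worked example of the vertical velocity relation above (assumed values): with
# _lambda = 6.0 m (50 MHz), Ts = lagTRange[1] - lagTRange[0] = 0.01 s and a
# lag-1 phase of 0.5 rad,
#
#   velW = -6.0*0.5/(4*math.pi*0.01)     # ~ -23.9 m/s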
1950 1950
1951 1951 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1952 1952 nPairs = tau1.shape[0]
1953 1953 nHeights = tau1.shape[1]
1954 1954 vel = numpy.zeros((nPairs,3,nHeights))
1955 1955 dist1 = numpy.reshape(dist, (dist.size,1))
1956 1956
1957 1957 angCos = numpy.cos(ang)
1958 1958 angSin = numpy.sin(ang)
1959 1959
1960 1960 vel0 = dist1*tau1/(2*tau2**2)
1961 1961 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1962 1962 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1963 1963
1964 1964 ind = numpy.where(numpy.isinf(vel))
1965 1965 vel[ind] = numpy.nan
1966 1966
1967 1967 return vel
1968 1968
1969 1969 # def __getPairsAutoCorr(self, pairsList, nChannels):
1970 1970 #
1971 1971 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1972 1972 #
1973 1973 # for l in range(len(pairsList)):
1974 1974 # firstChannel = pairsList[l][0]
1975 1975 # secondChannel = pairsList[l][1]
1976 1976 #
1977 1977 # #Getting autocorrelation pairs
1978 1978 # if firstChannel == secondChannel:
1979 1979 # pairsAutoCorr[firstChannel] = int(l)
1980 1980 #
1981 1981 # pairsAutoCorr = pairsAutoCorr.astype(int)
1982 1982 #
1983 1983 # pairsCrossCorr = range(len(pairsList))
1984 1984 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1985 1985 #
1986 1986 # return pairsAutoCorr, pairsCrossCorr
1987 1987
1988 1988 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1989 1989 def techniqueSA(self, kwargs):
1990 1990
1991 1991 """
1992 1992 Function that implements Spaced Antenna (SA) technique.
1993 1993
1994 1994 Input: Antenna positions, antenna azimuth, cross-correlation pair list,
1995 1995 time lags (taus), lag-1 phase and radar wavelength
1996 1996
1997 1997 Output: Winds estimation (Zonal, Meridional and Vertical)
1998 1998
1999 1999 Parameters affected: Winds
2000 2000 """
2001 2001 position_x = kwargs['positionX']
2002 2002 position_y = kwargs['positionY']
2003 2003 azimuth = kwargs['azimuth']
2004 2004
2005 2005 if kwargs.has_key('correctFactor'):
2006 2006 correctFactor = kwargs['correctFactor']
2007 2007 else:
2008 2008 correctFactor = 1
2009 2009
2010 2010 groupList = kwargs['groupList']
2011 2011 pairs_ccf = groupList[1]
2012 2012 tau = kwargs['tau']
2013 2013 _lambda = kwargs['_lambda']
lagTRange = kwargs['lagTRange'] #lag time axis, needed for the vertical velocity
2014 2014
2015 2015 #Cross Correlation pairs obtained
2016 2016 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
2017 2017 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
2018 2018 # pairsSelArray = numpy.array(pairsSelected)
2019 2019 # pairs = []
2020 2020 #
2021 2021 # #Wind estimation pairs obtained
2022 2022 # for i in range(pairsSelArray.shape[0]/2):
2023 2023 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
2024 2024 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
2025 2025 # pairs.append((ind1,ind2))
2026 2026
2027 2027 indtau = tau.shape[0]/2
2028 2028 tau1 = tau[:indtau,:]
2029 2029 tau2 = tau[indtau:-1,:]
2030 2030 # tau1 = tau1[pairs,:]
2031 2031 # tau2 = tau2[pairs,:]
2032 2032 phase1 = tau[-1,:]
2033 2033
2034 2034 #---------------------------------------------------------------------
2035 2035 #Direct method
2036 2036 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
2037 2037 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
2038 2038 winds = stats.nanmean(winds, axis=0)
2039 2039 #---------------------------------------------------------------------
2040 2040 #General method
2041 2041 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
2042 2042 # #Compute the correlation function coefficients
2043 2043 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
2044 2044 # #Compute the velocities
2045 2045 # winds = self.calculateVelUV(F,G,A,B,H)
2046 2046
2047 2047 #---------------------------------------------------------------------
2048 2048 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
2049 2049 winds = correctFactor*winds
2050 2050 return winds
2051 2051
2052 2052 def __checkTime(self, currentTime, paramInterval, outputInterval):
2053 2053
2054 2054 dataTime = currentTime + paramInterval
2055 2055 deltaTime = dataTime - self.__initime
2056 2056
2057 2057 if deltaTime >= outputInterval or deltaTime < 0:
2058 2058 self.__dataReady = True
2059 2059 return
2060 2060
2061 2061 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
2062 2062 '''
2063 2063 Function that implements winds estimation technique with detected meteors.
2064 2064
2065 2065 Input: Detected meteors, minimum number of meteors required for the wind estimation
2066 2066
2067 2067 Output: Winds estimation (Zonal and Meridional)
2068 2068
2069 2069 Parameters affected: Winds
2070 2070 '''
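# Sketch of the per-bin least squares used below (toy values): every meteor in
# a height bin contributes a radial velocity and the direction cosines
# l = sin(zen)*sin(azim), m = sin(zen)*cos(azim); the horizontal wind is the
# least-squares solution of vel = A*[zonal, meridional] with A = [l, m]:
#
#   zen = numpy.radians(numpy.array([30.0, 40.0, 35.0]))
#   azim = numpy.radians(numpy.array([10.0, 120.0, 250.0]))
#   vel = numpy.array([12.0, -5.0, 8.0])
#   A = numpy.vstack((numpy.sin(zen)*numpy.sin(azim),
#                     numpy.sin(zen)*numpy.cos(azim))).transpose()
#   A1 = numpy.dot(numpy.linalg.inv(numpy.dot(A.transpose(), A)), A.transpose())
#   zonal, meridional = numpy.dot(A1, vel)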
2071 2071 # print arrayMeteor.shape
2072 2072 #Settings
2073 2073 nInt = (heightMax - heightMin)/2
2074 2074 # print nInt
2075 2075 nInt = int(nInt)
2076 2076 # print nInt
2077 2077 winds = numpy.zeros((2,nInt))*numpy.nan
2078 2078
2079 2079 #Filter errors
2080 2080 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
2081 2081 finalMeteor = arrayMeteor[error,:]
2082 2082
2083 2083 #Meteor Histogram
2084 2084 finalHeights = finalMeteor[:,2]
2085 2085 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
2086 2086 nMeteorsPerI = hist[0]
2087 2087 heightPerI = hist[1]
2088 2088
2089 2089 #Sort of meteors
2090 2090 indSort = finalHeights.argsort()
2091 2091 finalMeteor2 = finalMeteor[indSort,:]
2092 2092
2093 2093 # Calculating winds
2094 2094 ind1 = 0
2095 2095 ind2 = 0
2096 2096
2097 2097 for i in range(nInt):
2098 2098 nMet = nMeteorsPerI[i]
2099 2099 ind1 = ind2
2100 2100 ind2 = ind1 + nMet
2101 2101
2102 2102 meteorAux = finalMeteor2[ind1:ind2,:]
2103 2103
2104 2104 if meteorAux.shape[0] >= meteorThresh:
2105 2105 vel = meteorAux[:, 6]
2106 2106 zen = meteorAux[:, 4]*numpy.pi/180
2107 2107 azim = meteorAux[:, 3]*numpy.pi/180
2108 2108
2109 2109 n = numpy.cos(zen)
2110 2110 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
2111 2111 # l = m*numpy.tan(azim)
2112 2112 l = numpy.sin(zen)*numpy.sin(azim)
2113 2113 m = numpy.sin(zen)*numpy.cos(azim)
2114 2114
2115 2115 A = numpy.vstack((l, m)).transpose()
2116 2116 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
2117 2117 windsAux = numpy.dot(A1, vel)
2118 2118
2119 2119 winds[0,i] = windsAux[0]
2120 2120 winds[1,i] = windsAux[1]
2121 2121
2122 2122 return winds, heightPerI[:-1]
2123 2123
2124 2124 def techniqueNSM_SA(self, **kwargs):
2125 2125 metArray = kwargs['metArray']
2126 2126 heightList = kwargs['heightList']
2127 2127 timeList = kwargs['timeList']
2128 2128
2129 2129 rx_location = kwargs['rx_location']
2130 2130 groupList = kwargs['groupList']
2131 2131 azimuth = kwargs['azimuth']
2132 2132 dfactor = kwargs['dfactor']
2133 2133 k = kwargs['k']
2134 2134
2135 2135 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
2136 2136 d = dist*dfactor
2137 2137 #Phase calculation
2138 2138 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
2139 2139
2140 2140 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
2141 2141
2142 2142 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2143 2143 azimuth1 = azimuth1*numpy.pi/180
2144 2144
2145 2145 for i in range(heightList.size):
2146 2146 h = heightList[i]
2147 2147 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
2148 2148 metHeight = metArray1[indH,:]
2149 2149 if metHeight.shape[0] >= 2:
2150 2150 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
2151 2151 iazim = metHeight[:,1].astype(int)
2152 2152 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
2153 2153 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
2154 2154 A = numpy.asmatrix(A)
2155 2155 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2156 2156 velHor = numpy.dot(A1,velAux)
2157 2157
2158 2158 velEst[i,:] = numpy.squeeze(velHor)
2159 2159 return velEst
2160 2160
2161 2161 def __getPhaseSlope(self, metArray, heightList, timeList):
2162 2162 meteorList = []
2163 2163 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2164 2164 #Putting back together the meteor matrix
2165 2165 utctime = metArray[:,0]
2166 2166 uniqueTime = numpy.unique(utctime)
2167 2167
2168 2168 phaseDerThresh = 0.5
2169 2169 ippSeconds = timeList[1] - timeList[0]
2170 2170 sec = numpy.where(timeList>1)[0][0]
2171 2171 nPairs = metArray.shape[1] - 6
2172 2172 nHeights = len(heightList)
2173 2173
2174 2174 for t in uniqueTime:
2175 2175 metArray1 = metArray[utctime==t,:]
2176 2176 # phaseDerThresh = numpy.pi/4 #reduce phase thresh
2177 2177 tmet = metArray1[:,1].astype(int)
2178 2178 hmet = metArray1[:,2].astype(int)
2179 2179
2180 2180 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2181 2181 metPhase[:,:] = numpy.nan
2182 2182 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2183 2183
2184 2184 #Delete short trails
2185 2185 metBool = ~numpy.isnan(metPhase[0,:,:])
2186 2186 heightVect = numpy.sum(metBool, axis = 1)
2187 2187 metBool[heightVect<sec,:] = False
2188 2188 metPhase[:,heightVect<sec,:] = numpy.nan
2189 2189
2190 2190 #Derivative
2191 2191 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2192 2192 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2193 2193 metPhase[phDerAux] = numpy.nan
2194 2194
2195 2195 #--------------------------METEOR DETECTION -----------------------------------------
2196 2196 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2197 2197
2198 2198 for p in numpy.arange(nPairs):
2199 2199 phase = metPhase[p,:,:]
2200 2200 phDer = metDer[p,:,:]
2201 2201
2202 2202 for h in indMet:
2203 2203 height = heightList[h]
2204 2204 phase1 = phase[h,:] #82
2205 2205 phDer1 = phDer[h,:]
2206 2206
2207 2207 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2208 2208
2209 2209 indValid = numpy.where(~numpy.isnan(phase1))[0]
2210 2210 initMet = indValid[0]
2211 2211 endMet = 0
2212 2212
2213 2213 for i in range(len(indValid)-1):
2214 2214
2215 2215 #Time difference
2216 2216 inow = indValid[i]
2217 2217 inext = indValid[i+1]
2218 2218 idiff = inext - inow
2219 2219 #Phase difference
2220 2220 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2221 2221
2222 2222 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2223 2223 sizeTrail = inow - initMet + 1
2224 2224 if sizeTrail>3*sec: #Process only sufficiently long trails
2225 2225 x = numpy.arange(initMet,inow+1)*ippSeconds
2226 2226 y = phase1[initMet:inow+1]
2227 2227 ynnan = ~numpy.isnan(y)
2228 2228 x = x[ynnan]
2229 2229 y = y[ynnan]
2230 2230 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2231 2231 ylin = x*slope + intercept
2232 2232 rsq = r_value**2
2233 2233 if rsq > 0.5:
2234 2234 vel = slope#*height*1000/(k*d)
2235 2235 estAux = numpy.array([utctime,p,height, vel, rsq])
2236 2236 meteorList.append(estAux)
2237 2237 initMet = inext
2238 2238 metArray2 = numpy.array(meteorList)
2239 2239
2240 2240 return metArray2
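# Sketch of the per-trail fit performed above (assumed values): the unwrapped
# phase of a single meteor trail is fitted linearly against time; the slope
# (rad/s) is kept when r^2 > 0.5 and later converted into a velocity in
# techniqueNSM_SA via slope*height*1000/(k*d).
#
#   x = numpy.arange(0, 10)*0.01          # trail times (s), assuming ippSeconds = 0.01
#   y = 0.8*x + 0.1                       # unwrapped phase (rad)
#   slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
#   # slope -> ~0.8 rad/s, r_value**2 -> ~1.0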
2241 2241
2242 2242 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2243 2243
2244 2244 azimuth1 = numpy.zeros(len(pairslist))
2245 2245 dist = numpy.zeros(len(pairslist))
2246 2246
2247 2247 for i in range(len(rx_location)):
2248 2248 ch0 = pairslist[i][0]
2249 2249 ch1 = pairslist[i][1]
2250 2250
2251 2251 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2252 2252 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2253 2253 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2254 2254 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2255 2255
2256 2256 azimuth1 -= azimuth0
2257 2257 return azimuth1, dist
2258 2258
2259 2259 def techniqueNSM_DBS(self, **kwargs):
2260 2260 metArray = kwargs['metArray']
2261 2261 heightList = kwargs['heightList']
2262 2262 timeList = kwargs['timeList']
2263 2263 azimuth = kwargs['azimuth']
2264 2264 theta_x = numpy.array(kwargs['theta_x'])
2265 2265 theta_y = numpy.array(kwargs['theta_y'])
2266 2266
2267 2267 utctime = metArray[:,0]
2268 2268 cmet = metArray[:,1].astype(int)
2269 2269 hmet = metArray[:,3].astype(int)
2270 2270 SNRmet = metArray[:,4]
2271 2271 vmet = metArray[:,5]
2272 2272 spcmet = metArray[:,6]
2273 2273
2274 2274 nChan = numpy.max(cmet) + 1
2275 2275 nHeights = len(heightList)
2276 2276
2277 2277 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2278 2278 hmet = heightList[hmet]
2279 2279 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2280 2280
2281 2281 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2282 2282
2283 2283 for i in range(nHeights - 1):
2284 2284 hmin = heightList[i]
2285 2285 hmax = heightList[i + 1]
2286 2286
2287 2287 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2288 2288 indthisH = numpy.where(thisH)
2289 2289
2290 2290 if numpy.size(indthisH) > 3:
2291 2291
2292 2292 vel_aux = vmet[thisH]
2293 2293 chan_aux = cmet[thisH]
2294 2294 cosu_aux = dir_cosu[chan_aux]
2295 2295 cosv_aux = dir_cosv[chan_aux]
2296 2296 cosw_aux = dir_cosw[chan_aux]
2297 2297
2298 2298 nch = numpy.size(numpy.unique(chan_aux))
2299 2299 if nch > 1:
2300 2300 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2301 2301 velEst[i,:] = numpy.dot(A,vel_aux)
2302 2302
2303 2303 return velEst
2304 2304
2305 2305 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2306 2306
2307 2307 param = dataOut.data_param
2308 2308 if dataOut.abscissaList != None:
2309 2309 absc = dataOut.abscissaList[:-1]
2310 2310 # noise = dataOut.noise
2311 2311 heightList = dataOut.heightList
2312 2312 SNR = dataOut.data_SNR
2313 2313
2314 2314 if technique == 'DBS':
2315 2315
2316 2316 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2317 2317 kwargs['heightList'] = heightList
2318 2318 kwargs['SNR'] = SNR
2319 2319
2320 2320 dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
2321 2321 dataOut.utctimeInit = dataOut.utctime
2322 2322 dataOut.outputInterval = dataOut.paramInterval
2323 2323
2324 2324 elif technique == 'SA':
2325 2325
2326 2326 #Parameters
2327 2327 # position_x = kwargs['positionX']
2328 2328 # position_y = kwargs['positionY']
2329 2329 # azimuth = kwargs['azimuth']
2330 2330 #
2331 2331 # if kwargs.has_key('crosspairsList'):
2332 2332 # pairs = kwargs['crosspairsList']
2333 2333 # else:
2334 2334 # pairs = None
2335 2335 #
2336 2336 # if kwargs.has_key('correctFactor'):
2337 2337 # correctFactor = kwargs['correctFactor']
2338 2338 # else:
2339 2339 # correctFactor = 1
2340 2340
2341 2341 # tau = dataOut.data_param
2342 2342 # _lambda = dataOut.C/dataOut.frequency
2343 2343 # pairsList = dataOut.groupList
2344 2344 # nChannels = dataOut.nChannels
2345 2345
2346 2346 kwargs['groupList'] = dataOut.groupList
2347 2347 kwargs['tau'] = dataOut.data_param
2348 2348 kwargs['_lambda'] = dataOut.C/dataOut.frequency
kwargs['lagTRange'] = absc #lag time axis used by techniqueSA for the vertical velocity
2349 2349 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2350 2350 dataOut.data_output = self.techniqueSA(kwargs)
2351 2351 dataOut.utctimeInit = dataOut.utctime
2352 2352 dataOut.outputInterval = dataOut.timeInterval
2353 2353
2354 2354 elif technique == 'Meteors':
2355 2355 dataOut.flagNoData = True
2356 2356 self.__dataReady = False
2357 2357
2358 2358 if kwargs.has_key('nHours'):
2359 2359 nHours = kwargs['nHours']
2360 2360 else:
2361 2361 nHours = 1
2362 2362
2363 2363 if kwargs.has_key('meteorsPerBin'):
2364 2364 meteorThresh = kwargs['meteorsPerBin']
2365 2365 else:
2366 2366 meteorThresh = 6
2367 2367
2368 2368 if kwargs.has_key('hmin'):
2369 2369 hmin = kwargs['hmin']
2370 2370 else: hmin = 70
2371 2371 if kwargs.has_key('hmax'):
2372 2372 hmax = kwargs['hmax']
2373 2373 else: hmax = 110
2374 2374
2375 2375 dataOut.outputInterval = nHours*3600
2376 2376
2377 2377 if self.__isConfig == False:
2378 2378 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2379 2379 #Get Initial LTC time
2380 2380 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2381 2381 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2382 2382
2383 2383 self.__isConfig = True
2384 2384
2385 if self.__buffer == None:
2385 if self.__buffer is None:
2386 2386 self.__buffer = dataOut.data_param
2387 2387 self.__firstdata = copy.copy(dataOut)
2388 2388
2389 2389 else:
2390 2390 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2391 2391
2392 2392 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2393 2393
2394 2394 if self.__dataReady:
2395 2395 dataOut.utctimeInit = self.__initime
2396 2396
2397 2397 self.__initime += dataOut.outputInterval #to erase time offset
2398 2398
2399 2399 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2400 2400 dataOut.flagNoData = False
2401 2401 self.__buffer = None
2402 2402
2403 2403 elif technique == 'Meteors1':
2404 2404 dataOut.flagNoData = True
2405 2405 self.__dataReady = False
2406 2406
2407 2407 if kwargs.has_key('nMins'):
2408 2408 nMins = kwargs['nMins']
2409 2409 else: nMins = 20
2410 2410 if kwargs.has_key('rx_location'):
2411 2411 rx_location = kwargs['rx_location']
2412 2412 else: rx_location = [(0,1),(1,1),(1,0)]
2413 2413 if kwargs.has_key('azimuth'):
2414 2414 azimuth = kwargs['azimuth']
2415 2415 else: azimuth = 51.06
2416 2416 if kwargs.has_key('dfactor'):
2417 2417 dfactor = kwargs['dfactor']
2418 2418 if kwargs.has_key('mode'):
2419 2419 mode = kwargs['mode']
2420 2420 if kwargs.has_key('theta_x'):
2421 2421 theta_x = kwargs['theta_x']
2422 2422 if kwargs.has_key('theta_y'):
2423 2423 theta_y = kwargs['theta_y']
2424 2424 else: mode = 'SA'
2425 2425
2426 2426 #Delete this later
2427 if dataOut.groupList == None:
2427 if dataOut.groupList is None:
2428 2428 dataOut.groupList = [(0,1),(0,2),(1,2)]
2429 2429 groupList = dataOut.groupList
2430 2430 C = 3e8
2431 2431 freq = 50e6
2432 2432 lamb = C/freq
2433 2433 k = 2*numpy.pi/lamb
2434 2434
2435 2435 timeList = dataOut.abscissaList
2436 2436 heightList = dataOut.heightList
2437 2437
2438 2438 if self.__isConfig == False:
2439 2439 dataOut.outputInterval = nMins*60
2440 2440 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2441 2441 #Get Initial LTC time
2442 2442 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2443 2443 minuteAux = initime.minute
2444 2444 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2445 2445 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2446 2446
2447 2447 self.__isConfig = True
2448 2448
2449 if self.__buffer == None:
2449 if self.__buffer is None:
2450 2450 self.__buffer = dataOut.data_param
2451 2451 self.__firstdata = copy.copy(dataOut)
2452 2452
2453 2453 else:
2454 2454 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2455 2455
2456 2456 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2457 2457
2458 2458 if self.__dataReady:
2459 2459 dataOut.utctimeInit = self.__initime
2460 2460 self.__initime += dataOut.outputInterval #to erase time offset
2461 2461
2462 2462 metArray = self.__buffer
2463 2463 if mode == 'SA':
2464 2464 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2465 2465 elif mode == 'DBS':
2466 2466 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2467 2467 dataOut.data_output = dataOut.data_output.T
2468 2468 dataOut.flagNoData = False
2469 2469 self.__buffer = None
2470 2470
2471 2471 return
2472 2472
2473 2473 class EWDriftsEstimation(Operation):
2474 2474
2475 2475 def __init__(self, **kwargs):
2476 2476 Operation.__init__(self, **kwargs)
2477 2477
2478 2478 def __correctValues(self, heiRang, phi, velRadial, SNR):
2479 2479 listPhi = phi.tolist()
2480 2480 maxid = listPhi.index(max(listPhi))
2481 2481 minid = listPhi.index(min(listPhi))
2482 2482
2483 2483 rango = range(len(phi))
2484 2484 # rango = numpy.delete(rango,maxid)
2485 2485
2486 2486 heiRang1 = heiRang*math.cos(phi[maxid])
2487 2487 heiRangAux = heiRang*math.cos(phi[minid])
2488 2488 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2489 2489 heiRang1 = numpy.delete(heiRang1,indOut)
2490 2490
2491 2491 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2492 2492 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2493 2493
2494 2494 for i in rango:
2495 2495 x = heiRang*math.cos(phi[i])
2496 2496 y1 = velRadial[i,:]
2497 2497 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2498 2498
2499 2499 x1 = heiRang1
2500 2500 y11 = f1(x1)
2501 2501
2502 2502 y2 = SNR[i,:]
2503 2503 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2504 2504 y21 = f2(x1)
2505 2505
2506 2506 velRadial1[i,:] = y11
2507 2507 SNR1[i,:] = y21
2508 2508
2509 2509 return heiRang1, velRadial1, SNR1
2510 2510
2511 2511 def run(self, dataOut, zenith, zenithCorrection):
2512 2512 heiRang = dataOut.heightList
2513 2513 velRadial = dataOut.data_param[:,3,:]
2514 2514 SNR = dataOut.data_SNR
2515 2515
2516 2516 zenith = numpy.array(zenith)
2517 2517 zenith -= zenithCorrection
2518 2518 zenith *= numpy.pi/180
2519 2519
2520 2520 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2521 2521
2522 2522 alp = zenith[0]
2523 2523 bet = zenith[1]
2524 2524
2525 2525 w_w = velRadial1[0,:]
2526 2526 w_e = velRadial1[1,:]
2527 2527
2528 2528 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2529 2529 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2530 2530
2531 2531 winds = numpy.vstack((u,w))
2532 2532
2533 2533 dataOut.heightList = heiRang1
2534 2534 dataOut.data_output = winds
2535 2535 dataOut.data_SNR = SNR1
2536 2536
2537 2537 dataOut.utctimeInit = dataOut.utctime
2538 2538 dataOut.outputInterval = dataOut.timeInterval
2539 2539 return
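# Quick numeric check of the two-beam solution above (assumed values): with
# beams at alp = -0.05 rad and bet = +0.05 rad and true winds u = 50 m/s,
# w = 1 m/s, the radial velocities are w_w = u*sin(alp) + w*cos(alp) and
# w_e = u*sin(bet) + w*cos(bet), and the expressions used in run() recover u
# and w:
#
#   alp, bet, u, w = -0.05, 0.05, 50.0, 1.0
#   w_w = u*numpy.sin(alp) + w*numpy.cos(alp)    # ~ -1.500
#   w_e = u*numpy.sin(bet) + w*numpy.cos(bet)    # ~  3.498
#   w_est = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
#   u_est = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
#   # w_est -> ~1.0, u_est -> ~50.0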
2540 2540
2541 2541 #--------------- Non Specular Meteor ----------------
2542 2542
2543 2543 class NonSpecularMeteorDetection(Operation):
2544 2544
2545 2545 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2546 2546 data_acf = dataOut.data_pre[0]
2547 2547 data_ccf = dataOut.data_pre[1]
2548 2548 pairsList = dataOut.groupList[1]
2549 2549
2550 2550 lamb = dataOut.C/dataOut.frequency
2551 2551 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2552 2552 paramInterval = dataOut.paramInterval
2553 2553
2554 2554 nChannels = data_acf.shape[0]
2555 2555 nLags = data_acf.shape[1]
2556 2556 nProfiles = data_acf.shape[2]
2557 2557 nHeights = dataOut.nHeights
2558 2558 nCohInt = dataOut.nCohInt
2559 2559 sec = numpy.round(nProfiles/dataOut.paramInterval)
2560 2560 heightList = dataOut.heightList
2561 2561 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2562 2562 utctime = dataOut.utctime
2563 2563
2564 2564 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2565 2565
2566 2566 #------------------------ SNR --------------------------------------
2567 2567 power = data_acf[:,0,:,:].real
2568 2568 noise = numpy.zeros(nChannels)
2569 2569 SNR = numpy.zeros(power.shape)
2570 2570 for i in range(nChannels):
2571 2571 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2572 2572 SNR[i] = (power[i]-noise[i])/noise[i]
2573 2573 SNRm = numpy.nanmean(SNR, axis = 0)
2574 2574 SNRdB = 10*numpy.log10(SNR)
2575 2575
2576 2576 if mode == 'SA':
2577 2577 dataOut.groupList = dataOut.groupList[1]
2578 2578 nPairs = data_ccf.shape[0]
2579 2579 #---------------------- Coherence and Phase --------------------------
2580 2580 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2581 2581 # phase1 = numpy.copy(phase)
2582 2582 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2583 2583
2584 2584 for p in range(nPairs):
2585 2585 ch0 = pairsList[p][0]
2586 2586 ch1 = pairsList[p][1]
2587 2587 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2588 2588 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2589 2589 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2590 2590 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2591 2591 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2592 2592 coh = numpy.nanmax(coh1, axis = 0)
2593 2593 # struc = numpy.ones((5,1))
2594 2594 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2595 2595 #---------------------- Radial Velocity ----------------------------
2596 2596 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2597 2597 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2598 2598
2599 2599 if allData:
2600 2600 boolMetFin = ~numpy.isnan(SNRm)
2601 2601 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2602 2602 else:
2603 2603 #------------------------ Meteor mask ---------------------------------
2604 2604 # #SNR mask
2605 2605 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2606 2606 #
2607 2607 # #Erase small objects
2608 2608 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2609 2609 #
2610 2610 # auxEEJ = numpy.sum(boolMet1,axis=0)
2611 2611 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2612 2612 # indEEJ = numpy.where(indOver)[0]
2613 2613 # indNEEJ = numpy.where(~indOver)[0]
2614 2614 #
2615 2615 # boolMetFin = boolMet1
2616 2616 #
2617 2617 # if indEEJ.size > 0:
2618 2618 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2619 2619 #
2620 2620 # boolMet2 = coh > cohThresh
2621 2621 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2622 2622 #
2623 2623 # #Final Meteor mask
2624 2624 # boolMetFin = boolMet1|boolMet2
2625 2625
2626 2626 #Coherence mask
2627 2627 boolMet1 = coh > 0.75
2628 2628 struc = numpy.ones((30,1))
2629 2629 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2630 2630
2631 2631 #Derivative mask
2632 2632 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2633 2633 boolMet2 = derPhase < 0.2
2634 2634 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2635 2635 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2636 2636 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2637 2637 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2638 2638 # #Final mask
2639 2639 # boolMetFin = boolMet2
2640 2640 boolMetFin = boolMet1&boolMet2
2641 2641 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2642 2642 #Creating data_param
2643 2643 coordMet = numpy.where(boolMetFin)
2644 2644
2645 2645 tmet = coordMet[0]
2646 2646 hmet = coordMet[1]
2647 2647
2648 2648 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2649 2649 data_param[:,0] = utctime
2650 2650 data_param[:,1] = tmet
2651 2651 data_param[:,2] = hmet
2652 2652 data_param[:,3] = SNRm[tmet,hmet]
2653 2653 data_param[:,4] = velRad[tmet,hmet]
2654 2654 data_param[:,5] = coh[tmet,hmet]
2655 2655 data_param[:,6:] = phase[:,tmet,hmet].T
2656 2656
2657 2657 elif mode == 'DBS':
2658 2658 dataOut.groupList = numpy.arange(nChannels)
2659 2659
2660 2660 #Radial Velocities
2661 2661 phase = numpy.angle(data_acf[:,1,:,:])
2662 2662 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2663 2663 velRad = phase*lamb/(4*numpy.pi*tSamp)
2664 2664
2665 2665 #Spectral width
2666 2666 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2667 2667 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2668 2668 acf1 = data_acf[:,1,:,:]
2669 2669 acf2 = data_acf[:,2,:,:]
2670 2670
2671 2671 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2672 2672 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2673 2673 if allData:
2674 2674 boolMetFin = ~numpy.isnan(SNRdB)
2675 2675 else:
2676 2676 #SNR
2677 2677 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2678 2678 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2679 2679
2680 2680 #Radial velocity
2681 2681 boolMet2 = numpy.abs(velRad) < 20
2682 2682 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2683 2683
2684 2684 #Spectral Width
2685 2685 boolMet3 = spcWidth < 30
2686 2686 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2687 2687 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2688 2688 boolMetFin = boolMet1&boolMet2&boolMet3
2689 2689
2690 2690 #Creating data_param
2691 2691 coordMet = numpy.where(boolMetFin)
2692 2692
2693 2693 cmet = coordMet[0]
2694 2694 tmet = coordMet[1]
2695 2695 hmet = coordMet[2]
2696 2696
2697 2697 data_param = numpy.zeros((tmet.size, 7))
2698 2698 data_param[:,0] = utctime
2699 2699 data_param[:,1] = cmet
2700 2700 data_param[:,2] = tmet
2701 2701 data_param[:,3] = hmet
2702 2702 data_param[:,4] = SNR[cmet,tmet,hmet].T
2703 2703 data_param[:,5] = velRad[cmet,tmet,hmet].T
2704 2704 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2705 2705
2706 2706 # self.dataOut.data_param = data_int
2707 2707 if len(data_param) == 0:
2708 2708 dataOut.flagNoData = True
2709 2709 else:
2710 2710 dataOut.data_param = data_param
2711 2711
2712 2712 def __erase_small(self, binArray, threshX, threshY):
2713 2713 labarray, numfeat = ndimage.measurements.label(binArray)
2714 2714 binArray1 = numpy.copy(binArray)
2715 2715
2716 2716 for i in range(1,numfeat + 1):
2717 2717 auxBin = (labarray==i)
2718 2718 auxSize = auxBin.sum()
2719 2719
2720 2720 x,y = numpy.where(auxBin)
2721 2721 widthX = x.max() - x.min()
2722 2722 widthY = y.max() - y.min()
2723 2723
2724 2724 #width X: 3 seg -> 12.5*3
2725 2725 #width Y:
2726 2726
2727 2727 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2728 2728 binArray1[auxBin] = False
2729 2729
2730 2730 return binArray1
2731 2731
2732 2732 #--------------- Specular Meteor ----------------
2733 2733
2734 2734 class SMDetection(Operation):
2735 2735 '''
2736 2736 Function DetectMeteors()
2737 2737 Based on the method described in:
2738 2738 HOLDSWORTH ET AL. 2004
2739 2739
2740 2740 Input:
2741 2741 self.dataOut.data_pre
2742 2742
2743 2743 centerReceiverIndex: Index of the center receiver among the channels
2744 2744
2745 2745 hei_ref: Height reference for the Beacon signal extraction
2746 2746 tauindex:
2747 2747 predefinedPhaseShifts: Predefined phase offsets for the voltage signals
2748 2748
2749 2749 cohDetection: Whether or not to use coherent detection
2750 2750 cohDet_timeStep: Coherent Detection calculation time step
2751 2751 cohDet_thresh: Coherent Detection phase threshold to correct phases
2752 2752
2753 2753 noise_timeStep: Noise calculation time step
2754 2754 noise_multiple: Noise multiple to define signal threshold
2755 2755
2756 2756 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2757 2757 multDet_rangeLimit: Multiple Detection Removal range limit in km
2758 2758
2759 2759 phaseThresh: Maximum phase difference between receivers for a detection to be considered a meteor
2760 2760 SNRThresh: Minimum SNR of the meteor signal for it to be considered a meteor
2761 2761
2762 2762 hmin: Minimum Height of the meteor to use it in the further wind estimations
2763 2763 hmax: Maximum Height of the meteor to use it in the further wind estimations
2764 2764 azimuth: Azimuth angle correction
2765 2765
2766 2766 Affected:
2767 2767 self.dataOut.data_param
2768 2768
2769 2769 Rejection Criteria (Errors):
2770 2770 0: No error; analysis OK
2771 2771 1: SNR < SNR threshold
2772 2772 2: angle of arrival (AOA) ambiguously determined
2773 2773 3: AOA estimate not feasible
2774 2774 4: Large difference in AOAs obtained from different antenna baselines
2775 2775 5: echo at start or end of time series
2776 2776 6: echo less than 5 samples long; too short for analysis
2777 2777 7: echo rise exceeds 0.3s
2778 2778 8: echo decay time less than twice rise time
2779 2779 9: large power level before echo
2780 2780 10: large power level after echo
2781 2781 11: poor fit to amplitude for estimation of decay time
2782 2782 12: poor fit to CCF phase variation for estimation of radial drift velocity
2783 2783 13: height unresolvable echo: not valid height within 70 to 110 km
2784 2784 14: height ambiguous echo: more then one possible height within 70 to 110 km
2785 2785 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2786 2786 16: oscilatory echo, indicating event most likely not an underdense echo
2787 2787
2788 2788 17: phase difference in meteor Reestimation
2789 2789
2790 2790 Data Storage:
2791 2791 Meteors for Wind Estimation (8):
2792 2792 Utc Time | Range Height
2793 2793 Azimuth Zenith errorCosDir
2794 2794 VelRad errorVelRad
2795 2795 Phase0 Phase1 Phase2 Phase3
2796 2796 TypeError
2797 2797
2798 2798 '''
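# Minimal parameter sketch (illustrative values, not library defaults) matching
# the run() signature below:
#
#   params = dict(cohDetection=True, cohDet_timeStep=1, cohDet_thresh=25,
#                 noise_timeStep=4, noise_multiple=4,
#                 multDet_timeLimit=1, multDet_rangeLimit=3,
#                 phaseThresh=20, SNRThresh=5,
#                 hmin=70, hmax=110, azimuth=45,
#                 channelPositions=[(4.5,2), (2,4.5), (2,2), (2,0), (0,2)])
#   self.run(dataOut, **params)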
2799 2799
2800 2800 def run(self, dataOut, hei_ref = None, tauindex = 0,
2801 2801 phaseOffsets = None,
2802 2802 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2803 2803 noise_timeStep = 4, noise_multiple = 4,
2804 2804 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2805 2805 phaseThresh = 20, SNRThresh = 5,
2806 2806 hmin = 50, hmax=150, azimuth = 0,
2807 2807 channelPositions = None) :
2808 2808
2809 2809
2810 2810 #Getting Pairslist
2811 if channelPositions == None:
2811 if channelPositions is None:
2812 2812 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2813 2813 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Star configuration
2814 2814 meteorOps = SMOperations()
2815 2815 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2816 2816 heiRang = dataOut.getHeiRange()
2817 2817 #Get Beacon signal - No Beacon signal anymore
2818 2818 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2819 2819 #
2820 2820 # if hei_ref != None:
2821 2821 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2822 2822 #
2823 2823
2824 2824
2825 2825 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2826 2826 # see if the user put in pre defined phase shifts
2827 2827 voltsPShift = dataOut.data_pre.copy()
2828 2828
2829 2829 # if predefinedPhaseShifts != None:
2830 2830 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2831 2831 #
2832 2832 # # elif beaconPhaseShifts:
2833 2833 # # #get hardware phase shifts using beacon signal
2834 2834 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2835 2835 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2836 2836 #
2837 2837 # else:
2838 2838 # hardwarePhaseShifts = numpy.zeros(5)
2839 2839 #
2840 2840 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2841 2841 # for i in range(self.dataOut.data_pre.shape[0]):
2842 2842 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2843 2843
2844 2844 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2845 2845
2846 2846 #Remove DC
2847 2847 voltsDC = numpy.mean(voltsPShift,1)
2848 2848 voltsDC = numpy.mean(voltsDC,1)
2849 2849 for i in range(voltsDC.shape[0]):
2850 2850 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2851 2851
2852 2852 #Don't consider the last heights; they're used to calculate the hardware phase shift
2853 2853 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2854 2854
2855 2855 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2856 2856 #Coherent Detection
2857 2857 if cohDetection:
2858 2858 #use coherent detection to get the net power
2859 2859 cohDet_thresh = cohDet_thresh*numpy.pi/180
2860 2860 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2861 2861
2862 2862 #Non-coherent detection!
2863 2863 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2864 2864 #********** END OF COH/NON-COH POWER CALCULATION**********************
2865 2865
2866 2866 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2867 2867 #Get noise
2868 2868 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2869 2869 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2870 2870 #Get signal threshold
2871 2871 signalThresh = noise_multiple*noise
2872 2872 #Meteor echoes detection
2873 2873 listMeteors = self.__findMeteors(powerNet, signalThresh)
2874 2874 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CALCULATION **********
2875 2875
2876 2876 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2877 2877 #Parameters
2878 2878 heiRange = dataOut.getHeiRange()
2879 2879 rangeInterval = heiRange[1] - heiRange[0]
2880 2880 rangeLimit = multDet_rangeLimit/rangeInterval
2881 2881 timeLimit = multDet_timeLimit/dataOut.timeInterval
2882 2882 #Multiple detection removals
2883 2883 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2884 2884 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2885 2885
2886 2886 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2887 2887 #Parameters
2888 2888 phaseThresh = phaseThresh*numpy.pi/180
2889 2889 thresh = [phaseThresh, noise_multiple, SNRThresh]
2890 2890 #Meteor reestimation (Errors N 1, 6, 12, 17)
2891 2891 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2892 2892 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2893 2893 #Estimation of decay times (Errors N 7, 8, 11)
2894 2894 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2895 2895 #******************* END OF METEOR REESTIMATION *******************
2896 2896
2897 2897 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2898 2898 #Calculating Radial Velocity (Error N 15)
2899 2899 radialStdThresh = 10
2900 2900 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2901 2901
2902 2902 if len(listMeteors4) > 0:
2903 2903 #Setting New Array
2904 2904 date = dataOut.utctime
2905 2905 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2906 2906
2907 2907 #Correcting phase offset
2908 2908 if phaseOffsets != None:
2909 2909 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2910 2910 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2911 2911
2912 2912 #Second Pairslist
2913 2913 pairsList = []
2914 2914 pairx = (0,1)
2915 2915 pairy = (2,3)
2916 2916 pairsList.append(pairx)
2917 2917 pairsList.append(pairy)
2918 2918
2919 2919 jph = numpy.array([0,0,0,0])
2920 2920 h = (hmin,hmax)
2921 2921 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2922 2922
2923 2923 # #Calculate AOA (Error N 3, 4)
2924 2924 # #JONES ET AL. 1998
2925 2925 # error = arrayParameters[:,-1]
2926 2926 # AOAthresh = numpy.pi/8
2927 2927 # phases = -arrayParameters[:,9:13]
2928 2928 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2929 2929 #
2930 2930 # #Calculate Heights (Error N 13 and 14)
2931 2931 # error = arrayParameters[:,-1]
2932 2932 # Ranges = arrayParameters[:,2]
2933 2933 # zenith = arrayParameters[:,5]
2934 2934 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2935 2935 # error = arrayParameters[:,-1]
2936 2936 #********************* END OF PARAMETERS CALCULATION **************************
2937 2937
2938 2938 #***************************+ PASS DATA TO NEXT STEP **********************
2939 2939 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2940 2940 dataOut.data_param = arrayParameters
2941 2941
2942 if arrayParameters == None:
2942 if arrayParameters is None:
2943 2943 dataOut.flagNoData = True
2944 2944 else:
2945 2945 dataOut.flagNoData = False
2946 2946
2947 2947 return
2948 2948
2949 2949 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2950 2950
2951 2951 minIndex = min(newheis[0])
2952 2952 maxIndex = max(newheis[0])
2953 2953
2954 2954 voltage = voltage0[:,:,minIndex:maxIndex+1]
2955 2955 nLength = voltage.shape[1]/n
2956 2956 nMin = 0
2957 2957 nMax = 0
2958 2958 phaseOffset = numpy.zeros((len(pairslist),n))
2959 2959
2960 2960 for i in range(n):
2961 2961 nMax += nLength
2962 2962 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2963 2963 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2964 2964 phaseOffset[:,i] = phaseCCF.transpose()
2965 2965 nMin = nMax
2966 2966 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2967 2967
2968 2968 #Remove Outliers
2969 2969 factor = 2
2970 2970 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2971 2971 dw = numpy.std(wt,axis = 1)
2972 2972 dw = dw.reshape((dw.size,1))
2973 2973 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2974 2974 phaseOffset[ind] = numpy.nan
2975 2975 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2976 2976
2977 2977 return phaseOffset
2978 2978
2979 2979 def __shiftPhase(self, data, phaseShift):
2980 2980 #this will shift the phase of a complex number
2981 2981 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2982 2982 return dataShifted
2983 2983
2984 2984 def __estimatePhaseDifference(self, array, pairslist):
2985 2985 nChannel = array.shape[0]
2986 2986 nHeights = array.shape[2]
2987 2987 numPairs = len(pairslist)
2988 2988 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2989 2989 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2990 2990
2991 2991 #Correct phases
2992 2992 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2993 2993 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2994 2994
2995 2995 if indDer[0].shape[0] > 0:
2996 2996 for i in range(indDer[0].shape[0]):
2997 2997 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2998 2998 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2999 2999
3000 3000 # for j in range(numSides):
3001 3001 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
3002 3002 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
3003 3003 #
3004 3004 #Linear
3005 3005 phaseInt = numpy.zeros((numPairs,1))
3006 3006 angAllCCF = phaseCCF[:,[0,1,3,4],0]
3007 3007 for j in range(numPairs):
3008 3008 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
3009 3009 phaseInt[j] = fit[1]
3010 3010 #Phase Differences
3011 3011 phaseDiff = phaseInt - phaseCCF[:,2,:]
3012 3012 phaseArrival = phaseInt.reshape(phaseInt.size)
3013 3013
3014 3014 #Dealias
3015 3015 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
3016 3016 # indAlias = numpy.where(phaseArrival > numpy.pi)
3017 3017 # phaseArrival[indAlias] -= 2*numpy.pi
3018 3018 # indAlias = numpy.where(phaseArrival < -numpy.pi)
3019 3019 # phaseArrival[indAlias] += 2*numpy.pi
3020 3020
3021 3021 return phaseDiff, phaseArrival
3022 3022
3023 3023 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
3024 3024 #this function will run the coherent detection used in Holdsworth et al. 2004 and return the net power
3025 3025 #find the phase shifts of each channel over 1 second intervals
3026 3026 #only look at ranges below the beacon signal
3027 3027 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
3028 3028 numBlocks = int(volts.shape[1]/numProfPerBlock)
3029 3029 numHeights = volts.shape[2]
3030 3030 nChannel = volts.shape[0]
3031 3031 voltsCohDet = volts.copy()
3032 3032
3033 3033 pairsarray = numpy.array(pairslist)
3034 3034 indSides = pairsarray[:,1]
3035 3035 # indSides = numpy.array(range(nChannel))
3036 3036 # indSides = numpy.delete(indSides, indCenter)
3037 3037 #
3038 3038 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
3039 3039 listBlocks = numpy.array_split(volts, numBlocks, 1)
3040 3040
3041 3041 startInd = 0
3042 3042 endInd = 0
3043 3043
3044 3044 for i in range(numBlocks):
3045 3045 startInd = endInd
3046 3046 endInd = endInd + listBlocks[i].shape[1]
3047 3047
3048 3048 arrayBlock = listBlocks[i]
3049 3049 # arrayBlockCenter = listCenter[i]
3050 3050
3051 3051 #Estimate the Phase Difference
3052 3052 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
3053 3053 #Phase Difference RMS
3054 3054 arrayPhaseRMS = numpy.abs(phaseDiff)
3055 3055 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
3056 3056 indPhase = numpy.where(phaseRMSaux==4)
3057 3057 #Shifting
3058 3058 if indPhase[0].shape[0] > 0:
3059 3059 for j in range(indSides.size):
3060 3060 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
3061 3061 voltsCohDet[:,startInd:endInd,:] = arrayBlock
3062 3062
3063 3063 return voltsCohDet
3064 3064
3065 3065 def __calculateCCF(self, volts, pairslist ,laglist):
3066 3066
3067 3067 nHeights = volts.shape[2]
3068 3068 nPoints = volts.shape[1]
3069 3069 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
3070 3070
3071 3071 for i in range(len(pairslist)):
3072 3072 volts1 = volts[pairslist[i][0]]
3073 3073 volts2 = volts[pairslist[i][1]]
3074 3074
3075 3075 for t in range(len(laglist)):
3076 3076 idxT = laglist[t]
3077 3077 if idxT >= 0:
3078 3078 vStacked = numpy.vstack((volts2[idxT:,:],
3079 3079 numpy.zeros((idxT, nHeights),dtype='complex')))
3080 3080 else:
3081 3081 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
3082 3082 volts2[:(nPoints + idxT),:]))
3083 3083 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
3084 3084
3085 3085 vStacked = None
3086 3086 return voltsCCF
3087 3087
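# A minimal sketch (illustrative shapes only, not part of the processing chain) of the lagged
# product that __calculateCCF accumulates for one channel pair and one positive lag:
#
#   nProf, nHeights = 100, 8
#   v0 = numpy.random.randn(nProf, nHeights) + 1j*numpy.random.randn(nProf, nHeights)
#   v1 = numpy.random.randn(nProf, nHeights) + 1j*numpy.random.randn(nProf, nHeights)
#   lag = 1
#   v1lag = numpy.vstack((v1[lag:, :], numpy.zeros((lag, nHeights), dtype='complex')))
#   ccf = numpy.sum(numpy.conjugate(v0)*v1lag, axis=0)   # one CCF value per height at this lag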
3088 3088 def __getNoise(self, power, timeSegment, timeInterval):
3089 3089 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
3090 3090 numBlocks = int(power.shape[0]/numProfPerBlock)
3091 3091 numHeights = power.shape[1]
3092 3092
3093 3093 listPower = numpy.array_split(power, numBlocks, 0)
3094 3094 noise = numpy.zeros((power.shape[0], power.shape[1]))
3095 3095 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
3096 3096
3097 3097 startInd = 0
3098 3098 endInd = 0
3099 3099
3100 3100 for i in range(numBlocks): #split per channel
3101 3101 startInd = endInd
3102 3102 endInd = endInd + listPower[i].shape[0]
3103 3103
3104 3104 arrayBlock = listPower[i]
3105 3105 noiseAux = numpy.mean(arrayBlock, 0)
3106 3106 # noiseAux = numpy.median(noiseAux)
3107 3107 # noiseAux = numpy.mean(arrayBlock)
3108 3108 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
3109 3109
3110 3110 noiseAux1 = numpy.mean(arrayBlock)
3111 3111 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
3112 3112
3113 3113 return noise, noise1
3114 3114
3115 3115 def __findMeteors(self, power, thresh):
3116 3116 nProf = power.shape[0]
3117 3117 nHeights = power.shape[1]
3118 3118 listMeteors = []
3119 3119
3120 3120 for i in range(nHeights):
3121 3121 powerAux = power[:,i]
3122 3122 threshAux = thresh[:,i]
3123 3123
3124 3124 indUPthresh = numpy.where(powerAux > threshAux)[0]
3125 3125 indDNthresh = numpy.where(powerAux <= threshAux)[0]
3126 3126
3127 3127 j = 0
3128 3128
3129 3129 while (j < indUPthresh.size - 2):
3130 3130 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
3131 3131 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
3132 3132 indDNthresh = indDNthresh[indDNAux]
3133 3133
3134 3134 if (indDNthresh.size > 0):
3135 3135 indEnd = indDNthresh[0] - 1
3136 3136 indInit = indUPthresh[j]
3137 3137
3138 3138 meteor = powerAux[indInit:indEnd + 1]
3139 3139 indPeak = meteor.argmax() + indInit
3140 3140 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
3141 3141
3142 3142 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK THIS!!!!!
3143 3143 j = numpy.where(indUPthresh == indEnd)[0] + 1
3144 3144 else: j+=1
3145 3145 else: j+=1
3146 3146
3147 3147 return listMeteors
3148 3148
3149 3149 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
3150 3150
3151 3151 arrayMeteors = numpy.asarray(listMeteors)
3152 3152 listMeteors1 = []
3153 3153
3154 3154 while arrayMeteors.shape[0] > 0:
3155 3155 FLAs = arrayMeteors[:,4]
3156 3156 maxFLA = FLAs.argmax()
3157 3157 listMeteors1.append(arrayMeteors[maxFLA,:])
3158 3158
3159 3159 MeteorInitTime = arrayMeteors[maxFLA,1]
3160 3160 MeteorEndTime = arrayMeteors[maxFLA,3]
3161 3161 MeteorHeight = arrayMeteors[maxFLA,0]
3162 3162
3163 3163 #Check neighborhood
3164 3164 maxHeightIndex = MeteorHeight + rangeLimit
3165 3165 minHeightIndex = MeteorHeight - rangeLimit
3166 3166 minTimeIndex = MeteorInitTime - timeLimit
3167 3167 maxTimeIndex = MeteorEndTime + timeLimit
3168 3168
3169 3169 #Check Heights
3170 3170 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3171 3171 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3172 3172 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3173 3173
3174 3174 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3175 3175
3176 3176 return listMeteors1
3177 3177
3178 3178 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3179 3179 numHeights = volts.shape[2]
3180 3180 nChannel = volts.shape[0]
3181 3181
3182 3182 thresholdPhase = thresh[0]
3183 3183 thresholdNoise = thresh[1]
3184 3184 thresholdDB = float(thresh[2])
3185 3185
3186 3186 thresholdDB1 = 10**(thresholdDB/10)
3187 3187 pairsarray = numpy.array(pairslist)
3188 3188 indSides = pairsarray[:,1]
3189 3189
3190 3190 pairslist1 = list(pairslist)
3191 3191 pairslist1.append((0,1))
3192 3192 pairslist1.append((3,4))
3193 3193
3194 3194 listMeteors1 = []
3195 3195 listPowerSeries = []
3196 3196 listVoltageSeries = []
3197 3197 #volts has the raw data
3198 3198
3199 3199 if frequency == 30e6:
3200 3200 timeLag = 45*10**-3
3201 3201 else:
3202 3202 timeLag = 15*10**-3
3203 3203 lag = numpy.ceil(timeLag/timeInterval)
3204 3204
3205 3205 for i in range(len(listMeteors)):
3206 3206
3207 3207 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3208 3208 meteorAux = numpy.zeros(16)
3209 3209
3210 3210 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3211 3211 mHeight = listMeteors[i][0]
3212 3212 mStart = listMeteors[i][1]
3213 3213 mPeak = listMeteors[i][2]
3214 3214 mEnd = listMeteors[i][3]
3215 3215
3216 3216 #get the volt data between the start and end times of the meteor
3217 3217 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3218 3218 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3219 3219
3220 3220 #3.6. Phase Difference estimation
3221 3221 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3222 3222
3223 3223 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3224 3224 #meteorVolts0.- all Channels, all Profiles
3225 3225 meteorVolts0 = volts[:,:,mHeight]
3226 3226 meteorThresh = noise[:,mHeight]*thresholdNoise
3227 3227 meteorNoise = noise[:,mHeight]
3228 3228 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3229 3229 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3230 3230
3231 3231 #Times reestimation
3232 3232 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3233 3233 if mStart1.size > 0:
3234 3234 mStart1 = mStart1[-1] + 1
3235 3235
3236 3236 else:
3237 3237 mStart1 = mPeak
3238 3238
3239 3239 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3240 3240 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3241 3241 if mEndDecayTime1.size == 0:
3242 3242 mEndDecayTime1 = powerNet0.size
3243 3243 else:
3244 3244 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3245 3245 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3246 3246
3247 3247 #meteorVolts1.- all Channels, from start to end
3248 3248 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3249 3249 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3250 3250 if meteorVolts2.shape[1] == 0:
3251 3251 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3252 3252 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3253 3253 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3254 3254 ##################### END PARAMETERS REESTIMATION #########################
3255 3255
3256 3256 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3257 3257 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3258 3258 if meteorVolts2.shape[1] > 0:
3259 3259 #Phase Difference re-estimation
3260 3260 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3261 3261 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3262 3262 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3263 3263 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3264 3264 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3265 3265
3266 3266 #Phase Difference RMS
3267 3267 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3268 3268 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3269 3269 #Data from Meteor
3270 3270 mPeak1 = powerNet1.argmax() + mStart1
3271 3271 mPeakPower1 = powerNet1.max()
3272 3272 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3273 3273 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3274 3274 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3275 3275 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3276 3276 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3277 3277 #Vectorize
3278 3278 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3279 3279 meteorAux[7:11] = phaseDiffint[0:4]
3280 3280
3281 3281 #Rejection Criterions
3282 3282 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3283 3283 meteorAux[-1] = 17
3284 3284 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3285 3285 meteorAux[-1] = 1
3286 3286
3287 3287
3288 3288 else:
3289 3289 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3290 3290 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3291 3291 PowerSeries = 0
3292 3292
3293 3293 listMeteors1.append(meteorAux)
3294 3294 listPowerSeries.append(PowerSeries)
3295 3295 listVoltageSeries.append(meteorVolts1)
3296 3296
3297 3297 return listMeteors1, listPowerSeries, listVoltageSeries
3298 3298
3299 3299 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3300 3300
3301 3301 threshError = 10
3302 3302 #Depending if it is 30 or 50 MHz
3303 3303 if frequency == 30e6:
3304 3304 timeLag = 45*10**-3
3305 3305 else:
3306 3306 timeLag = 15*10**-3
3307 3307 lag = numpy.ceil(timeLag/timeInterval)
3308 3308
3309 3309 listMeteors1 = []
3310 3310
3311 3311 for i in range(len(listMeteors)):
3312 3312 meteorPower = listPower[i]
3313 3313 meteorAux = listMeteors[i]
3314 3314
3315 3315 if meteorAux[-1] == 0:
3316 3316
3317 3317 try:
3318 3318 indmax = meteorPower.argmax()
3319 3319 indlag = indmax + lag
3320 3320
3321 3321 y = meteorPower[indlag:]
3322 3322 x = numpy.arange(0, y.size)*timeLag
3323 3323
3324 3324 #first guess
3325 3325 a = y[0]
3326 3326 tau = timeLag
3327 3327 #exponential fit
3328 3328 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3329 3329 y1 = self.__exponential_function(x, *popt)
3330 3330 #error estimation
3331 3331 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3332 3332
3333 3333 decayTime = popt[1]
3334 3334 riseTime = indmax*timeInterval
3335 3335 meteorAux[11:13] = [decayTime, error]
3336 3336
3337 3337 #Table items 7, 8 and 11
3338 3338 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3339 3339 meteorAux[-1] = 7
3340 3340 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3341 3341 meteorAux[-1] = 8
3342 3342 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3343 3343 meteorAux[-1] = 11
3344 3344
3345 3345
3346 3346 except:
3347 3347 meteorAux[-1] = 11
3348 3348
3349 3349
3350 3350 listMeteors1.append(meteorAux)
3351 3351
3352 3352 return listMeteors1
3353 3353
3354 3354 #Exponential Function
3355 3355
3356 3356 def __exponential_function(self, x, a, tau):
3357 3357 y = a*numpy.exp(-x/tau)
3358 3358 return y
3359 3359
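# A short sketch of the decay-time fit performed in __estimateDecayTime above, on synthetic
# data (the values and the explicit scipy import are assumptions for illustration; the module
# already uses scipy.optimize as `optimize`):
#
#   from scipy import optimize
#   x = numpy.arange(50)*0.01
#   y = 5.0*numpy.exp(-x/0.12)
#   popt, pcov = optimize.curve_fit(lambda x, a, tau: a*numpy.exp(-x/tau), x, y, p0=[y[0], 0.01])
#   # popt[1] recovers the decay time (~0.12 s); the normalized fit residual is compared
#   # against threshError to flag error number 11.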
3360 3360 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3361 3361
3362 3362 pairslist1 = list(pairslist)
3363 3363 pairslist1.append((0,1))
3364 3364 pairslist1.append((3,4))
3365 3365 numPairs = len(pairslist1)
3366 3366 #Time Lag
3367 3367 timeLag = 45*10**-3
3368 3368 c = 3e8
3369 3369 lag = numpy.ceil(timeLag/timeInterval)
3370 3370 freq = 30e6
3371 3371
3372 3372 listMeteors1 = []
3373 3373
3374 3374 for i in range(len(listMeteors)):
3375 3375 meteorAux = listMeteors[i]
3376 3376 if meteorAux[-1] == 0:
3377 3377 mStart = listMeteors[i][1]
3378 3378 mPeak = listMeteors[i][2]
3379 3379 mLag = mPeak - mStart + lag
3380 3380
3381 3381 #get the volt data between the start and end times of the meteor
3382 3382 meteorVolts = listVolts[i]
3383 3383 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3384 3384
3385 3385 #Get CCF
3386 3386 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3387 3387
3388 3388 #Method 2
3389 3389 slopes = numpy.zeros(numPairs)
3390 3390 time = numpy.array([-2,-1,1,2])*timeInterval
3391 3391 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3392 3392
3393 3393 #Correct phases
3394 3394 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3395 3395 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3396 3396
3397 3397 if indDer[0].shape[0] > 0:
3398 3398 for i in range(indDer[0].shape[0]):
3399 3399 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3400 3400 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3401 3401
3402 3402 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3403 3403 for j in range(numPairs):
3404 3404 fit = stats.linregress(time, angAllCCF[j,:])
3405 3405 slopes[j] = fit[0]
3406 3406
3407 3407 #Remove Outlier
3408 3408 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3409 3409 # slopes = numpy.delete(slopes,indOut)
3410 3410 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3411 3411 # slopes = numpy.delete(slopes,indOut)
3412 3412
3413 3413 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3414 3414 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3415 3415 meteorAux[-2] = radialError
3416 3416 meteorAux[-3] = radialVelocity
3417 3417
3418 3418 #Setting Error
3419 3419 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3420 3420 if numpy.abs(radialVelocity) > 200:
3421 3421 meteorAux[-1] = 15
3422 3422 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3423 3423 elif radialError > radialStdThresh:
3424 3424 meteorAux[-1] = 12
3425 3425
3426 3426 listMeteors1.append(meteorAux)
3427 3427 return listMeteors1
3428 3428
3429 3429 def __setNewArrays(self, listMeteors, date, heiRang):
3430 3430
3431 3431 #New arrays
3432 3432 arrayMeteors = numpy.array(listMeteors)
3433 3433 arrayParameters = numpy.zeros((len(listMeteors), 13))
3434 3434
3435 3435 #Date inclusion
3436 3436 # date = re.findall(r'\((.*?)\)', date)
3437 3437 # date = date[0].split(',')
3438 3438 # date = map(int, date)
3439 3439 #
3440 3440 # if len(date)<6:
3441 3441 # date.append(0)
3442 3442 #
3443 3443 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3444 3444 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3445 3445 arrayDate = numpy.tile(date, (len(listMeteors)))
3446 3446
3447 3447 #Meteor array
3448 3448 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3449 3449 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3450 3450
3451 3451 #Parameters Array
3452 3452 arrayParameters[:,0] = arrayDate #Date
3453 3453 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3454 3454 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3455 3455 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3456 3456 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3457 3457
3458 3458
3459 3459 return arrayParameters
3460 3460
3461 3461 class CorrectSMPhases(Operation):
3462 3462
3463 3463 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3464 3464
3465 3465 arrayParameters = dataOut.data_param
3466 3466 pairsList = []
3467 3467 pairx = (0,1)
3468 3468 pairy = (2,3)
3469 3469 pairsList.append(pairx)
3470 3470 pairsList.append(pairy)
3471 3471 jph = numpy.zeros(4)
3472 3472
3473 3473 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3474 3474 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3475 3475 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3476 3476
3477 3477 meteorOps = SMOperations()
3478 if channelPositions == None:
3478 if channelPositions is None:
3479 3479 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3480 3480 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3481 3481
3482 3482 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3483 3483 h = (hmin,hmax)
3484 3484
3485 3485 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3486 3486
3487 3487 dataOut.data_param = arrayParameters
3488 3488 return
3489 3489
3490 3490 class SMPhaseCalibration(Operation):
3491 3491
3492 3492 __buffer = None
3493 3493
3494 3494 __initime = None
3495 3495
3496 3496 __dataReady = False
3497 3497
3498 3498 __isConfig = False
3499 3499
3500 3500 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3501 3501
3502 3502 dataTime = currentTime + paramInterval
3503 3503 deltaTime = dataTime - initTime
3504 3504
3505 3505 if deltaTime >= outputInterval or deltaTime < 0:
3506 3506 return True
3507 3507
3508 3508 return False
3509 3509
3510 3510 def __getGammas(self, pairs, d, phases):
3511 3511 gammas = numpy.zeros(2)
3512 3512
3513 3513 for i in range(len(pairs)):
3514 3514
3515 3515 pairi = pairs[i]
3516 3516
3517 3517 phip3 = phases[:,pairi[0]]
3518 3518 d3 = d[pairi[0]]
3519 3519 phip2 = phases[:,pairi[1]]
3520 3520 d2 = d[pairi[1]]
3521 3521 #Calculating gamma
3522 3522 # jdcos = alp1/(k*d1)
3523 3523 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3524 3524 jgamma = -phip2*d3/d2 - phip3
3525 3525 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3526 3526 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3527 3527 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3528 3528
3529 3529 #Revised distribution
3530 3530 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3531 3531
3532 3532 #Histogram
3533 3533 nBins = 64
3534 3534 rmin = -0.5*numpy.pi
3535 3535 rmax = 0.5*numpy.pi
3536 3536 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3537 3537
3538 3538 meteorsY = phaseHisto[0]
3539 3539 phasesX = phaseHisto[1][:-1]
3540 3540 width = phasesX[1] - phasesX[0]
3541 3541 phasesX += width/2
3542 3542
3543 3543 #Gaussian approximation
3544 3544 bpeak = meteorsY.argmax()
3545 3545 peak = meteorsY.max()
3546 3546 jmin = bpeak - 5
3547 3547 jmax = bpeak + 5 + 1
3548 3548
3549 3549 if jmin<0:
3550 3550 jmin = 0
3551 3551 jmax = 6
3552 3552 elif jmax > meteorsY.size:
3553 3553 jmin = meteorsY.size - 6
3554 3554 jmax = meteorsY.size
3555 3555
3556 3556 x0 = numpy.array([peak,bpeak,50])
3557 3557 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3558 3558
3559 3559 #Gammas
3560 3560 gammas[i] = coeff[0][1]
3561 3561
3562 3562 return gammas
3563 3563
3564 3564 def __residualFunction(self, coeffs, y, t):
3565 3565
3566 3566 return y - self.__gauss_function(t, coeffs)
3567 3567
3568 3568 def __gauss_function(self, t, coeffs):
3569 3569
3570 3570 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3571 3571
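# Sketch of how the two helpers above are combined inside __getGammas: leastsq adjusts
# [amplitude, centre, width] so the Gaussian matches the phase histogram around its peak,
# and only the fitted centre (coeff[0][1]) is kept as the gamma estimate. The synthetic
# values below are assumptions for illustration:
#
#   from scipy import optimize
#   t = numpy.linspace(-0.5*numpy.pi, 0.5*numpy.pi, 64)
#   y = 30*numpy.exp(-0.5*((t - 0.2)/0.2)**2)            # idealized histogram
#   residual = lambda c, y, t: y - c[0]*numpy.exp(-0.5*((t - c[1])/c[2])**2)
#   coeff = optimize.leastsq(residual, numpy.array([25.0, 0.0, 0.3]), args=(y, t))
#   # coeff[0][1] -> ~0.2, the location of the histogram peak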
3572 3572 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3573 3573 meteorOps = SMOperations()
3574 3574 nchan = 4
3575 3575 pairx = pairsList[0] #x is 0
3576 3576 pairy = pairsList[1] #y is 1
3577 3577 center_xangle = 0
3578 3578 center_yangle = 0
3579 3579 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3580 3580 ntimes = len(range_angle)
3581 3581
3582 3582 nstepsx = 20
3583 3583 nstepsy = 20
3584 3584
3585 3585 for iz in range(ntimes):
3586 3586 min_xangle = -range_angle[iz]/2 + center_xangle
3587 3587 max_xangle = range_angle[iz]/2 + center_xangle
3588 3588 min_yangle = -range_angle[iz]/2 + center_yangle
3589 3589 max_yangle = range_angle[iz]/2 + center_yangle
3590 3590
3591 3591 inc_x = (max_xangle-min_xangle)/nstepsx
3592 3592 inc_y = (max_yangle-min_yangle)/nstepsy
3593 3593
3594 3594 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3595 3595 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3596 3596 penalty = numpy.zeros((nstepsx,nstepsy))
3597 3597 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3598 3598 jph = numpy.zeros(nchan)
3599 3599
3600 3600 # Iterations looking for the offset
3601 3601 for iy in range(int(nstepsy)):
3602 3602 for ix in range(int(nstepsx)):
3603 3603 d3 = d[pairsList[1][0]]
3604 3604 d2 = d[pairsList[1][1]]
3605 3605 d5 = d[pairsList[0][0]]
3606 3606 d4 = d[pairsList[0][1]]
3607 3607
3608 3608 alp2 = alpha_y[iy] #gamma 1
3609 3609 alp4 = alpha_x[ix] #gamma 0
3610 3610
3611 3611 alp3 = -alp2*d3/d2 - gammas[1]
3612 3612 alp5 = -alp4*d5/d4 - gammas[0]
3613 3613 # jph[pairy[1]] = alpha_y[iy]
3614 3614 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3615 3615
3616 3616 # jph[pairx[1]] = alpha_x[ix]
3617 3617 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3618 3618 jph[pairsList[0][1]] = alp4
3619 3619 jph[pairsList[0][0]] = alp5
3620 3620 jph[pairsList[1][0]] = alp3
3621 3621 jph[pairsList[1][1]] = alp2
3622 3622 jph_array[:,ix,iy] = jph
3623 3623 # d = [2.0,2.5,2.5,2.0]
3624 3624 #TODO: check that the meteors are read in correctly
3625 3625 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3626 3626 error = meteorsArray1[:,-1]
3627 3627 ind1 = numpy.where(error==0)[0]
3628 3628 penalty[ix,iy] = ind1.size
3629 3629
3630 3630 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3631 3631 phOffset = jph_array[:,i,j]
3632 3632
3633 3633 center_xangle = phOffset[pairx[1]]
3634 3634 center_yangle = phOffset[pairy[1]]
3635 3635
3636 3636 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3637 3637 phOffset = phOffset*180/numpy.pi
3638 3638 return phOffset
3639 3639
3640 3640
3641 3641 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3642 3642
3643 3643 dataOut.flagNoData = True
3644 3644 self.__dataReady = False
3645 3645 dataOut.outputInterval = nHours*3600
3646 3646
3647 3647 if self.__isConfig == False:
3648 3648 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3649 3649 #Get Initial LTC time
3650 3650 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3651 3651 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3652 3652
3653 3653 self.__isConfig = True
3654 3654
3655 if self.__buffer == None:
3655 if self.__buffer is None:
3656 3656 self.__buffer = dataOut.data_param.copy()
3657 3657
3658 3658 else:
3659 3659 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3660 3660
3661 3661 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3662 3662
3663 3663 if self.__dataReady:
3664 3664 dataOut.utctimeInit = self.__initime
3665 3665 self.__initime += dataOut.outputInterval #to erase time offset
3666 3666
3667 3667 freq = dataOut.frequency
3668 3668 c = dataOut.C #m/s
3669 3669 lamb = c/freq
3670 3670 k = 2*numpy.pi/lamb
3671 3671 azimuth = 0
3672 3672 h = (hmin, hmax)
3673 3673 # pairs = ((0,1),(2,3)) #Estrella
3674 3674 # pairs = ((1,0),(2,3)) #T
3675 3675
3676 3676 if channelPositions is None:
3677 3677 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3678 3678 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3679 3679 meteorOps = SMOperations()
3680 3680 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3681 3681
3682 3682 #Checking correct order of pairs
3683 3683 pairs = []
3684 3684 if distances[1] > distances[0]:
3685 3685 pairs.append((1,0))
3686 3686 else:
3687 3687 pairs.append((0,1))
3688 3688
3689 3689 if distances[3] > distances[2]:
3690 3690 pairs.append((3,2))
3691 3691 else:
3692 3692 pairs.append((2,3))
3693 3693 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3694 3694
3695 3695 meteorsArray = self.__buffer
3696 3696 error = meteorsArray[:,-1]
3697 3697 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3698 3698 ind1 = numpy.where(boolError)[0]
3699 3699 meteorsArray = meteorsArray[ind1,:]
3700 3700 meteorsArray[:,-1] = 0
3701 3701 phases = meteorsArray[:,8:12]
3702 3702
3703 3703 #Calculate Gammas
3704 3704 gammas = self.__getGammas(pairs, distances, phases)
3705 3705 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3706 3706 #Calculate Phases
3707 3707 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3708 3708 phasesOff = phasesOff.reshape((1,phasesOff.size))
3709 3709 dataOut.data_output = -phasesOff
3710 3710 dataOut.flagNoData = False
3711 3711 self.__buffer = None
3712 3712
3713 3713
3714 3714 return
3715 3715
3716 3716 class SMOperations():
3717 3717
3718 3718 def __init__(self):
3719 3719
3720 3720 return
3721 3721
3722 3722 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3723 3723
3724 3724 arrayParameters = arrayParameters0.copy()
3725 3725 hmin = h[0]
3726 3726 hmax = h[1]
3727 3727
3728 3728 #Calculate AOA (Error N 3, 4)
3729 3729 #JONES ET AL. 1998
3730 3730 AOAthresh = numpy.pi/8
3731 3731 error = arrayParameters[:,-1]
3732 3732 phases = -arrayParameters[:,8:12] + jph
3733 3733 # phases = numpy.unwrap(phases)
3734 3734 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3735 3735
3736 3736 #Calculate Heights (Error N 13 and 14)
3737 3737 error = arrayParameters[:,-1]
3738 3738 Ranges = arrayParameters[:,1]
3739 3739 zenith = arrayParameters[:,4]
3740 3740 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3741 3741
3742 3742 #----------------------- Get Final data ------------------------------------
3743 3743 # error = arrayParameters[:,-1]
3744 3744 # ind1 = numpy.where(error==0)[0]
3745 3745 # arrayParameters = arrayParameters[ind1,:]
3746 3746
3747 3747 return arrayParameters
3748 3748
3749 3749 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3750 3750
3751 3751 arrayAOA = numpy.zeros((phases.shape[0],3))
3752 3752 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3753 3753
3754 3754 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3755 3755 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3756 3756 arrayAOA[:,2] = cosDirError
3757 3757
3758 3758 azimuthAngle = arrayAOA[:,0]
3759 3759 zenithAngle = arrayAOA[:,1]
3760 3760
3761 3761 #Setting Error
3762 3762 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3763 3763 error[indError] = 0
3764 3764 #Number 3: AOA not feasible
3765 3765 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3766 3766 error[indInvalid] = 3
3767 3767 #Number 4: Large difference in AOAs obtained from different antenna baselines
3768 3768 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3769 3769 error[indInvalid] = 4
3770 3770 return arrayAOA, error
3771 3771
3772 3772 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3773 3773
3774 3774 #Initializing some variables
3775 3775 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3776 3776 ang_aux = ang_aux.reshape(1,ang_aux.size)
3777 3777
3778 3778 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3779 3779 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3780 3780
3781 3781
3782 3782 for i in range(2):
3783 3783 ph0 = arrayPhase[:,pairsList[i][0]]
3784 3784 ph1 = arrayPhase[:,pairsList[i][1]]
3785 3785 d0 = distances[pairsList[i][0]]
3786 3786 d1 = distances[pairsList[i][1]]
3787 3787
3788 3788 ph0_aux = ph0 + ph1
3789 3789 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3790 3790 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3791 3791 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3792 3792 #First Estimation
3793 3793 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3794 3794
3795 3795 #Most-Accurate Second Estimation
3796 3796 phi1_aux = ph0 - ph1
3797 3797 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3798 3798 #Direction Cosine 1
3799 3799 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3800 3800
3801 3801 #Searching the correct Direction Cosine
3802 3802 cosdir0_aux = cosdir0[:,i]
3803 3803 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3804 3804 #Minimum Distance
3805 3805 cosDiff = (cosdir1 - cosdir0_aux)**2
3806 3806 indcos = cosDiff.argmin(axis = 1)
3807 3807 #Saving Value obtained
3808 3808 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3809 3809
3810 3810 return cosdir0, cosdir
3811 3811
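# Sketch (assumed numbers) of the two-step estimate above: the coarse cosine obtained from the
# summed phase, e.g. cosdir0 = 0.30, is only used to pick, among the candidates
# (phi1 + 2*pi*k)/(2*pi*(d0 + d1)) for k = -8..8, the one closest to it; keeping the nearest
# candidate resolves the 2*pi ambiguity of the second, more finely scaled estimate.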
3812 3812 def __calculateAOA(self, cosdir, azimuth):
3813 3813 cosdirX = cosdir[:,0]
3814 3814 cosdirY = cosdir[:,1]
3815 3815
3816 3816 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3817 3817 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3818 3818 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3819 3819
3820 3820 return angles
3821 3821
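# Worked illustration (assumed values) of the conversion above: a direction-cosine pair
# cosdirX = 0.5, cosdirY = 0.0 gives zenith = arccos(sqrt(1 - 0.25)) = 30 deg and
# azimuth = arctan2(0.5, 0.0) = 90 deg (due east) plus the instrument azimuth offset.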
3822 3822 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3823 3823
3824 3824 Ramb = 375 #Ramb = c/(2*PRF)
3825 3825 Re = 6371 #Earth Radius
3826 3826 heights = numpy.zeros(Ranges.shape)
3827 3827
3828 3828 R_aux = numpy.array([0,1,2])*Ramb
3829 3829 R_aux = R_aux.reshape(1,R_aux.size)
3830 3830
3831 3831 Ranges = Ranges.reshape(Ranges.size,1)
3832 3832
3833 3833 Ri = Ranges + R_aux
3834 3834 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3835 3835
3836 3836 #Check if there is a height between 70 and 110 km
3837 3837 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3838 3838 ind_h = numpy.where(h_bool == 1)[0]
3839 3839
3840 3840 hCorr = hi[ind_h, :]
3841 3841 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3842 3842
3843 3843 hCorr = hi[ind_hCorr][:len(ind_h)]
3844 3844 heights[ind_h] = hCorr
3845 3845
3846 3846 #Setting Error
3847 3847 #Number 13: Height unresolvable echo: no valid height within 70 to 110 km
3848 3848 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3849 3849 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3850 3850 error[indError] = 0
3851 3851 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3852 3852 error[indInvalid2] = 14
3853 3853 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3854 3854 error[indInvalid1] = 13
3855 3855
3856 3856 return heights, error
3857 3857
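# Numeric sketch (assumed values) of the ambiguity resolution above: with Ramb = 375 km a
# measured range of 100 km could correspond to 100, 475 or 850 km. For zenith = 30 deg,
# h = sqrt(Re**2 + Ri**2 + 2*Re*cos(zenith)*Ri) - Re gives roughly 87, 416 and 749 km, so only
# the first candidate falls in the 70-110 km meteor region and that height is kept.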
3858 3858 def getPhasePairs(self, channelPositions):
3859 3859 chanPos = numpy.array(channelPositions)
3860 3860 listOper = list(itertools.combinations(range(5),2))
3861 3861
3862 3862 distances = numpy.zeros(4)
3863 3863 axisX = []
3864 3864 axisY = []
3865 3865 distX = numpy.zeros(3)
3866 3866 distY = numpy.zeros(3)
3867 3867 ix = 0
3868 3868 iy = 0
3869 3869
3870 3870 pairX = numpy.zeros((2,2))
3871 3871 pairY = numpy.zeros((2,2))
3872 3872
3873 3873 for i in range(len(listOper)):
3874 3874 pairi = listOper[i]
3875 3875
3876 3876 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3877 3877
3878 3878 if posDif[0] == 0:
3879 3879 axisY.append(pairi)
3880 3880 distY[iy] = posDif[1]
3881 3881 iy += 1
3882 3882 elif posDif[1] == 0:
3883 3883 axisX.append(pairi)
3884 3884 distX[ix] = posDif[0]
3885 3885 ix += 1
3886 3886
3887 3887 for i in range(2):
3888 3888 if i==0:
3889 3889 dist0 = distX
3890 3890 axis0 = axisX
3891 3891 else:
3892 3892 dist0 = distY
3893 3893 axis0 = axisY
3894 3894
3895 3895 side = numpy.argsort(dist0)[:-1]
3896 3896 axis0 = numpy.array(axis0)[side,:]
3897 3897 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3898 3898 axis1 = numpy.unique(numpy.reshape(axis0,4))
3899 3899 side = axis1[axis1 != chanC]
3900 3900 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3901 3901 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3902 3902 if diff1<0:
3903 3903 chan2 = side[0]
3904 3904 d2 = numpy.abs(diff1)
3905 3905 chan1 = side[1]
3906 3906 d1 = numpy.abs(diff2)
3907 3907 else:
3908 3908 chan2 = side[1]
3909 3909 d2 = numpy.abs(diff2)
3910 3910 chan1 = side[0]
3911 3911 d1 = numpy.abs(diff1)
3912 3912
3913 3913 if i==0:
3914 3914 chanCX = chanC
3915 3915 chan1X = chan1
3916 3916 chan2X = chan2
3917 3917 distances[0:2] = numpy.array([d1,d2])
3918 3918 else:
3919 3919 chanCY = chanC
3920 3920 chan1Y = chan1
3921 3921 chan2Y = chan2
3922 3922 distances[2:4] = numpy.array([d1,d2])
3923 3923 # axisXsides = numpy.reshape(axisX[ix,:],4)
3924 3924 #
3925 3925 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3926 3926 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3927 3927 #
3928 3928 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3929 3929 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3930 3930 # channel25X = int(pairX[0,ind25X])
3931 3931 # channel20X = int(pairX[1,ind20X])
3932 3932 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3933 3933 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3934 3934 # channel25Y = int(pairY[0,ind25Y])
3935 3935 # channel20Y = int(pairY[1,ind20Y])
3936 3936
3937 3937 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3938 3938 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3939 3939
3940 3940 return pairslist, distances
3941 3941 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3942 3942 #
3943 3943 # arrayAOA = numpy.zeros((phases.shape[0],3))
3944 3944 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3945 3945 #
3946 3946 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3947 3947 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3948 3948 # arrayAOA[:,2] = cosDirError
3949 3949 #
3950 3950 # azimuthAngle = arrayAOA[:,0]
3951 3951 # zenithAngle = arrayAOA[:,1]
3952 3952 #
3953 3953 # #Setting Error
3954 3954 # #Number 3: AOA not feasible
3955 3955 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3956 3956 # error[indInvalid] = 3
3957 3957 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3958 3958 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3959 3959 # error[indInvalid] = 4
3960 3960 # return arrayAOA, error
3961 3961 #
3962 3962 # def __getDirectionCosines(self, arrayPhase, pairsList):
3963 3963 #
3964 3964 # #Initializing some variables
3965 3965 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3966 3966 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3967 3967 #
3968 3968 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3969 3969 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3970 3970 #
3971 3971 #
3972 3972 # for i in range(2):
3973 3973 # #First Estimation
3974 3974 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3975 3975 # #Dealias
3976 3976 # indcsi = numpy.where(phi0_aux > numpy.pi)
3977 3977 # phi0_aux[indcsi] -= 2*numpy.pi
3978 3978 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3979 3979 # phi0_aux[indcsi] += 2*numpy.pi
3980 3980 # #Direction Cosine 0
3981 3981 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3982 3982 #
3983 3983 # #Most-Accurate Second Estimation
3984 3984 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3985 3985 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3986 3986 # #Direction Cosine 1
3987 3987 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3988 3988 #
3989 3989 # #Searching the correct Direction Cosine
3990 3990 # cosdir0_aux = cosdir0[:,i]
3991 3991 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3992 3992 # #Minimum Distance
3993 3993 # cosDiff = (cosdir1 - cosdir0_aux)**2
3994 3994 # indcos = cosDiff.argmin(axis = 1)
3995 3995 # #Saving Value obtained
3996 3996 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3997 3997 #
3998 3998 # return cosdir0, cosdir
3999 3999 #
4000 4000 # def __calculateAOA(self, cosdir, azimuth):
4001 4001 # cosdirX = cosdir[:,0]
4002 4002 # cosdirY = cosdir[:,1]
4003 4003 #
4004 4004 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
4005 4005 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
4006 4006 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
4007 4007 #
4008 4008 # return angles
4009 4009 #
4010 4010 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
4011 4011 #
4012 4012 # Ramb = 375 #Ramb = c/(2*PRF)
4013 4013 # Re = 6371 #Earth Radius
4014 4014 # heights = numpy.zeros(Ranges.shape)
4015 4015 #
4016 4016 # R_aux = numpy.array([0,1,2])*Ramb
4017 4017 # R_aux = R_aux.reshape(1,R_aux.size)
4018 4018 #
4019 4019 # Ranges = Ranges.reshape(Ranges.size,1)
4020 4020 #
4021 4021 # Ri = Ranges + R_aux
4022 4022 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
4023 4023 #
4024 4024 # #Check if there is a height between 70 and 110 km
4025 4025 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
4026 4026 # ind_h = numpy.where(h_bool == 1)[0]
4027 4027 #
4028 4028 # hCorr = hi[ind_h, :]
4029 4029 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
4030 4030 #
4031 4031 # hCorr = hi[ind_hCorr]
4032 4032 # heights[ind_h] = hCorr
4033 4033 #
4034 4034 # #Setting Error
4035 4035 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
4036 4036 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
4037 4037 #
4038 4038 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
4039 4039 # error[indInvalid2] = 14
4040 4040 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
4041 4041 # error[indInvalid1] = 13
4042 4042 #
4043 4043 # return heights, error
4044 4044 No newline at end of file
@@ -1,619 +1,636
1 1 '''
2 2 @author: Juan C. Espinoza
3 3 '''
4 4
5 5 import time
6 6 import json
7 7 import numpy
8 8 import paho.mqtt.client as mqtt
9 9 import zmq
10 10 import datetime
11 11 from zmq.utils.monitor import recv_monitor_message
12 12 from functools import wraps
13 13 from threading import Thread
14 14 from multiprocessing import Process
15 15
16 16 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
17 17 from schainpy.model.data.jrodata import JROData
18 18 from schainpy.utils import log
19 19
20 20 MAXNUMX = 100
21 21 MAXNUMY = 100
22 22
23 23 class PrettyFloat(float):
24 24 def __repr__(self):
25 25 return '%.2f' % self
26 26
27 27 def roundFloats(obj):
28 28 if isinstance(obj, list):
29 29 return map(roundFloats, obj)
30 30 elif isinstance(obj, float):
31 31 return round(obj, 2)
32 32
33 33 def decimate(z, MAXNUMY):
34 34 dy = int(len(z[0])/MAXNUMY) + 1
35 35
36 36 return z[::, ::dy]
37 37
38 38 class throttle(object):
39 39 '''
40 40 Decorator that prevents a function from being called more than once every
41 41 time period.
42 42 To create a function that cannot be called more than once a minute, but
43 43 will sleep until it can be called:
44 44 @throttle(minutes=1)
45 45 def foo():
46 46 pass
47 47
48 48 for i in range(10):
49 49 foo()
50 50 print "This function has run %s times." % i
51 51 '''
52 52
53 53 def __init__(self, seconds=0, minutes=0, hours=0):
54 54 self.throttle_period = datetime.timedelta(
55 55 seconds=seconds, minutes=minutes, hours=hours
56 56 )
57 57
58 58 self.time_of_last_call = datetime.datetime.min
59 59
60 60 def __call__(self, fn):
61 61 @wraps(fn)
62 62 def wrapper(*args, **kwargs):
63 now = datetime.datetime.now()
64 time_since_last_call = now - self.time_of_last_call
65 time_left = self.throttle_period - time_since_last_call
63 coerce = kwargs.pop('coerce', None)
64 if coerce:
65 self.time_of_last_call = datetime.datetime.now()
66 return fn(*args, **kwargs)
67 else:
68 now = datetime.datetime.now()
69 time_since_last_call = now - self.time_of_last_call
70 time_left = self.throttle_period - time_since_last_call
66 71
67 if time_left > datetime.timedelta(seconds=0):
68 return
72 if time_left > datetime.timedelta(seconds=0):
73 return
69 74
70 75 self.time_of_last_call = datetime.datetime.now()
71 76 return fn(*args, **kwargs)
72 77
73 78 return wrapper
74 79
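# Minimal usage sketch of the `coerce` keyword handled in wrapper() above (the function name
# and messages are assumptions for illustration): a throttled call can be forced through, and
# the period restarted, regardless of how recently the wrapped function ran.
#
#   @throttle(seconds=5)
#   def send(data):
#       print data
#
#   send('a')                # runs
#   send('b')                # dropped: called again within 5 s
#   send('c', coerce=True)   # runs anyway and resets the timer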
75 80 class Data(object):
76 81 '''
77 82 Object to hold data to be plotted
78 83 '''
79 84
80 85 def __init__(self, plottypes, throttle_value):
81 86 self.plottypes = plottypes
82 87 self.throttle = throttle_value
83 88 self.ended = False
84 89 self.localtime = False
85 90 self.__times = []
86 91 self.__heights = []
87 92
88 93 def __str__(self):
89 94 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
90 95 return 'Data[{}][{}]'.format(';'.join(dum), len(self.__times))
91 96
92 97 def __len__(self):
93 98 return len(self.__times)
94 99
95 100 def __getitem__(self, key):
96 101 if key not in self.data:
97 102 raise KeyError(log.error('Missing key: {}'.format(key)))
98 103
99 104 if 'spc' in key:
100 105 ret = self.data[key]
101 106 else:
102 107 ret = numpy.array([self.data[key][x] for x in self.times])
103 108 if ret.ndim > 1:
104 109 ret = numpy.swapaxes(ret, 0, 1)
105 110 return ret
106 111
112 def __contains__(self, key):
113 return key in self.data
114
107 115 def setup(self):
108 116 '''
109 117 Configure object
110 118 '''
111 119
112 120 self.ended = False
113 121 self.data = {}
114 122 self.__times = []
115 123 self.__heights = []
116 124 self.__all_heights = set()
117 125 for plot in self.plottypes:
118 126 if 'snr' in plot:
119 127 plot = 'snr'
120 128 self.data[plot] = {}
121 129
122 130 def shape(self, key):
123 131 '''
124 132 Get the shape of the one-element data for the given key
125 133 '''
126 134
127 135 if len(self.data[key]):
128 136 if 'spc' in key:
129 137 return self.data[key].shape
130 138 return self.data[key][self.__times[0]].shape
131 139 return (0,)
132 140
133 141 def update(self, dataOut):
134 142 '''
135 143 Update data object with new dataOut
136 144 '''
137 145
138 146 tm = dataOut.utctime
139 147 if tm in self.__times:
140 148 return
141 149
142 150 self.parameters = getattr(dataOut, 'parameters', [])
143 151 self.pairs = dataOut.pairsList
144 152 self.channels = dataOut.channelList
145 153 self.interval = dataOut.getTimeInterval()
146 154 self.localtime = dataOut.useLocalTime
147 155 if 'spc' in self.plottypes or 'cspc' in self.plottypes:
148 156 self.xrange = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
149 157 self.__heights.append(dataOut.heightList)
150 158 self.__all_heights.update(dataOut.heightList)
151 159 self.__times.append(tm)
152 160
153 161 for plot in self.plottypes:
154 162 if plot == 'spc':
155 163 z = dataOut.data_spc/dataOut.normFactor
156 164 self.data[plot] = 10*numpy.log10(z)
157 165 if plot == 'cspc':
158 166 self.data[plot] = dataOut.data_cspc
159 167 if plot == 'noise':
160 168 self.data[plot][tm] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
161 169 if plot == 'rti':
162 170 self.data[plot][tm] = dataOut.getPower()
163 171 if plot == 'snr_db':
164 172 self.data['snr'][tm] = dataOut.data_SNR
165 173 if plot == 'snr':
166 174 self.data[plot][tm] = 10*numpy.log10(dataOut.data_SNR)
167 175 if plot == 'dop':
168 176 self.data[plot][tm] = 10*numpy.log10(dataOut.data_DOP)
169 177 if plot == 'mean':
170 178 self.data[plot][tm] = dataOut.data_MEAN
171 179 if plot == 'std':
172 180 self.data[plot][tm] = dataOut.data_STD
173 181 if plot == 'coh':
174 182 self.data[plot][tm] = dataOut.getCoherence()
175 183 if plot == 'phase':
176 184 self.data[plot][tm] = dataOut.getCoherence(phase=True)
177 185 if plot == 'output':
178 186 self.data[plot][tm] = dataOut.data_output
179 187 if plot == 'param':
180 188 self.data[plot][tm] = dataOut.data_param
181 189
182 190 def normalize_heights(self):
183 191 '''
184 192 Ensure same-dimension of the data for different heighList
185 193 '''
186 194
187 195 H = numpy.array(list(self.__all_heights))
188 196 H.sort()
189 197 for key in self.data:
190 198 shape = self.shape(key)[:-1] + H.shape
191 199 for tm, obj in self.data[key].items():
192 200 h = self.__heights[self.__times.index(tm)]
193 201 if H.size == h.size:
194 202 continue
195 203 index = numpy.where(numpy.in1d(H, h))[0]
196 204 dummy = numpy.zeros(shape) + numpy.nan
197 205 if len(shape) == 2:
198 206 dummy[:, index] = obj
199 207 else:
200 208 dummy[index] = obj
201 209 self.data[key][tm] = dummy
202 210
203 211 self.__heights = [H for tm in self.__times]
204 212
205 213 def jsonify(self, decimate=False):
206 214 '''
207 215 Convert data to json
208 216 '''
209 217
210 218 ret = {}
211 219 tm = self.times[-1]
212 220
213 221 for key in self.data:
214 222 if key in ('spc', 'cspc'):
215 223 ret[key] = roundFloats(self.data[key].tolist())
216 224 else:
217 225 ret[key] = roundFloats(self.data[key][tm].tolist())
218 226
219 227 ret['timestamp'] = tm
220 228 ret['interval'] = self.interval
221 229
222 230 @property
223 231 def times(self):
224 232 '''
225 233 Return the list of times of the current data
226 234 '''
227 235
228 236 ret = numpy.array(self.__times)
229 237 ret.sort()
230 238 return ret
231 239
232 240 @property
233 241 def heights(self):
234 242 '''
235 243 Return the list of heights of the current data
236 244 '''
237 245
238 246 return numpy.array(self.__heights[-1])
239 247
240 248 class PublishData(Operation):
241 249 '''
242 250 Operation to send data over zmq.
243 251 '''
244 252
253 __attrs__ = ['host', 'port', 'delay', 'zeromq', 'mqtt', 'verbose']
254
245 255 def __init__(self, **kwargs):
246 256 """Inicio."""
247 257 Operation.__init__(self, **kwargs)
248 258 self.isConfig = False
249 259 self.client = None
250 260 self.zeromq = None
251 261 self.mqtt = None
252 262
253 263 def on_disconnect(self, client, userdata, rc):
254 264 if rc != 0:
255 265 log.warning('Unexpected disconnection.')
256 266 self.connect()
257 267
258 268 def connect(self):
259 269 log.warning('trying to connect')
260 270 try:
261 271 self.client.connect(
262 272 host=self.host,
263 273 port=self.port,
264 274 keepalive=60*10,
265 275 bind_address='')
266 276 self.client.loop_start()
267 277 # self.client.publish(
268 278 # self.topic + 'SETUP',
269 279 # json.dumps(setup),
270 280 # retain=True
271 281 # )
272 282 except:
273 283 log.error('MQTT Conection error.')
274 284 self.client = False
275 285
276 286 def setup(self, port=1883, username=None, password=None, clientId="user", zeromq=1, verbose=True, **kwargs):
277 287 self.counter = 0
278 288 self.topic = kwargs.get('topic', 'schain')
279 289 self.delay = kwargs.get('delay', 0)
280 290 self.plottype = kwargs.get('plottype', 'spectra')
281 291 self.host = kwargs.get('host', "10.10.10.82")
282 292 self.port = kwargs.get('port', 3000)
283 293 self.clientId = clientId
284 294 self.cnt = 0
285 295 self.zeromq = zeromq
286 296 self.mqtt = kwargs.get('mqtt', 0)
287 297 self.client = None
288 298 self.verbose = verbose
289 299 setup = []
290 300 if self.mqtt is 1:
291 301 self.client = mqtt.Client(
292 302 client_id=self.clientId + self.topic + 'SCHAIN',
293 303 clean_session=True)
294 304 self.client.on_disconnect = self.on_disconnect
295 305 self.connect()
296 306 for plot in self.plottype:
297 307 setup.append({
298 308 'plot': plot,
299 309 'topic': self.topic + plot,
300 310 'title': getattr(self, plot + '_' + 'title', False),
301 311 'xlabel': getattr(self, plot + '_' + 'xlabel', False),
302 312 'ylabel': getattr(self, plot + '_' + 'ylabel', False),
303 313 'xrange': getattr(self, plot + '_' + 'xrange', False),
304 314 'yrange': getattr(self, plot + '_' + 'yrange', False),
305 315 'zrange': getattr(self, plot + '_' + 'zrange', False),
306 316 })
307 317 if zeromq is 1:
308 318 context = zmq.Context()
309 319 self.zmq_socket = context.socket(zmq.PUSH)
310 320 server = kwargs.get('server', 'zmq.pipe')
311 321
312 322 if 'tcp://' in server:
313 323 address = server
314 324 else:
315 325 address = 'ipc:///tmp/%s' % server
316 326
317 327 self.zmq_socket.connect(address)
318 328 time.sleep(1)
319 329
320 330
321 331 def publish_data(self):
322 332 self.dataOut.finished = False
323 333 if self.mqtt is 1:
324 334 yData = self.dataOut.heightList[:2].tolist()
325 335 if self.plottype == 'spectra':
326 336 data = getattr(self.dataOut, 'data_spc')
327 337 z = data/self.dataOut.normFactor
328 338 zdB = 10*numpy.log10(z)
329 339 xlen, ylen = zdB[0].shape
330 340 dx = int(xlen/MAXNUMX) + 1
331 341 dy = int(ylen/MAXNUMY) + 1
332 342 Z = [0 for i in self.dataOut.channelList]
333 343 for i in self.dataOut.channelList:
334 344 Z[i] = zdB[i][::dx, ::dy].tolist()
335 345 payload = {
336 346 'timestamp': self.dataOut.utctime,
337 347 'data': roundFloats(Z),
338 348 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
339 349 'interval': self.dataOut.getTimeInterval(),
340 350 'type': self.plottype,
341 351 'yData': yData
342 352 }
343 353
344 354 elif self.plottype in ('rti', 'power'):
345 355 data = getattr(self.dataOut, 'data_spc')
346 356 z = data/self.dataOut.normFactor
347 357 avg = numpy.average(z, axis=1)
348 358 avgdB = 10*numpy.log10(avg)
349 359 xlen, ylen = z[0].shape
350 360 dy = int(ylen/MAXNUMY) + 1
351 361 AVG = [0 for i in self.dataOut.channelList]
352 362 for i in self.dataOut.channelList:
353 363 AVG[i] = avgdB[i][::dy].tolist()
354 364 payload = {
355 365 'timestamp': self.dataOut.utctime,
356 366 'data': roundFloats(AVG),
357 367 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
358 368 'interval': self.dataOut.getTimeInterval(),
359 369 'type': self.plottype,
360 370 'yData': yData
361 371 }
362 372 elif self.plottype == 'noise':
363 373 noise = self.dataOut.getNoise()/self.dataOut.normFactor
364 374 noisedB = 10*numpy.log10(noise)
365 375 payload = {
366 376 'timestamp': self.dataOut.utctime,
367 377 'data': roundFloats(noisedB.reshape(-1, 1).tolist()),
368 378 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
369 379 'interval': self.dataOut.getTimeInterval(),
370 380 'type': self.plottype,
371 381 'yData': yData
372 382 }
373 383 elif self.plottype == 'snr':
374 384 data = getattr(self.dataOut, 'data_SNR')
375 385 avgdB = 10*numpy.log10(data)
376 386
377 387 ylen = data[0].size
378 388 dy = int(ylen/MAXNUMY) + 1
379 389 AVG = [0 for i in self.dataOut.channelList]
380 390 for i in self.dataOut.channelList:
381 391 AVG[i] = avgdB[i][::dy].tolist()
382 392 payload = {
383 393 'timestamp': self.dataOut.utctime,
384 394 'data': roundFloats(AVG),
385 395 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
386 396 'type': self.plottype,
387 397 'yData': yData
388 398 }
389 399 else:
390 400 print "Invalid plot type"
391 401 payload = {
392 402 'data': 'None',
393 403 'timestamp': 'None',
394 404 'type': None
395 405 }
396 406
397 407 self.client.publish(self.topic + self.plottype, json.dumps(payload), qos=0)
398 408
399 409 if self.zeromq is 1:
400 410 if self.verbose:
401 411 log.log(
402 412 'Sending {} - {}'.format(self.dataOut.type, self.dataOut.datatime),
403 413 self.name
404 414 )
405 415 self.zmq_socket.send_pyobj(self.dataOut)
406 416
407 417 def run(self, dataOut, **kwargs):
408 418 self.dataOut = dataOut
409 419 if not self.isConfig:
410 420 self.setup(**kwargs)
411 421 self.isConfig = True
412 422
413 423 self.publish_data()
414 424 time.sleep(self.delay)
415 425
416 426 def close(self):
417 427 if self.zeromq is 1:
418 428 self.dataOut.finished = True
419 429 self.zmq_socket.send_pyobj(self.dataOut)
420 430 time.sleep(0.1)
421 431 self.zmq_socket.close()
422 432 if self.client:
423 433 self.client.loop_stop()
424 434 self.client.disconnect()
425 435
426 436
427 437 class ReceiverData(ProcessingUnit):
428 438
439 __attrs__ = ['server']
440
429 441 def __init__(self, **kwargs):
430 442
431 443 ProcessingUnit.__init__(self, **kwargs)
432 444
433 445 self.isConfig = False
434 446 server = kwargs.get('server', 'zmq.pipe')
435 447 if 'tcp://' in server:
436 448 address = server
437 449 else:
438 450 address = 'ipc:///tmp/%s' % server
439 451
440 452 self.address = address
441 453 self.dataOut = JROData()
442 454
443 455 def setup(self):
444 456
445 457 self.context = zmq.Context()
446 458 self.receiver = self.context.socket(zmq.PULL)
447 459 self.receiver.bind(self.address)
448 460 time.sleep(0.5)
449 461 log.success('ReceiverData from {}'.format(self.address))
450 462
451 463
452 464 def run(self):
453 465
454 466 if not self.isConfig:
455 467 self.setup()
456 468 self.isConfig = True
457 469
458 470 self.dataOut = self.receiver.recv_pyobj()
459 471 log.log('{} - {}'.format(self.dataOut.type,
460 472 self.dataOut.datatime.ctime(),),
461 473 'Receiving')
462 474
463 475
464 476 class PlotterReceiver(ProcessingUnit, Process):
465 477
466 478 throttle_value = 5
479 __attrs__ = ['server', 'plottypes', 'realtime', 'localtime', 'throttle']
467 480
468 481 def __init__(self, **kwargs):
469 482
470 483 ProcessingUnit.__init__(self, **kwargs)
471 484 Process.__init__(self)
472 485 self.mp = False
473 486 self.isConfig = False
474 487 self.isWebConfig = False
475 488 self.connections = 0
476 489 server = kwargs.get('server', 'zmq.pipe')
477 490 plot_server = kwargs.get('plot_server', 'zmq.web')
478 491 if 'tcp://' in server:
479 492 address = server
480 493 else:
481 494 address = 'ipc:///tmp/%s' % server
482 495
483 496 if 'tcp://' in plot_server:
484 497 plot_address = plot_server
485 498 else:
486 499 plot_address = 'ipc:///tmp/%s' % plot_server
487 500
488 501 self.address = address
489 502 self.plot_address = plot_address
490 503 self.plottypes = [s.strip() for s in kwargs.get('plottypes', 'rti').split(',')]
491 504 self.realtime = kwargs.get('realtime', False)
492 505 self.localtime = kwargs.get('localtime', True)
493 506 self.throttle_value = kwargs.get('throttle', 5)
494 507 self.sendData = self.initThrottle(self.throttle_value)
495 508 self.dates = []
496 509 self.setup()
497 510
498 511 def setup(self):
499 512
500 513 self.data = Data(self.plottypes, self.throttle_value)
501 514 self.isConfig = True
502 515
503 516 def event_monitor(self, monitor):
504 517
505 518 events = {}
506 519
507 520 for name in dir(zmq):
508 521 if name.startswith('EVENT_'):
509 522 value = getattr(zmq, name)
510 523 events[value] = name
511 524
512 525 while monitor.poll():
513 526 evt = recv_monitor_message(monitor)
514 527 if evt['event'] == zmq.EVENT_ACCEPTED:
515 528 self.connections += 1
516 529 if evt['event'] == zmq.EVENT_DISCONNECTED:
517 530 pass
518 531
519 532 evt.update({'description': events[evt['event']]})
520 533
521 534 if evt['event'] == zmq.EVENT_MONITOR_STOPPED:
522 535 break
523 536 monitor.close()
524 537 print('event monitor thread done!')
525 538
526 539 def initThrottle(self, throttle_value):
527 540
528 541 @throttle(seconds=throttle_value)
529 542 def sendDataThrottled(fn_sender, data):
530 543 fn_sender(data)
531 544
532 545 return sendDataThrottled
533 546
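# 'throttle' is imported from elsewhere in schainpy; the commented-out decorator
# below is only an illustrative sketch of such a rate limiter (an assumption, not
# the project's implementation): calls arriving less than 'seconds' after the last
# forwarded call are silently dropped.
#
#   import time
#   from functools import wraps
#
#   def throttle(seconds=5):
#       def decorator(fn):
#           last = [0.0]
#           @wraps(fn)
#           def wrapper(*args, **kwargs):
#               now = time.time()
#               if now - last[0] >= seconds:
#                   last[0] = now
#                   return fn(*args, **kwargs)
#           return wrapper
#       return decorator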
534 547 def send(self, data):
535 548 log.success('Sending {}'.format(data), self.name)
536 549 self.sender.send_pyobj(data)
537 550
538 551 def run(self):
539 552
540 553 log.success(
541 554 'Starting from {}'.format(self.address),
542 555 self.name
543 556 )
544 557
545 558 self.context = zmq.Context()
546 559 self.receiver = self.context.socket(zmq.PULL)
547 560 self.receiver.bind(self.address)
548 561 monitor = self.receiver.get_monitor_socket()
549 562 self.sender = self.context.socket(zmq.PUB)
550 563 if self.realtime:
551 564 self.sender_web = self.context.socket(zmq.PUB)
552 565 self.sender_web.connect(self.plot_address)
553 566 time.sleep(1)
554 567
555 568 if 'server' in self.kwargs:
556 569 self.sender.bind("ipc:///tmp/{}.plots".format(self.kwargs['server']))
557 570 else:
558 571 self.sender.bind("ipc:///tmp/zmq.plots")
559 572
560 573 time.sleep(2)
561 574
562 575 t = Thread(target=self.event_monitor, args=(monitor,))
563 576 t.start()
564 577
565 578 while True:
566 579 dataOut = self.receiver.recv_pyobj()
567 tm = dataOut.utctime
568 if dataOut.useLocalTime:
569 if not self.localtime:
570 tm += time.timezone
571 dt = datetime.datetime.fromtimestamp(tm).date()
572 else:
573 if self.localtime:
574 tm -= time.timezone
575 dt = datetime.datetime.utcfromtimestamp(tm).date()
576 sended = False
577 if dt not in self.dates:
578 if self.data:
579 self.data.ended = True
580 self.send(self.data)
581 sended = True
582 self.data.setup()
583 self.dates.append(dt)
584
585 self.data.update(dataOut)
580 if not dataOut.flagNoData:
581 if dataOut.type == 'Parameters':
582 tm = dataOut.utctimeInit
583 else:
584 tm = dataOut.utctime
585 if dataOut.useLocalTime:
586 if not self.localtime:
587 tm += time.timezone
588 dt = datetime.datetime.fromtimestamp(tm).date()
589 else:
590 if self.localtime:
591 tm -= time.timezone
592 dt = datetime.datetime.utcfromtimestamp(tm).date()
593 coerce = False
594 if dt not in self.dates:
595 if self.data:
596 self.data.ended = True
597 self.send(self.data)
598 coerce = True
599 self.data.setup()
600 self.dates.append(dt)
586 601
602 self.data.update(dataOut)
603
587 604 if dataOut.finished is True:
588 605 self.connections -= 1
589 606 if self.connections == 0 and dt in self.dates:
590 607 self.data.ended = True
591 608 self.send(self.data)
592 609 self.data.setup()
593 610 else:
594 611 if self.realtime:
595 612 self.send(self.data)
596 613 # self.sender_web.send_string(self.data.jsonify())
597 else:
598 if not sended:
599 self.sendData(self.send, self.data)
614 else:
615 self.sendData(self.send, self.data, coerce=coerce)
616 coerce = False
600 617
601 618 return
602 619
603 620 def sendToWeb(self):
604 621
605 622 if not self.isWebConfig:
606 623 context = zmq.Context()
607 624 sender_web_config = context.socket(zmq.PUB)
608 625 if 'tcp://' in self.plot_address:
609 626 dum, address, port = self.plot_address.split(':')
610 627 conf_address = '{}:{}:{}'.format(dum, address, int(port)+1)
611 628 else:
612 629 conf_address = self.plot_address + '.config'
613 630 sender_web_config.bind(conf_address)
614 631 time.sleep(1)
615 632 for kwargs in self.operationKwargs.values():
616 633 if 'plot' in kwargs:
617 634 log.success('[Sending] Config data to web for {}'.format(kwargs['code'].upper()))
618 635 sender_web_config.send_string(json.dumps(kwargs))
619 636 self.isWebConfig = True
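# A minimal consumer sketch for the plots PUB socket above (assumptions: the
# default 'ipc:///tmp/zmq.plots' endpoint and pickled Data objects sent with
# send_pyobj by PlotterReceiver).
import zmq

context = zmq.Context()
receiver = context.socket(zmq.SUB)
receiver.setsockopt(zmq.SUBSCRIBE, b'')   # subscribe to every message
receiver.connect('ipc:///tmp/zmq.plots')
while True:
    data = receiver.recv_pyobj()          # Data object published by PlotterReceiver
    # ... hand 'data' to a plotting routine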
@@ -1,81 +1,80
1 1 import schainpy
2 2 from schainpy.model import Operation, ProcessingUnit
3 from importlib import import_module
4 3 from pydoc import locate
5 4
6 5 def clean_modules(module):
7 6 noEndsUnder = [x for x in module if not x.endswith('__')]
8 7 noStartUnder = [x for x in noEndsUnder if not x.startswith('__')]
9 8 noFullUpper = [x for x in noStartUnder if not x.isupper()]
10 9 return noFullUpper
11 10
12 11 def check_module(possible, instance):
13 12 def check(x):
14 try:
13 try:
15 14 instancia = locate('schainpy.model.{}'.format(x))
16 15 return isinstance(instancia(), instance)
17 16 except Exception as e:
18 return False
17 return False
19 18 clean = clean_modules(possible)
20 19 return [x for x in clean if check(x)]
21 20
22 21
23 22 def getProcs():
24 module = dir(import_module('schainpy.model'))
23 module = dir(schainpy.model)
25 24 procs = check_module(module, ProcessingUnit)
26 25 try:
27 26 procs.remove('ProcessingUnit')
28 27 except Exception as e:
29 28 pass
30 29 return procs
31 30
32 31 def getOperations():
33 module = dir(import_module('schainpy.model'))
32 module = dir(schainpy.model)
34 33 noProcs = [x for x in module if not x.endswith('Proc')]
35 34 operations = check_module(noProcs, Operation)
36 35 try:
37 36 operations.remove('Operation')
38 37 except Exception as e:
39 38 pass
40 39 return operations
41 40
42 41 def getArgs(op):
43 42 module = locate('schainpy.model.{}'.format(op))
44 43 args = module().getAllowedArgs()
45 44 try:
46 45 args.remove('self')
47 46 except Exception as e:
48 47 pass
49 48 try:
50 49 args.remove('dataOut')
51 50 except Exception as e:
52 51 pass
53 52 return args
54 53
55 54 def getAll():
56 allModules = dir(import_module('schainpy.model'))
55 allModules = dir(schainpy.model)
57 56 modules = check_module(allModules, Operation)
58 57 modules.extend(check_module(allModules, ProcessingUnit))
59 58 return modules
60 59
61 60 def formatArgs(op):
62 61 args = getArgs(op)
63 62
64 63 argsAsKey = ["\t'{}'".format(x) for x in args]
65 64 argsFormatted = ": 'string',\n".join(argsAsKey)
66 65
67 66 print op
68 67 print "parameters = { \n" + argsFormatted + ": 'string',\n }"
69 68 print '\n'
70 69
71 70
72 71 if __name__ == "__main__":
73 72 getAll()
74 73 [formatArgs(x) for x in getAll()]
75 74
76 75 '''
77 76 parameters = {
78 77 'id': ,
79 78 'wintitle': ,
80 79 }
81 80 ''' No newline at end of file
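# Hypothetical output of formatArgs for a single operation (the operation and
# argument names below are illustrative only; the real names come from each
# class's getAllowedArgs()):
#
#   SpectraPlot
#   parameters = {
#       'id': 'string',
#       'wintitle': 'string',
#    }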