Danny Scipión - r1362:036afbeb089e merge
@@ -1,688 +1,691
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
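# Build the custom 'jro' colormap: a few blue shades taken from 'seismic_r'
# followed by the central part of 'jet', then register it for use by the plots below.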
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
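'''
Convert the geographic point (lat2, lon2) to local x-y coordinates in km
relative to (lat1, lon1): haversine distance plus forward bearing, rotated
so that x points East and y points North.
'''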
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
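'''
Pause for `interval` seconds by running the canvas event loop directly, so
interactive figures stay responsive without being raised to the front
(a variant of plt.pause).
'''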
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
Show `message` in a large red figure (simple notice/error popup)
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute
103 103 (extra calls within the period are silently skipped):
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
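'''
Return a wrapper that calls a given function at most once every `value`
seconds; calls arriving earlier are dropped by the Throttle decorator.
'''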
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new
154 154 operation. Child classes must be defined as follows:
155 155
156 156 class ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
Axis tick formatter: render a timestamp as HH:MM (local or UTC, per `localtime`)
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 205 self.localtime = kwargs.pop('localtime', True)
206 206 self.show = kwargs.get('show', True)
207 207 self.save = kwargs.get('save', False)
208 208 self.save_period = kwargs.get('save_period', 0)
209 209 self.colormap = kwargs.get('colormap', self.colormap)
210 210 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
211 211 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
212 212 self.colormaps = kwargs.get('colormaps', None)
213 213 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
214 214 self.showprofile = kwargs.get('showprofile', False)
215 215 self.title = kwargs.get('wintitle', self.CODE.upper())
216 216 self.cb_label = kwargs.get('cb_label', None)
217 217 self.cb_labels = kwargs.get('cb_labels', None)
218 218 self.labels = kwargs.get('labels', None)
219 219 self.xaxis = kwargs.get('xaxis', 'frequency')
220 220 self.zmin = kwargs.get('zmin', None)
221 221 self.zmax = kwargs.get('zmax', None)
222 222 self.zlimits = kwargs.get('zlimits', None)
223 223 self.xmin = kwargs.get('xmin', None)
224 224 self.xmax = kwargs.get('xmax', None)
225 225 self.xrange = kwargs.get('xrange', 12)
226 226 self.xscale = kwargs.get('xscale', None)
227 227 self.ymin = kwargs.get('ymin', None)
228 228 self.ymax = kwargs.get('ymax', None)
229 229 self.yscale = kwargs.get('yscale', None)
230 230 self.xlabel = kwargs.get('xlabel', None)
231 231 self.attr_time = kwargs.get('attr_time', 'utctime')
232 232 self.attr_data = kwargs.get('attr_data', 'data_param')
233 233 self.decimation = kwargs.get('decimation', None)
234 self.showSNR = kwargs.get('showSNR', False)
235 234 self.oneFigure = kwargs.get('oneFigure', True)
236 235 self.width = kwargs.get('width', None)
237 236 self.height = kwargs.get('height', None)
238 237 self.colorbar = kwargs.get('colorbar', True)
239 238 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
240 239 self.channels = kwargs.get('channels', None)
241 240 self.titles = kwargs.get('titles', [])
242 241 self.polar = False
243 242 self.type = kwargs.get('type', 'iq')
244 243 self.grid = kwargs.get('grid', False)
245 244 self.pause = kwargs.get('pause', False)
246 245 self.save_code = kwargs.get('save_code', self.CODE)
247 246 self.throttle = kwargs.get('throttle', 0)
248 247 self.exp_code = kwargs.get('exp_code', None)
249 248 self.server = kwargs.get('server', False)
250 249 self.sender_period = kwargs.get('sender_period', 60)
251 250 self.tag = kwargs.get('tag', '')
252 251 self.height_index = kwargs.get('height_index', None)
253 252 self.__throttle_plot = apply_throttle(self.throttle)
254 253 code = self.attr_data if self.attr_data else self.CODE
255 254 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
256 255
257 256 if self.server:
258 257 if not self.server.startswith('tcp://'):
259 258 self.server = 'tcp://{}'.format(self.server)
260 259 log.success(
261 260 'Sending to server: {}'.format(self.server),
262 261 self.name
263 262 )
264 263
264 if isinstance(self.attr_data, str):
265 self.attr_data = [self.attr_data]
266
265 267 def __setup_plot(self):
266 268 '''
267 269 Common setup for all figures, here figures and axes are created
268 270 '''
269 271
270 272 self.setup()
271 273
272 274 self.time_label = 'LT' if self.localtime else 'UTC'
273 275
274 276 if self.width is None:
275 277 self.width = 8
276 278
277 279 self.figures = []
278 280 self.axes = []
279 281 self.cb_axes = []
280 282 self.pf_axes = []
281 283 self.cmaps = []
282 284
283 285 size = '15%' if self.ncols == 1 else '30%'
284 286 pad = '4%' if self.ncols == 1 else '8%'
285 287
286 288 if self.oneFigure:
287 289 if self.height is None:
288 290 self.height = 1.4 * self.nrows + 1
289 291 fig = plt.figure(figsize=(self.width, self.height),
290 292 edgecolor='k',
291 293 facecolor='w')
292 294 self.figures.append(fig)
293 295 for n in range(self.nplots):
294 296 ax = fig.add_subplot(self.nrows, self.ncols,
295 297 n + 1, polar=self.polar)
296 298 ax.tick_params(labelsize=8)
297 299 ax.firsttime = True
298 300 ax.index = 0
299 301 ax.press = None
300 302 self.axes.append(ax)
301 303 if self.showprofile:
302 304 cax = self.__add_axes(ax, size=size, pad=pad)
303 305 cax.tick_params(labelsize=8)
304 306 self.pf_axes.append(cax)
305 307 else:
306 308 if self.height is None:
307 309 self.height = 3
308 310 for n in range(self.nplots):
309 311 fig = plt.figure(figsize=(self.width, self.height),
310 312 edgecolor='k',
311 313 facecolor='w')
312 314 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
313 315 ax.tick_params(labelsize=8)
314 316 ax.firsttime = True
315 317 ax.index = 0
316 318 ax.press = None
317 319 self.figures.append(fig)
318 320 self.axes.append(ax)
319 321 if self.showprofile:
320 322 cax = self.__add_axes(ax, size=size, pad=pad)
321 323 cax.tick_params(labelsize=8)
322 324 self.pf_axes.append(cax)
323 325
324 326 for n in range(self.nrows):
327 print(self.nrows)
325 328 if self.colormaps is not None:
326 329 cmap = plt.get_cmap(self.colormaps[n])
327 330 else:
328 331 cmap = plt.get_cmap(self.colormap)
329 332 cmap.set_bad(self.bgcolor, 1.)
330 333 self.cmaps.append(cmap)
331 334
332 335 def __add_axes(self, ax, size='30%', pad='8%'):
333 336 '''
334 337 Add new axes to the given figure
335 338 '''
336 339 divider = make_axes_locatable(ax)
337 340 nax = divider.new_horizontal(size=size, pad=pad)
338 341 ax.figure.add_axes(nax)
339 342 return nax
340 343
341 344 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
342 345 '''
343 346 Create a masked array for missing data
344 347 '''
345 348 if x_buffer.shape[0] < 2:
346 349 return x_buffer, y_buffer, z_buffer
347 350
348 351 deltas = x_buffer[1:] - x_buffer[0:-1]
349 352 x_median = numpy.median(deltas)
350 353
351 354 index = numpy.where(deltas > 5 * x_median)
352 355
353 356 if len(index[0]) != 0:
354 357 z_buffer[::, index[0], ::] = self.__missing
355 358 z_buffer = numpy.ma.masked_inside(z_buffer,
356 359 0.99 * self.__missing,
357 360 1.01 * self.__missing)
358 361
359 362 return x_buffer, y_buffer, z_buffer
360 363
361 364 def decimate(self):
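'''
Subsample the data along the height axis so that roughly `self.decimation`
points per profile are plotted.
'''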
362 365
363 366 # dx = int(len(self.x)/self.__MAXNUMX) + 1
364 367 dy = int(len(self.y) / self.decimation) + 1
365 368
366 369 # x = self.x[::dx]
367 370 x = self.x
368 371 y = self.y[::dy]
369 372 z = self.z[::, ::, ::dy]
370 373
371 374 return x, y, z
372 375
373 376 def format(self):
374 377 '''
375 378 Set min and max values, labels, ticks and titles
376 379 '''
377 380
378 381 for n, ax in enumerate(self.axes):
379 382 if ax.firsttime:
380 383 if self.xaxis != 'time':
381 384 xmin = self.xmin
382 385 xmax = self.xmax
383 386 else:
384 387 xmin = self.tmin
385 388 xmax = self.tmin + self.xrange*60*60
386 389 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
387 390 ax.xaxis.set_major_locator(LinearLocator(9))
388 391 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
389 392 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
390 393 ax.set_facecolor(self.bgcolor)
391 394 if self.xscale:
392 395 ax.xaxis.set_major_formatter(FuncFormatter(
393 396 lambda x, pos: '{0:g}'.format(x*self.xscale)))
394 397 if self.yscale:
395 398 ax.yaxis.set_major_formatter(FuncFormatter(
396 399 lambda x, pos: '{0:g}'.format(x*self.yscale)))
397 400 if self.xlabel is not None:
398 401 ax.set_xlabel(self.xlabel)
399 402 if self.ylabel is not None:
400 403 ax.set_ylabel(self.ylabel)
401 404 if self.showprofile:
402 405 self.pf_axes[n].set_ylim(ymin, ymax)
403 406 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
404 407 self.pf_axes[n].set_xlabel('dB')
405 408 self.pf_axes[n].grid(b=True, axis='x')
406 409 [tick.set_visible(False)
407 410 for tick in self.pf_axes[n].get_yticklabels()]
408 411 if self.colorbar:
409 412 ax.cbar = plt.colorbar(
410 413 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
411 414 ax.cbar.ax.tick_params(labelsize=8)
412 415 ax.cbar.ax.press = None
413 416 if self.cb_label:
414 417 ax.cbar.set_label(self.cb_label, size=8)
415 418 elif self.cb_labels:
416 419 ax.cbar.set_label(self.cb_labels[n], size=8)
417 420 else:
418 421 ax.cbar = None
419 422 ax.set_xlim(xmin, xmax)
420 423 ax.set_ylim(ymin, ymax)
421 424 ax.firsttime = False
422 425 if self.grid:
423 426 ax.grid(True)
424 427 if not self.polar:
425 428 ax.set_title('{} {} {}'.format(
426 429 self.titles[n],
427 430 self.getDateTime(self.data.max_time).strftime(
428 431 '%Y-%m-%d %H:%M:%S'),
429 432 self.time_label),
430 433 size=8)
431 434 else:
432 435 ax.set_title('{}'.format(self.titles[n]), size=8)
433 436 ax.set_ylim(0, 90)
434 437 ax.set_yticks(numpy.arange(0, 90, 20))
435 438 ax.yaxis.labelpad = 40
436 439
437 440 if self.firsttime:
438 441 for n, fig in enumerate(self.figures):
439 442 fig.subplots_adjust(**self.plots_adjust)
440 443 self.firsttime = False
441 444
442 445 def clear_figures(self):
443 446 '''
444 447 Reset axes for redraw plots
445 448 '''
446 449
447 450 for ax in self.axes+self.pf_axes+self.cb_axes:
448 451 ax.clear()
449 452 ax.firsttime = True
450 453 if hasattr(ax, 'cbar') and ax.cbar:
451 454 ax.cbar.remove()
452 455
453 456 def __plot(self):
454 457 '''
455 458 Main function to plot, format and save figures
456 459 '''
457 460
458 461 self.plot()
459 462 self.format()
460 463
461 464 for n, fig in enumerate(self.figures):
462 465 if self.nrows == 0 or self.nplots == 0:
463 466 log.warning('No data', self.name)
464 467 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
465 468 fig.canvas.manager.set_window_title(self.CODE)
466 469 continue
467 470
468 471 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
469 472 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
470 473 fig.canvas.draw()
471 474 if self.show:
472 475 fig.show()
473 476 figpause(0.01)
474 477
475 478 if self.save:
476 479 self.save_figure(n)
477 480
478 481 if self.server:
479 482 self.send_to_server()
480 483
481 484 def __update(self, dataOut, timestamp):
482 485 '''
Collect metadata from dataOut, call self.update() and push the result into self.data
483 486 '''
484 487
485 488 metadata = {
486 489 'yrange': dataOut.heightList,
487 490 'interval': dataOut.timeInterval,
488 491 'channels': dataOut.channelList
489 492 }
490 493
491 494 data, meta = self.update(dataOut)
492 495 metadata.update(meta)
493 496 self.data.update(data, timestamp, metadata)
494 497
495 498 def save_figure(self, n):
496 499 '''
Save figure `n` to disk, at most once every `save_period` seconds
497 500 '''
498 501
499 502 if (self.data.max_time - self.save_time) <= self.save_period:
500 503 return
501 504
502 505 self.save_time = self.data.max_time
503 506
504 507 fig = self.figures[n]
505 508
509 if self.throttle == 0:
506 510 figname = os.path.join(
507 511 self.save,
508 512 self.save_code,
509 513 '{}_{}.png'.format(
510 514 self.save_code,
511 515 self.getDateTime(self.data.max_time).strftime(
512 516 '%Y%m%d_%H%M%S'
513 517 ),
514 518 )
515 519 )
516 520 log.log('Saving figure: {}'.format(figname), self.name)
517 521 if not os.path.isdir(os.path.dirname(figname)):
518 522 os.makedirs(os.path.dirname(figname))
519 523 fig.savefig(figname)
520 524
521 if self.throttle == 0:
522 525 figname = os.path.join(
523 526 self.save,
524 527 '{}_{}.png'.format(
525 528 self.save_code,
526 529 self.getDateTime(self.data.min_time).strftime(
527 530 '%Y%m%d'
528 531 ),
529 532 )
530 533 )
531 534 fig.savefig(figname)
532 535
533 536 def send_to_server(self):
534 537 '''
Send the plot data to the realtime server over ZMQ (REQ/REP), at most once
every `sender_period` seconds; failed sends are re-queued and the socket is reset
535 538 '''
536 539
537 540 if self.exp_code is None:
538 541 log.warning('Missing `exp_code`, skipping sending to server...')
539 542
540 543 last_time = self.data.max_time
541 544 interval = last_time - self.sender_time
542 545 if interval < self.sender_period:
543 546 return
544 547
545 548 self.sender_time = last_time
546 549
547 550 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
548 551 for attr in attrs:
549 552 value = getattr(self, attr)
550 553 if value:
551 554 if isinstance(value, (numpy.float32, numpy.float64)):
552 555 value = round(float(value), 2)
553 556 self.data.meta[attr] = value
554 557 if self.colormap == 'jet':
555 558 self.data.meta['colormap'] = 'Jet'
556 559 elif 'RdBu' in self.colormap:
557 560 self.data.meta['colormap'] = 'RdBu'
558 561 else:
559 562 self.data.meta['colormap'] = 'Viridis'
560 563 self.data.meta['interval'] = int(interval)
561 564
562 565 self.sender_queue.append(last_time)
563 566
564 567 while True:
565 568 try:
566 569 tm = self.sender_queue.popleft()
567 570 except IndexError:
568 571 break
569 572 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
570 573 self.socket.send_string(msg)
571 574 socks = dict(self.poll.poll(2000))
572 575 if socks.get(self.socket) == zmq.POLLIN:
573 576 reply = self.socket.recv_string()
574 577 if reply == 'ok':
575 578 log.log("Response from server ok", self.name)
576 579 time.sleep(0.1)
577 580 continue
578 581 else:
579 582 log.warning(
580 583 "Malformed reply from server: {}".format(reply), self.name)
581 584 else:
582 585 log.warning(
583 586 "No response from server, retrying...", self.name)
584 587 self.sender_queue.appendleft(tm)
585 588 self.socket.setsockopt(zmq.LINGER, 0)
586 589 self.socket.close()
587 590 self.poll.unregister(self.socket)
588 591 self.socket = self.context.socket(zmq.REQ)
589 592 self.socket.connect(self.server)
590 593 self.poll.register(self.socket, zmq.POLLIN)
591 594 break
592 595
593 596 def setup(self):
594 597 '''
595 598 This method must be implemented in the child class; the following
596 599 attributes should be set:
597 600
598 601 self.nrows: number of rows
599 602 self.ncols: number of cols
600 603 self.nplots: number of plots (channels or pairs)
601 604 self.ylabel: label for Y axes
602 605 self.titles: list of axes titles
603 606
604 607 '''
605 608 raise NotImplementedError
606 609
607 610 def plot(self):
608 611 '''
609 612 Must be defined in the child class; performs the actual plotting
610 613 '''
611 614 raise NotImplementedError
612 615
613 616 def update(self, dataOut):
614 617 '''
615 618 Must be defined in the child class; extracts from dataOut the new data and metadata used to update self.data
616 619 '''
617 620
618 621 data = {
619 622 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
620 623 }
621 624 meta = {}
622 625
623 626 return data, meta
624 627
625 628 def run(self, dataOut, **kwargs):
626 629 '''
627 630 Main plotting routine
628 631 '''
629 632
630 633 if self.isConfig is False:
631 634 self.__setup(**kwargs)
632 635
633 636 if self.localtime:
634 637 self.getDateTime = datetime.datetime.fromtimestamp
635 638 else:
636 639 self.getDateTime = datetime.datetime.utcfromtimestamp
637 640
638 641 self.data.setup()
639 642 self.isConfig = True
640 643 if self.server:
641 644 self.context = zmq.Context()
642 645 self.socket = self.context.socket(zmq.REQ)
643 646 self.socket.connect(self.server)
644 647 self.poll = zmq.Poller()
645 648 self.poll.register(self.socket, zmq.POLLIN)
646 649
647 650 tm = getattr(dataOut, self.attr_time)
648 651
649 652 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
650 653 self.save_time = tm
651 654 self.__plot()
652 655 self.tmin += self.xrange*60*60
653 656 self.data.setup()
654 657 self.clear_figures()
655 658
656 659 self.__update(dataOut, tm)
657 660
658 661 if self.isPlotConfig is False:
659 662 self.__setup_plot()
660 663 self.isPlotConfig = True
661 664 if self.xaxis == 'time':
662 665 dt = self.getDateTime(tm)
663 666 if self.xmin is None:
664 667 self.tmin = tm
665 668 self.xmin = dt.hour
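# xmin is interpreted as decimal hours; the lines below convert it to an
# epoch timestamp (tmin) marking the start of the time axis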
666 669 minutes = (self.xmin-int(self.xmin)) * 60
667 670 seconds = (minutes - int(minutes)) * 60
668 671 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
669 672 datetime.datetime(1970, 1, 1)).total_seconds()
670 673 if self.localtime:
671 674 self.tmin += time.timezone
672 675
673 676 if self.xmin is not None and self.xmax is not None:
674 677 self.xrange = self.xmax - self.xmin
675 678
676 679 if self.throttle == 0:
677 680 self.__plot()
678 681 else:
679 682 self.__throttle_plot(self.__plot)#, coerce=coerce)
680 683
681 684 def close(self):
682 685
683 686 if self.data and not self.data.flagNoData:
684 687 self.save_time = self.data.max_time
685 688 self.__plot()
686 689 if self.data and not self.data.flagNoData and self.pause:
687 690 figpause(10)
688 691
@@ -1,371 +1,370
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 # colormap = 'jet'
39 39 # plot_type = 'pcolor'
40 40
41 41 class DobleGaussianPlot(SpectraPlot):
42 42 '''
43 43 Plot for Double Gaussian Plot
44 44 '''
45 45 CODE = 'gaussian_fit'
46 46 # colormap = 'jet'
47 47 # plot_type = 'pcolor'
48 48
49 49 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
50 50 '''
51 51 Plot SpectraCut with Double Gaussian Fit
52 52 '''
53 53 CODE = 'cut_gaussian_fit'
54 54
55 55 class SnrPlot(RTIPlot):
56 56 '''
57 57 Plot for SNR Data
58 58 '''
59 59
60 60 CODE = 'snr'
61 61 colormap = 'jet'
62 62
63 63 def update(self, dataOut):
64 64
65 65 data = {
66 66 'snr': 10*numpy.log10(dataOut.data_snr)
67 67 }
68 68
69 69 return data, {}
70 70
71 71 class DopplerPlot(RTIPlot):
72 72 '''
73 73 Plot for DOPPLER Data (1st moment)
74 74 '''
75 75
76 76 CODE = 'dop'
77 77 colormap = 'jet'
78 78
79 79 def update(self, dataOut):
80 80
81 81 data = {
82 82 'dop': 10*numpy.log10(dataOut.data_dop)
83 83 }
84 84
85 85 return data, {}
86 86
87 87 class PowerPlot(RTIPlot):
88 88 '''
89 89 Plot for Power Data (0 moment)
90 90 '''
91 91
92 92 CODE = 'pow'
93 93 colormap = 'jet'
94 94
95 95 def update(self, dataOut):
96 96
97 97 data = {
98 98 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
99 99 }
100 100
101 101 return data, {}
102 102
103 103 class SpectralWidthPlot(RTIPlot):
104 104 '''
105 105 Plot for Spectral Width Data (2nd moment)
106 106 '''
107 107
108 108 CODE = 'width'
109 109 colormap = 'jet'
110 110
111 111 def update(self, dataOut):
112 112
113 113 data = {
114 114 'width': dataOut.data_width
115 115 }
116 116
117 117 return data, {}
118 118
119 119 class SkyMapPlot(Plot):
120 120 '''
121 121 Plot for meteors detection data
122 122 '''
123 123
124 124 CODE = 'param'
125 125
126 126 def setup(self):
127 127
128 128 self.ncols = 1
129 129 self.nrows = 1
130 130 self.width = 7.2
131 131 self.height = 7.2
132 132 self.nplots = 1
133 133 self.xlabel = 'Zonal Zenith Angle (deg)'
134 134 self.ylabel = 'Meridional Zenith Angle (deg)'
135 135 self.polar = True
136 136 self.ymin = -180
137 137 self.ymax = 180
138 138 self.colorbar = False
139 139
140 140 def plot(self):
141 141
142 142 arrayParameters = numpy.concatenate(self.data['param'])
143 143 error = arrayParameters[:, -1]
144 144 indValid = numpy.where(error == 0)[0]
145 145 finalMeteor = arrayParameters[indValid, :]
146 146 finalAzimuth = finalMeteor[:, 3]
147 147 finalZenith = finalMeteor[:, 4]
148 148
149 149 x = finalAzimuth * numpy.pi / 180
150 150 y = finalZenith
151 151
152 152 ax = self.axes[0]
153 153
154 154 if ax.firsttime:
155 155 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
156 156 else:
157 157 ax.plot.set_data(x, y)
158 158
159 159 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
160 160 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
161 161 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
162 162 dt2,
163 163 len(x))
164 164 self.titles[0] = title
165 165
166 166
167 167 class GenericRTIPlot(Plot):
168 168 '''
169 169 Plot for data_xxxx object
170 170 '''
171 171
172 172 CODE = 'param'
173 173 colormap = 'viridis'
174 174 plot_type = 'pcolorbuffer'
175 175
176 176 def setup(self):
177 177 self.xaxis = 'time'
178 178 self.ncols = 1
179 self.nrows = self.data.shape(self.attr_data)[0]
179 self.nrows = self.data.shape('param')[0]
180 180 self.nplots = self.nrows
181 181 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
182 182
183 183 if not self.xlabel:
184 184 self.xlabel = 'Time'
185 185
186 186 self.ylabel = 'Range [km]'
187 187 if not self.titles:
188 self.titles = self.data.parameters \
189 if self.data.parameters else ['Param {}'.format(x) for x in range(self.nrows)]
188 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
190 189
191 190 def update(self, dataOut):
192 191
193 192 data = {
194 self.attr_data : getattr(dataOut, self.attr_data)
193 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
195 194 }
196 195
197 196 meta = {}
198 197
199 198 return data, meta
200 199
201 200 def plot(self):
202 201 # self.data.normalize_heights()
203 202 self.x = self.data.times
204 203 self.y = self.data.yrange
205 self.z = self.data[self.attr_data]
204 self.z = self.data['param']
206 205
207 206 self.z = numpy.ma.masked_invalid(self.z)
208 207
209 208 if self.decimation is None:
210 209 x, y, z = self.fill_gaps(self.x, self.y, self.z)
211 210 else:
212 211 x, y, z = self.fill_gaps(*self.decimate())
213 212
214 213 for n, ax in enumerate(self.axes):
215 214
216 215 self.zmax = self.zmax if self.zmax is not None else numpy.max(
217 216 self.z[n])
218 217 self.zmin = self.zmin if self.zmin is not None else numpy.min(
219 218 self.z[n])
220 219
221 220 if ax.firsttime:
222 221 if self.zlimits is not None:
223 222 self.zmin, self.zmax = self.zlimits[n]
224 223
225 224 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
226 225 vmin=self.zmin,
227 226 vmax=self.zmax,
228 227 cmap=self.cmaps[n]
229 228 )
230 229 else:
231 230 if self.zlimits is not None:
232 231 self.zmin, self.zmax = self.zlimits[n]
233 232 ax.collections.remove(ax.collections[0])
234 233 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
235 234 vmin=self.zmin,
236 235 vmax=self.zmax,
237 236 cmap=self.cmaps[n]
238 237 )
239 238
240 239
241 240 class PolarMapPlot(Plot):
242 241 '''
243 242 Plot for weather radar
244 243 '''
245 244
246 245 CODE = 'param'
247 246 colormap = 'seismic'
248 247
249 248 def setup(self):
250 249 self.ncols = 1
251 250 self.nrows = 1
252 251 self.width = 9
253 252 self.height = 8
254 253 self.mode = self.data.meta['mode']
255 254 if self.channels is not None:
256 255 self.nplots = len(self.channels)
257 256 self.nrows = len(self.channels)
258 257 else:
259 258 self.nplots = self.data.shape(self.CODE)[0]
260 259 self.nrows = self.nplots
261 260 self.channels = list(range(self.nplots))
262 261 if self.mode == 'E':
263 262 self.xlabel = 'Longitude'
264 263 self.ylabel = 'Latitude'
265 264 else:
266 265 self.xlabel = 'Range (km)'
267 266 self.ylabel = 'Height (km)'
268 267 self.bgcolor = 'white'
269 268 self.cb_labels = self.data.meta['units']
270 269 self.lat = self.data.meta['latitude']
271 270 self.lon = self.data.meta['longitude']
272 271 self.xmin, self.xmax = float(
273 272 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
274 273 self.ymin, self.ymax = float(
275 274 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
276 275 # self.polar = True
277 276
278 277 def plot(self):
279 278
280 279 for n, ax in enumerate(self.axes):
281 280 data = self.data['param'][self.channels[n]]
282 281
283 282 zeniths = numpy.linspace(
284 283 0, self.data.meta['max_range'], data.shape[1])
285 284 if self.mode == 'E':
286 285 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
287 286 r, theta = numpy.meshgrid(zeniths, azimuths)
288 287 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
289 288 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
290 289 x = km2deg(x) + self.lon
291 290 y = km2deg(y) + self.lat
292 291 else:
293 292 azimuths = numpy.radians(self.data.yrange)
294 293 r, theta = numpy.meshgrid(zeniths, azimuths)
295 294 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
296 295 self.y = zeniths
297 296
298 297 if ax.firsttime:
299 298 if self.zlimits is not None:
300 299 self.zmin, self.zmax = self.zlimits[n]
301 300 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
302 301 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
303 302 vmin=self.zmin,
304 303 vmax=self.zmax,
305 304 cmap=self.cmaps[n])
306 305 else:
307 306 if self.zlimits is not None:
308 307 self.zmin, self.zmax = self.zlimits[n]
309 308 ax.collections.remove(ax.collections[0])
310 309 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
311 310 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
312 311 vmin=self.zmin,
313 312 vmax=self.zmax,
314 313 cmap=self.cmaps[n])
315 314
316 315 if self.mode == 'A':
317 316 continue
318 317
319 318 # plot district names
320 319 f = open('/data/workspace/schain_scripts/distrito.csv')
321 320 for line in f:
322 321 label, lon, lat = [s.strip() for s in line.split(',') if s]
323 322 lat = float(lat)
324 323 lon = float(lon)
325 324 # ax.plot(lon, lat, '.b', ms=2)
326 325 ax.text(lon, lat, label.decode('utf8'), ha='center',
327 326 va='bottom', size='8', color='black')
328 327
329 328 # plot boundary lines (limites)
330 329 limites = []
331 330 tmp = []
332 331 for line in open('/data/workspace/schain_scripts/lima.csv'):
333 332 if '#' in line:
334 333 if tmp:
335 334 limites.append(tmp)
336 335 tmp = []
337 336 continue
338 337 values = line.strip().split(',')
339 338 tmp.append((float(values[0]), float(values[1])))
340 339 for points in limites:
341 340 ax.add_patch(
342 341 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
343 342
344 343 # plot river basins (cuencas)
345 344 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
346 345 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
347 346 values = [line.strip().split(',') for line in f]
348 347 points = [(float(s[0]), float(s[1])) for s in values]
349 348 ax.add_patch(Polygon(points, ec='b', fc='none'))
350 349
351 350 # plot grid
352 351 for r in (15, 30, 45, 60):
353 352 ax.add_artist(plt.Circle((self.lon, self.lat),
354 353 km2deg(r), color='0.6', fill=False, lw=0.2))
355 354 ax.text(
356 355 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
357 356 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
358 357 '{}km'.format(r),
359 358 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
360 359
361 360 if self.mode == 'E':
362 361 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
363 362 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
364 363 else:
365 364 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
366 365 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
367 366
368 367 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
369 368 self.titles = ['{} {}'.format(
370 369 self.data.parameters[x], title) for x in self.channels]
371 370
@@ -1,462 +1,453
1 1 import os
2 2 import sys
3 3 import glob
4 import fnmatch
5 import datetime
6 import time
7 import re
8 import h5py
9 4 import numpy
10 5
11 import pylab as plb
12 from scipy.optimize import curve_fit
13 from scipy import asarray as ar, exp
14 6
15 7 SPEED_OF_LIGHT = 299792458
16 8 SPEED_OF_LIGHT = 3e8 # overrides the exact value above with the rounded value actually used below
17 9
18 10 from .utils import folder_in_range
19 11
20 12 import schainpy.admin
21 13 from schainpy.model.data.jrodata import Spectra
22 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
14 from schainpy.model.proc.jroproc_base import ProcessingUnit
23 15 from schainpy.utils import log
24 from schainpy.model.io.jroIO_base import JRODataReader
16
25 17
26 18 def pol2cart(rho, phi):
27 19 x = rho * numpy.cos(phi)
28 20 y = rho * numpy.sin(phi)
29 21 return(x, y)
30 22
31 23 FILE_STRUCTURE = numpy.dtype([ # HEADER 48bytes
32 24 ('FileMgcNumber', '<u4'), # 0x23020100
33 25 ('nFDTdataRecors', '<u4'),
34 26 ('OffsetStartHeader', '<u4'),
35 27 ('RadarUnitId', '<u4'),
36 28 ('SiteName', 'S32'), # Null terminated
37 29 ])
38 30
39 31
40 32 class FileHeaderBLTR():
41 33
42 34 def __init__(self, fo):
43 35
44 36 self.fo = fo
45 37 self.size = 48
46 38 self.read()
47 39
48 40 def read(self):
49 41
50 42 header = numpy.fromfile(self.fo, FILE_STRUCTURE, 1)
51 43 self.FileMgcNumber = hex(header['FileMgcNumber'][0])
52 44 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
53 45 self.RadarUnitId = int(header['RadarUnitId'][0])
54 46 self.OffsetStartHeader = int(header['OffsetStartHeader'][0])
55 47 self.SiteName = header['SiteName'][0]
56 48
57 49 def write(self, fp):
58 50
59 51 headerTuple = (self.FileMgcNumber,
60 52 self.nFDTdataRecors,
61 53 self.RadarUnitId,
62 54 self.SiteName,
63 55 self.size)
64 56
65 57 header = numpy.array(headerTuple, FILE_STRUCTURE)
66 58 header.tofile(fp)
67 59 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
68 60
69 61 fid : file or str
70 62 An open file object, or a string containing a filename.
71 63
72 64 sep : str
73 65 Separator between array items for text output. If "" (empty), a binary file is written,
74 66 equivalent to file.write(a.tobytes()).
75 67
76 68 format : str
77 69 Format string for text file output. Each entry in the array is formatted to text by
78 70 first converting it to the closest Python type, and then using "format" % item.
79 71
80 72 '''
81 73
82 74 return 1
83 75
84 76
85 77 RECORD_STRUCTURE = numpy.dtype([ # RECORD HEADER 180+20N bytes
86 78 ('RecMgcNumber', '<u4'), # 0x23030001
87 79 ('RecCounter', '<u4'), # Record counter(0,1, ...)
88 80 # Offset to start of next record form start of this record
89 81 ('Off2StartNxtRec', '<u4'),
90 82 # Offset to start of data from start of this record
91 83 ('Off2StartData', '<u4'),
92 84 # Epoch time stamp of start of acquisition (seconds)
93 85 ('nUtime', '<i4'),
94 86 # Millisecond component of time stamp (0,...,999)
95 87 ('nMilisec', '<u4'),
96 88 # Experiment tag name (null terminated)
97 89 ('ExpTagName', 'S32'),
98 90 # Experiment comment (null terminated)
99 91 ('ExpComment', 'S32'),
100 92 # Site latitude (from GPS) in degrees (positive implies North)
101 93 ('SiteLatDegrees', '<f4'),
102 94 # Site longitude (from GPS) in degrees (positive implies East)
103 95 ('SiteLongDegrees', '<f4'),
104 96 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
105 97 ('RTCgpsStatus', '<u4'),
106 98 ('TransmitFrec', '<u4'), # Transmit frequency (Hz)
107 99 ('ReceiveFrec', '<u4'), # Receive frequency
108 100 # First local oscillator frequency (Hz)
109 101 ('FirstOsciFrec', '<u4'),
110 102 # (0="O", 1="E", 2="linear 1", 3="linear2")
111 103 ('Polarisation', '<u4'),
112 104 # Receiver filter settings (0,1,2,3)
113 105 ('ReceiverFiltSett', '<u4'),
114 106 # Number of modes in use (1 or 2)
115 107 ('nModesInUse', '<u4'),
116 108 # Dual Mode index number for these data (0 or 1)
117 109 ('DualModeIndex', '<u4'),
118 110 # Dual Mode range correction for these data (m)
119 111 ('DualModeRange', '<u4'),
120 112 # Number of digital channels acquired (2*N)
121 113 ('nDigChannels', '<u4'),
122 114 # Sampling resolution (meters)
123 115 ('SampResolution', '<u4'),
124 116 # Number of range gates sampled
125 117 ('nHeights', '<u4'),
126 118 # Start range of sampling (meters)
127 119 ('StartRangeSamp', '<u4'),
128 120 ('PRFhz', '<u4'), # PRF (Hz)
129 121 ('nCohInt', '<u4'), # Integrations
130 122 # Number of data points transformed
131 123 ('nProfiles', '<u4'),
132 124 # Number of receive beams stored in file (1 or N)
133 125 ('nChannels', '<u4'),
134 126 ('nIncohInt', '<u4'), # Number of spectral averages
135 127 # FFT windowing index (0 = no window)
136 128 ('FFTwindowingInd', '<u4'),
137 129 # Beam steer angle (azimuth) in degrees (clockwise from true North)
138 130 ('BeamAngleAzim', '<f4'),
139 131 # Beam steer angle (zenith) in degrees (0=> vertical)
140 132 ('BeamAngleZen', '<f4'),
141 133 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
142 134 ('AntennaCoord0', '<f4'),
143 135 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
144 136 ('AntennaAngl0', '<f4'),
145 137 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
146 138 ('AntennaCoord1', '<f4'),
147 139 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
148 140 ('AntennaAngl1', '<f4'),
149 141 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
150 142 ('AntennaCoord2', '<f4'),
151 143 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
152 144 ('AntennaAngl2', '<f4'),
153 145 # Receiver phase calibration (degrees) - N values
154 146 ('RecPhaseCalibr0', '<f4'),
155 147 # Receiver phase calibration (degrees) - N values
156 148 ('RecPhaseCalibr1', '<f4'),
157 149 # Receiver phase calibration (degrees) - N values
158 150 ('RecPhaseCalibr2', '<f4'),
159 151 # Receiver amplitude calibration (ratio relative to receiver one) - N values
160 152 ('RecAmpCalibr0', '<f4'),
161 153 # Receiver amplitude calibration (ratio relative to receiver one) - N values
162 154 ('RecAmpCalibr1', '<f4'),
163 155 # Receiver amplitude calibration (ratio relative to receiver one) - N values
164 156 ('RecAmpCalibr2', '<f4'),
165 157 # Receiver gains in dB - N values
166 158 ('ReceiverGaindB0', '<i4'),
167 159 # Receiver gains in dB - N values
168 160 ('ReceiverGaindB1', '<i4'),
169 161 # Receiver gains in dB - N values
170 162 ('ReceiverGaindB2', '<i4'),
171 163 ])
172 164
173 165
174 166 class RecordHeaderBLTR():
175 167
176 168 def __init__(self, fo):
177 169
178 170 self.fo = fo
179 171 self.OffsetStartHeader = 48
180 172 self.Off2StartNxtRec = 811248
181 173
182 174 def read(self, block):
183 175 OffRHeader = self.OffsetStartHeader + block * self.Off2StartNxtRec
184 176 self.fo.seek(OffRHeader, os.SEEK_SET)
185 177 header = numpy.fromfile(self.fo, RECORD_STRUCTURE, 1)
186 178 self.RecMgcNumber = hex(header['RecMgcNumber'][0]) # 0x23030001
187 179 self.RecCounter = int(header['RecCounter'][0])
188 180 self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0])
189 181 self.Off2StartData = int(header['Off2StartData'][0])
190 182 self.nUtime = header['nUtime'][0]
191 183 self.nMilisec = header['nMilisec'][0]
192 184 self.ExpTagName = '' # str(header['ExpTagName'][0])
193 185 self.ExpComment = '' # str(header['ExpComment'][0])
194 186 self.SiteLatDegrees = header['SiteLatDegrees'][0]
195 187 self.SiteLongDegrees = header['SiteLongDegrees'][0]
196 188 self.RTCgpsStatus = header['RTCgpsStatus'][0]
197 189 self.TransmitFrec = header['TransmitFrec'][0]
198 190 self.ReceiveFrec = header['ReceiveFrec'][0]
199 191 self.FirstOsciFrec = header['FirstOsciFrec'][0]
200 192 self.Polarisation = header['Polarisation'][0]
201 193 self.ReceiverFiltSett = header['ReceiverFiltSett'][0]
202 194 self.nModesInUse = header['nModesInUse'][0]
203 195 self.DualModeIndex = header['DualModeIndex'][0]
204 196 self.DualModeRange = header['DualModeRange'][0]
205 197 self.nDigChannels = header['nDigChannels'][0]
206 198 self.SampResolution = header['SampResolution'][0]
207 199 self.nHeights = header['nHeights'][0]
208 200 self.StartRangeSamp = header['StartRangeSamp'][0]
209 201 self.PRFhz = header['PRFhz'][0]
210 202 self.nCohInt = header['nCohInt'][0]
211 203 self.nProfiles = header['nProfiles'][0]
212 204 self.nChannels = header['nChannels'][0]
213 205 self.nIncohInt = header['nIncohInt'][0]
214 206 self.FFTwindowingInd = header['FFTwindowingInd'][0]
215 207 self.BeamAngleAzim = header['BeamAngleAzim'][0]
216 208 self.BeamAngleZen = header['BeamAngleZen'][0]
217 209 self.AntennaCoord0 = header['AntennaCoord0'][0]
218 210 self.AntennaAngl0 = header['AntennaAngl0'][0]
219 211 self.AntennaCoord1 = header['AntennaCoord1'][0]
220 212 self.AntennaAngl1 = header['AntennaAngl1'][0]
221 213 self.AntennaCoord2 = header['AntennaCoord2'][0]
222 214 self.AntennaAngl2 = header['AntennaAngl2'][0]
223 215 self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0]
224 216 self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0]
225 217 self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0]
226 218 self.RecAmpCalibr0 = header['RecAmpCalibr0'][0]
227 219 self.RecAmpCalibr1 = header['RecAmpCalibr1'][0]
228 220 self.RecAmpCalibr2 = header['RecAmpCalibr2'][0]
229 221 self.ReceiverGaindB0 = header['ReceiverGaindB0'][0]
230 222 self.ReceiverGaindB1 = header['ReceiverGaindB1'][0]
231 223 self.ReceiverGaindB2 = header['ReceiverGaindB2'][0]
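# ipp stored here as a distance: half the distance light travels in one
# inter-pulse period (i.e. the maximum unambiguous range, in meters)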
232 224 self.ipp = 0.5 * (SPEED_OF_LIGHT / self.PRFhz)
233 225 self.RHsize = 180 + 20 * self.nChannels
234 226 self.Datasize = self.nProfiles * self.nChannels * self.nHeights * 2 * 4
235 227 endFp = self.OffsetStartHeader + self.RecCounter * self.Off2StartNxtRec
236 228
237 229
238 230 if OffRHeader > endFp:
239 231 sys.stderr.write(
240 232 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp)
241 233 return 0
242 234
243 235 if OffRHeader < endFp:
244 236 sys.stderr.write(
245 237 "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp)
246 238 return 0
247 239
248 240 return 1
249 241
250 242
251 243 class BLTRSpectraReader (ProcessingUnit):
252 244
253 245 def __init__(self):
254 246
255 247 ProcessingUnit.__init__(self)
256 248
257 249 self.ext = ".fdt"
258 250 self.optchar = "P"
259 251 self.fpFile = None
260 252 self.fp = None
261 253 self.BlockCounter = 0
262 254 self.fileSizeByHeader = None
263 255 self.filenameList = []
264 256 self.fileSelector = 0
265 257 self.Off2StartNxtRec = 0
266 258 self.RecCounter = 0
267 259 self.flagNoMoreFiles = 0
268 260 self.data_spc = None
269 261 self.data_cspc = None
270 262 self.path = None
271 263 self.OffsetStartHeader = 0
272 264 self.Off2StartData = 0
273 265 self.ipp = 0
274 266 self.nFDTdataRecors = 0
275 267 self.blocksize = 0
276 268 self.dataOut = Spectra()
277 269 self.dataOut.flagNoData = False
278 270
279 271 def search_files(self):
280 272 '''
281 273 Find the .fdt files in the configured folder that fall within the requested
282 274 date range and build a sorted list of filenames to read.
283 275 '''
284 276
285 277 files = glob.glob(os.path.join(self.path, '*{}'.format(self.ext)))
286 278 files = sorted(files)
287 279 for f in files:
288 280 filename = f.split('/')[-1]
289 281 if folder_in_range(filename.split('.')[1], self.startDate, self.endDate, '%Y%m%d'):
290 282 self.filenameList.append(f)
291 283
292 284 def run(self, **kwargs):
293 285 '''
294 286 Entry point of the unit; it is called repeatedly. On the first call it runs
295 287 setup() with the given keyword arguments and then acquires the next block
296 288 of data with getData().
297 289 '''
298 290 if not self.isConfig:
299 291 self.setup(**kwargs)
300 292 self.isConfig = True
301 293
302 294 self.getData()
303 295
304 296 def setup(self,
305 297 path=None,
306 298 startDate=None,
307 299 endDate=None,
308 300 startTime=None,
309 301 endTime=None,
310 302 walk=True,
311 303 code=None,
312 304 online=False,
313 305 mode=None,
314 306 **kwargs):
315 307
316 308 self.isConfig = True
317 309
318 310 self.path = path
319 311 self.startDate = startDate
320 312 self.endDate = endDate
321 313 self.startTime = startTime
322 314 self.endTime = endTime
323 315 self.walk = walk
324 316 self.mode = int(mode)
325 317 self.search_files()
326 318 if self.filenameList:
327 319 self.readFile()
328 320
329 321 def getData(self):
330 322 '''
331 323 Raise an error if there are no more files to read; otherwise read the next
332 324 data block from the current file.
333 325
334 326 Called on each iteration from run().
335 327
336 328 '''
337 329
338 330 if self.flagNoMoreFiles:
339 331 self.dataOut.flagNoData = True
340 332 raise schainpy.admin.SchainError('No more files')
341 333
342 334 self.readBlock()
343 335
344 336 def readFile(self):
345 337 '''
346 338 Open the next file in the list and load the parameters for this reading
347 339 mode (online or offline).
348 340
349 341 Two actions are then performed:
350 342
351 343 1. Get the BLTR FileHeader.
352 344 2. Start reading the first block.
353 345 '''
354 346
355 347 if self.fileSelector < len(self.filenameList):
356 348 log.success('Opening file: {}'.format(self.filenameList[self.fileSelector]), self.name)
357 349 self.fp = open(self.filenameList[self.fileSelector], 'rb')
358 350 self.fheader = FileHeaderBLTR(self.fp)
359 351 self.rheader = RecordHeaderBLTR(self.fp)
360 352 self.nFDTdataRecors = self.fheader.nFDTdataRecors
361 353 self.fileSelector += 1
362 354 self.BlockCounter = 0
363 355 return 1
364 356 else:
365 357 self.flagNoMoreFiles = True
366 358 self.dataOut.flagNoData = True
367 359 return 0
368 360
369 361 def readBlock(self):
370 362 '''
371 363 If all blocks of the current file have been read, move on to the next file.
372 364
373 365 Then the following is done:
374 366
375 367 1. Read the RecordHeader
376 368 2. Fill the buffer with the current block number.
377 369
378 370 '''
379 371
380 372 if self.BlockCounter == self.nFDTdataRecors:
381 373 if not self.readFile():
382 374 return
383 375
384 376 if self.mode == 1:
385 377 self.rheader.read(self.BlockCounter+1)
386 378 elif self.mode == 0:
387 379 self.rheader.read(self.BlockCounter)
388 380
389 381 self.RecCounter = self.rheader.RecCounter
390 382 self.OffsetStartHeader = self.rheader.OffsetStartHeader
391 383 self.Off2StartNxtRec = self.rheader.Off2StartNxtRec
392 384 self.Off2StartData = self.rheader.Off2StartData
393 385 self.nProfiles = self.rheader.nProfiles
394 386 self.nChannels = self.rheader.nChannels
395 387 self.nHeights = self.rheader.nHeights
396 388 self.frequency = self.rheader.TransmitFrec
397 389 self.DualModeIndex = self.rheader.DualModeIndex
398 390 self.pairsList = [(0, 1), (0, 2), (1, 2)]
399 391 self.dataOut.pairsList = self.pairsList
400 392 self.nRdPairs = len(self.dataOut.pairsList)
401 393 self.dataOut.nRdPairs = self.nRdPairs
402 394 self.dataOut.heightList = (self.rheader.StartRangeSamp + numpy.arange(self.nHeights) * self.rheader.SampResolution) / 1000.
403 395 self.dataOut.channelList = range(self.nChannels)
404 396 self.dataOut.nProfiles=self.rheader.nProfiles
405 397 self.dataOut.nIncohInt=self.rheader.nIncohInt
406 398 self.dataOut.nCohInt=self.rheader.nCohInt
407 399 self.dataOut.ippSeconds= 1/float(self.rheader.PRFhz)
408 400 self.dataOut.PRF=self.rheader.PRFhz
409 401 self.dataOut.nFFTPoints=self.rheader.nProfiles
410 402 self.dataOut.utctime = self.rheader.nUtime + self.rheader.nMilisec/1000.
411 403 self.dataOut.timeZone = 0
412 404 self.dataOut.useLocalTime = False
413 405 self.dataOut.nmodes = 2
414 406 log.log('Reading block {} - {}'.format(self.BlockCounter, self.dataOut.datatime), self.name)
415 407 OffDATA = self.OffsetStartHeader + self.RecCounter * \
416 408 self.Off2StartNxtRec + self.Off2StartData
417 409 self.fp.seek(OffDATA, os.SEEK_SET)
418 410
419 411 self.data_fft = numpy.fromfile(self.fp, [('complex','<c8')], self.nProfiles*self.nChannels*self.nHeights )
420 412 self.data_fft = self.data_fft.astype(numpy.dtype('complex'))
421 413 self.data_block = numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles))
422 414 self.data_block = numpy.transpose(self.data_block, (1,2,0))
423 415 copy = self.data_block.copy()
424 416 spc = copy * numpy.conjugate(copy)
425 417 self.data_spc = numpy.absolute(spc) # absolute value (magnitude)
426 self.dataOut.data_spc = self.data_spc
427 418
428 419 cspc = self.data_block.copy()
429 420 self.data_cspc = self.data_block.copy()
430 421
431 422 for i in range(self.nRdPairs):
432 423
433 424 chan_index0 = self.dataOut.pairsList[i][0]
434 425 chan_index1 = self.dataOut.pairsList[i][1]
435 426
436 427 self.data_cspc[i, :, :] = cspc[chan_index0, :, :] * numpy.conjugate(cspc[chan_index1, :, :])
437 428
438 429 '''Getting Eij and Nij'''
439 430 (AntennaX0, AntennaY0) = pol2cart(
440 431 self.rheader.AntennaCoord0, self.rheader.AntennaAngl0 * numpy.pi / 180)
441 432 (AntennaX1, AntennaY1) = pol2cart(
442 433 self.rheader.AntennaCoord1, self.rheader.AntennaAngl1 * numpy.pi / 180)
443 434 (AntennaX2, AntennaY2) = pol2cart(
444 435 self.rheader.AntennaCoord2, self.rheader.AntennaAngl2 * numpy.pi / 180)
445 436
446 437 E01 = AntennaX0 - AntennaX1
447 438 N01 = AntennaY0 - AntennaY1
448 439
449 440 E02 = AntennaX0 - AntennaX2
450 441 N02 = AntennaY0 - AntennaY2
451 442
452 443 E12 = AntennaX1 - AntennaX2
453 444 N12 = AntennaY1 - AntennaY2
454 445
455 446 self.ChanDist = numpy.array(
456 447 [[E01, N01], [E02, N02], [E12, N12]])
457 448
458 449 self.dataOut.ChanDist = self.ChanDist
459 450
460 451 self.BlockCounter += 2
461 452 self.dataOut.data_spc = self.data_spc
462 453 self.dataOut.data_cspc =self.data_cspc
@@ -1,627 +1,627
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation, which by
19 19 default contain two groups, Data and Metadata; all variables are loaded as
20 20 `dataOut` attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; extra values can also be added to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96
97 97 def setup(self, **kwargs):
98 98
99 99 self.set_kwargs(**kwargs)
100 100 if not self.ext.startswith('.'):
101 101 self.ext = '.{}'.format(self.ext)
102 102
103 103 if self.online:
104 104 log.log("Searching files in online mode...", self.name)
105 105
106 106 for nTries in range(self.nTries):
107 107 fullpath = self.searchFilesOnLine(self.path, self.startDate,
108 108 self.endDate, self.expLabel, self.ext, self.walk,
109 109 self.filefmt, self.folderfmt)
110 110 try:
111 111 fullpath = next(fullpath)
112 112 except:
113 113 fullpath = None
114 114
115 115 if fullpath:
116 116 break
117 117
118 118 log.warning(
119 119 'Waiting {} sec for a valid file in {}: try {} ...'.format(
120 120 self.delay, self.path, nTries + 1),
121 121 self.name)
122 122 time.sleep(self.delay)
123 123
124 124 if not(fullpath):
125 125 raise schainpy.admin.SchainError(
126 126 'There isn\'t any valid file in {}'.format(self.path))
127 127
128 128 pathname, filename = os.path.split(fullpath)
129 129 self.year = int(filename[1:5])
130 130 self.doy = int(filename[5:8])
131 131 self.set = int(filename[8:11]) - 1
132 132 else:
133 133 log.log("Searching files in {}".format(self.path), self.name)
134 134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 135 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
136 136
137 137 self.setNextFile()
138 138
139 139 return
140 140
141 141 def readFirstHeader(self):
142 142 '''Read metadata and data'''
143 143
144 144 self.__readMetadata()
145 145 self.__readData()
146 146 self.__setBlockList()
147 147
148 148 if 'type' in self.meta:
149 149 self.dataOut = eval(self.meta['type'])()
150 150
151 151 for attr in self.meta:
152 152 setattr(self.dataOut, attr, self.meta[attr])
153 153
154 154 self.blockIndex = 0
155 155
156 156 return
157 157
158 158 def __setBlockList(self):
159 159 '''
160 160 Selects the data within the times defined
161 161
162 162 self.fp
163 163 self.startTime
164 164 self.endTime
165 165 self.blockList
166 166 self.blocksPerFile
167 167
168 168 '''
169 169
170 170 startTime = self.startTime
171 171 endTime = self.endTime
172 172
173 173 thisUtcTime = self.data['utctime']
174 174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 175
176 176 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
177 177
178 178 thisDate = thisDatetime.date()
179 179 thisTime = thisDatetime.time()
180 180
181 181 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 182 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
183 183
184 184 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
185 185
186 186 self.blockList = ind
187 187 self.blocksPerFile = len(ind)
188 188 return
189 189
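A minimal, self-contained sketch of the time-window selection performed in __setBlockList, using hypothetical timestamps and the same epoch-seconds convention:

    import datetime
    import numpy

    utc = numpy.array([1546300800., 1546300860., 1546304400.])  # hypothetical utctime values
    day = datetime.datetime.utcfromtimestamp(utc[0]).date()
    epoch = datetime.datetime(1970, 1, 1)
    t0 = (datetime.datetime.combine(day, datetime.time(0, 0)) - epoch).total_seconds()
    t1 = (datetime.datetime.combine(day, datetime.time(0, 30)) - epoch).total_seconds()
    blocks = numpy.where(numpy.logical_and(utc >= t0, utc < t1))[0]  # -> array([0, 1])
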
190 190 def __readMetadata(self):
191 191 '''
192 192 Reads Metadata
193 193 '''
194 194
195 195 meta = {}
196 196
197 197 if self.description:
198 198 for key, value in self.description['Metadata'].items():
199 meta[key] = self.fp[value].value
199 meta[key] = self.fp[value][()]
200 200 else:
201 201 grp = self.fp['Metadata']
202 202 for name in grp:
203 meta[name] = grp[name].value
203 meta[name] = grp[name][()]
204 204
205 205 if self.extras:
206 206 for key, value in self.extras.items():
207 207 meta[key] = value
208 208 self.meta = meta
209 209
210 210 return
211 211
212 212 def __readData(self):
213 213
214 214 data = {}
215 215
216 216 if self.description:
217 217 for key, value in self.description['Data'].items():
218 218 if isinstance(value, str):
219 219 if isinstance(self.fp[value], h5py.Dataset):
220 data[key] = self.fp[value].value
220 data[key] = self.fp[value][()]
221 221 elif isinstance(self.fp[value], h5py.Group):
222 222 array = []
223 223 for ch in self.fp[value]:
224 array.append(self.fp[value][ch].value)
224 array.append(self.fp[value][ch][()])
225 225 data[key] = numpy.array(array)
226 226 elif isinstance(value, list):
227 227 array = []
228 228 for ch in value:
229 array.append(self.fp[ch].value)
229 array.append(self.fp[ch][()])
230 230 data[key] = numpy.array(array)
231 231 else:
232 232 grp = self.fp['Data']
233 233 for name in grp:
234 234 if isinstance(grp[name], h5py.Dataset):
235 array = grp[name].value
235 array = grp[name][()]
236 236 elif isinstance(grp[name], h5py.Group):
237 237 array = []
238 238 for ch in grp[name]:
239 array.append(grp[name][ch].value)
239 array.append(grp[name][ch][()])
240 240 array = numpy.array(array)
241 241 else:
242 242 log.warning('Unknown type: {}'.format(name))
243 243
244 244 if name in self.description:
245 245 key = self.description[name]
246 246 else:
247 247 key = name
248 248 data[key] = array
249 249
250 250 self.data = data
251 251 return
252 252
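The `[()]` indexing introduced throughout __readMetadata and __readData replaces the `Dataset.value` property, which is deprecated and removed in recent h5py releases. A minimal, self-contained illustration with hypothetical file and dataset names:

    import h5py
    import numpy

    with h5py.File('example.hdf5', 'w') as fp:
        fp.create_dataset('Metadata/heightList', data=numpy.arange(10.))

    with h5py.File('example.hdf5', 'r') as fp:
        heights = fp['Metadata/heightList'][()]  # replaces the old fp['...'].value
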
253 253 def getData(self):
254 254
255 255 for attr in self.data:
256 256 if self.data[attr].ndim == 1:
257 257 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
258 258 else:
259 259 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
260 260
261 261 self.dataOut.flagNoData = False
262 262 self.blockIndex += 1
263 263
264 264 log.log("Block No. {}/{} -> {}".format(
265 265 self.blockIndex,
266 266 self.blocksPerFile,
267 267 self.dataOut.datatime.ctime()), self.name)
268 268
269 269 return
270 270
271 271 def run(self, **kwargs):
272 272
273 273 if not(self.isConfig):
274 274 self.setup(**kwargs)
275 275 self.isConfig = True
276 276
277 277 if self.blockIndex == self.blocksPerFile:
278 278 self.setNextFile()
279 279
280 280 self.getData()
281 281
282 282 return
283 283
284 284 @MPDecorator
285 285 class HDFWriter(Operation):
286 286 """Operation to write HDF5 files.
287 287
288 288 By default the HDF5 file contains two groups, Data and Metadata, in which
289 289 any `dataOut` attribute listed in the `dataList` and `metadataList`
290 290 parameters is saved; data attributes are normally time dependent, whereas
291 291 metadata attributes are not.
292 292 The structure of the HDF5 file can be customized with the optional
293 293 description parameter; see the examples below.
294 294
295 295 Parameters:
296 296 -----------
297 297 path : str
298 298 Path where files will be saved.
299 299 blocksPerFile : int
300 300 Number of blocks per file
301 301 metadataList : list
302 302 List of the dataOut attributes that will be saved as metadata
303 303 dataList : list
304 304 List of the dataOut attributes that will be saved as data
305 305 setType : bool
306 306 If set, the file set number is taken from the data timestamp (minute of the day) instead of a sequential counter
307 307 description : dict, optional
308 308 Dictionary with the desired description of the HDF5 file
309 309
310 310 Examples
311 311 --------
312 312
313 313 desc = {
314 314 'data_output': {'winds': ['z', 'w', 'v']},
315 315 'utctime': 'timestamps',
316 316 'heightList': 'heights'
317 317 }
318 318 desc = {
319 319 'data_output': ['z', 'w', 'v'],
320 320 'utctime': 'timestamps',
321 321 'heightList': 'heights'
322 322 }
323 323 desc = {
324 324 'Data': {
325 325 'data_output': 'winds',
326 326 'utctime': 'timestamps'
327 327 },
328 328 'Metadata': {
329 329 'heightList': 'heights'
330 330 }
331 331 }
332 332
333 333 writer = proc_unit.addOperation(name='HDFWriter')
334 334 writer.addParameter(name='path', value='/path/to/file')
335 335 writer.addParameter(name='blocksPerFile', value='32')
336 336 writer.addParameter(name='metadataList', value='heightList,timeZone')
337 337 writer.addParameter(name='dataList',value='data_output,utctime')
338 338 # writer.addParameter(name='description',value=json.dumps(desc))
339 339
340 340 """
341 341
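For orientation, a hedged sketch of the layout the first `desc` example above would produce: each right-hand name in the description replaces the corresponding dataOut attribute name, and a multi-channel entry becomes a group with one dataset per label (the file path below is hypothetical):

    import h5py

    with h5py.File('/path/to/file/d2019010/D2019010000.hdf5', 'r') as fp:
        z = fp['winds/z'][()]      # from {'data_output': {'winds': ['z', 'w', 'v']}}
        t = fp['timestamps'][()]   # renamed from 'utctime'
        h = fp['heights'][()]      # renamed from 'heightList'
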
342 342 ext = ".hdf5"
343 343 optchar = "D"
344 344 filename = None
345 345 path = None
346 346 setFile = None
347 347 fp = None
348 348 firsttime = True
349 349 #Configurations
350 350 blocksPerFile = None
351 351 blockIndex = None
352 352 dataOut = None
353 353 #Data Arrays
354 354 dataList = None
355 355 metadataList = None
356 356 currentDay = None
357 357 lastTime = None
358 358
359 359 def __init__(self):
360 360
361 361 Operation.__init__(self)
362 362 return
363 363
364 364 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
365 365 self.path = path
366 366 self.blocksPerFile = blocksPerFile
367 367 self.metadataList = metadataList
368 368 self.dataList = [s.strip() for s in dataList]
369 369 self.setType = setType
370 370 self.description = description
371 371
372 372 if self.metadataList is None:
373 373 self.metadataList = self.dataOut.metadata_list
374 374
375 375 tableList = []
376 376 dsList = []
377 377
378 378 for i in range(len(self.dataList)):
379 379 dsDict = {}
380 380 if hasattr(self.dataOut, self.dataList[i]):
381 381 dataAux = getattr(self.dataOut, self.dataList[i])
382 382 dsDict['variable'] = self.dataList[i]
383 383 else:
384 384 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
385 385 continue
386 386
387 387 if dataAux is None:
388 388 continue
389 389 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
390 390 dsDict['nDim'] = 0
391 391 else:
392 392 dsDict['nDim'] = len(dataAux.shape)
393 393 dsDict['shape'] = dataAux.shape
394 394 dsDict['dsNumber'] = dataAux.shape[0]
395 395 dsDict['dtype'] = dataAux.dtype
396 396
397 397 dsList.append(dsDict)
398 398
399 399 self.dsList = dsList
400 400 self.currentDay = self.dataOut.datatime.date()
401 401
402 402 def timeFlag(self):
403 403 currentTime = self.dataOut.utctime
404 404 timeTuple = time.localtime(currentTime)
405 405 dataDay = timeTuple.tm_yday
406 406
407 407 if self.lastTime is None:
408 408 self.lastTime = currentTime
409 409 self.currentDay = dataDay
410 410 return False
411 411
412 412 timeDiff = currentTime - self.lastTime
413 413
414 414 # If the day changed or the gap between samples exceeds the limit (3 hours), start a new file
415 415 if dataDay != self.currentDay:
416 416 self.currentDay = dataDay
417 417 return True
418 418 elif timeDiff > 3*60*60:
419 419 self.lastTime = currentTime
420 420 return True
421 421 else:
422 422 self.lastTime = currentTime
423 423 return False
424 424
425 425 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
426 426 dataList=[], setType=None, description={}):
427 427
428 428 self.dataOut = dataOut
429 429 if not(self.isConfig):
430 430 self.setup(path=path, blocksPerFile=blocksPerFile,
431 431 metadataList=metadataList, dataList=dataList,
432 432 setType=setType, description=description)
433 433
434 434 self.isConfig = True
435 435 self.setNextFile()
436 436
437 437 self.putData()
438 438 return
439 439
440 440 def setNextFile(self):
441 441
442 442 ext = self.ext
443 443 path = self.path
444 444 setFile = self.setFile
445 445
446 446 timeTuple = time.localtime(self.dataOut.utctime)
447 447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
448 448 fullpath = os.path.join(path, subfolder)
449 449
450 450 if os.path.exists(fullpath):
451 451 filesList = os.listdir(fullpath)
452 452 filesList = [k for k in filesList if k.startswith(self.optchar)]
453 453 if len( filesList ) > 0:
454 454 filesList = sorted(filesList, key=str.lower)
455 455 filen = filesList[-1]
456 456 # the filename must have the following format
457 457 # 0 1234 567 89A BCDE (hex)
458 458 # x YYYY DDD SSS .ext
459 459 if isNumber(filen[8:11]):
460 460 setFile = int(filen[8:11]) # initialize the set counter with the set number of the last file
461 461 else:
462 462 setFile = -1
463 463 else:
464 464 setFile = -1 # initialize the set counter
465 465 else:
466 466 os.makedirs(fullpath)
467 467 setFile = -1 # initialize the set counter
468 468
469 469 if self.setType is None:
470 470 setFile += 1
471 471 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
472 472 timeTuple.tm_year,
473 473 timeTuple.tm_yday,
474 474 setFile,
475 475 ext )
476 476 else:
477 477 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
478 478 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
479 479 timeTuple.tm_year,
480 480 timeTuple.tm_yday,
481 481 setFile,
482 482 ext )
483 483
484 484 self.filename = os.path.join( path, subfolder, file )
485 485
486 486 #Setting HDF5 File
487 487 self.fp = h5py.File(self.filename, 'w')
488 488 #write metadata
489 489 self.writeMetadata(self.fp)
490 490 #Write data
491 491 self.writeData(self.fp)
492 492
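A worked example of the file name built above, assuming a hypothetical block taken on 2019-01-10 (day of year 10) with a sequential set counter of 0:

    fname = '%s%4.4d%3.3d%03d%s' % ('D', 2019, 10, 0, '.hdf5')  # -> 'D2019010000.hdf5'

With `setType` given, the set number is instead the minute of the day (e.g. 13:05 -> 0785), producing 'D20190100785.hdf5'.
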
493 493 def getLabel(self, name, x=None):
494 494
495 495 if x is None:
496 496 if 'Data' in self.description:
497 497 data = self.description['Data']
498 498 if 'Metadata' in self.description:
499 499 data.update(self.description['Metadata'])
500 500 else:
501 501 data = self.description
502 502 if name in data:
503 503 if isinstance(data[name], str):
504 504 return data[name]
505 505 elif isinstance(data[name], list):
506 506 return None
507 507 elif isinstance(data[name], dict):
508 508 for key, value in data[name].items():
509 509 return key
510 510 return name
511 511 else:
512 512 if 'Metadata' in self.description:
513 513 meta = self.description['Metadata']
514 514 else:
515 515 meta = self.description
516 516 if name in meta:
517 517 if isinstance(meta[name], list):
518 518 return meta[name][x]
519 519 elif isinstance(meta[name], dict):
520 520 for key, value in meta[name].items():
521 521 return value[x]
522 522 if 'cspc' in name:
523 523 return 'pair{:02d}'.format(x)
524 524 else:
525 525 return 'channel{:02d}'.format(x)
526 526
527 527 def writeMetadata(self, fp):
528 528
529 529 if self.description:
530 530 if 'Metadata' in self.description:
531 531 grp = fp.create_group('Metadata')
532 532 else:
533 533 grp = fp
534 534 else:
535 535 grp = fp.create_group('Metadata')
536 536
537 537 for i in range(len(self.metadataList)):
538 538 if not hasattr(self.dataOut, self.metadataList[i]):
539 539 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
540 540 continue
541 541 value = getattr(self.dataOut, self.metadataList[i])
542 542 if isinstance(value, bool):
543 543 if value is True:
544 544 value = 1
545 545 else:
546 546 value = 0
547 547 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
548 548 return
549 549
550 550 def writeData(self, fp):
551 551
552 552 if self.description:
553 553 if 'Data' in self.description:
554 554 grp = fp.create_group('Data')
555 555 else:
556 556 grp = fp
557 557 else:
558 558 grp = fp.create_group('Data')
559 559
560 560 dtsets = []
561 561 data = []
562 562
563 563 for dsInfo in self.dsList:
564 564 if dsInfo['nDim'] == 0:
565 565 ds = grp.create_dataset(
566 566 self.getLabel(dsInfo['variable']),
567 567 (self.blocksPerFile, ),
568 568 chunks=True,
569 569 dtype=numpy.float64)
570 570 dtsets.append(ds)
571 571 data.append((dsInfo['variable'], -1))
572 572 else:
573 573 label = self.getLabel(dsInfo['variable'])
574 574 if label is not None:
575 575 sgrp = grp.create_group(label)
576 576 else:
577 577 sgrp = grp
578 578 for i in range(dsInfo['dsNumber']):
579 579 ds = sgrp.create_dataset(
580 580 self.getLabel(dsInfo['variable'], i),
581 581 (self.blocksPerFile, ) + dsInfo['shape'][1:],
582 582 chunks=True,
583 583 dtype=dsInfo['dtype'])
584 584 dtsets.append(ds)
585 585 data.append((dsInfo['variable'], i))
586 586 fp.flush()
587 587
588 588 log.log('Creating file: {}'.format(fp.filename), self.name)
589 589
590 590 self.ds = dtsets
591 591 self.data = data
592 592 self.firsttime = True
593 593 self.blockIndex = 0
594 594 return
595 595
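A self-contained sketch (hypothetical sizes and names) of the layout writeData pre-allocates for a 2-D attribute: one dataset per channel, each with blocksPerFile rows that putData later fills block by block:

    import h5py
    import numpy

    blocksPerFile, n_channels, n_heights = 32, 3, 100
    with h5py.File('example.hdf5', 'w') as fp:
        sgrp = fp.create_group('Data').create_group('data_spc')
        for i in range(n_channels):
            sgrp.create_dataset('channel{:02d}'.format(i),
                                (blocksPerFile, n_heights),
                                chunks=True, dtype=numpy.float32)
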
596 596 def putData(self):
597 597
598 598 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
599 599 self.closeFile()
600 600 self.setNextFile()
601 601
602 602 for i, ds in enumerate(self.ds):
603 603 attr, ch = self.data[i]
604 604 if ch == -1:
605 605 ds[self.blockIndex] = getattr(self.dataOut, attr)
606 606 else:
607 607 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
608 608
609 609 self.fp.flush()
610 610 self.blockIndex += 1
611 611 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
612 612
613 613 return
614 614
615 615 def closeFile(self):
616 616
617 617 if self.blockIndex != self.blocksPerFile:
618 618 for ds in self.ds:
619 619 ds.resize(self.blockIndex, axis=0)
620 620
621 621 if self.fp:
622 622 self.fp.flush()
623 623 self.fp.close()
624 624
625 625 def close(self):
626 626
627 627 self.closeFile()
@@ -1,413 +1,400
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 import copy
9 8 import datetime
10 9 import time
11 from time import gmtime
12 10
13 from numpy import transpose
14
15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
16 12 from schainpy.model.data.jrodata import Parameters
17 13
18 14
19 15 class BLTRParametersProc(ProcessingUnit):
20 16 '''
21 17 Processing unit for BLTR parameters data (winds)
22 18
23 19 Inputs:
24 20 self.dataOut.nmodes - Number of operation modes
25 21 self.dataOut.nchannels - Number of channels
26 22 self.dataOut.nranges - Number of ranges
27 23
28 24 self.dataOut.data_snr - SNR array
29 25 self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
30 26 self.dataOut.height - Height array (km)
31 27 self.dataOut.time - Time array (seconds)
32 28
33 29 self.dataOut.fileIndex - Index of the file currently read
34 30 self.dataOut.lat - Latitude coordinate of BLTR location
35 31
36 32 self.dataOut.doy - Experiment doy (number of the day in the current year)
37 33 self.dataOut.month - Experiment month
38 34 self.dataOut.day - Experiment day
39 35 self.dataOut.year - Experiment year
40 36 '''
41 37
42 38 def __init__(self):
43 39 '''
44 40 Inputs: None
45 41 '''
46 42 ProcessingUnit.__init__(self)
47 43 self.dataOut = Parameters()
48 44
49 45 def setup(self, mode):
50 46 '''
51 47 '''
52 48 self.dataOut.mode = mode
53 49
54 50 def run(self, mode, snr_threshold=None):
55 51 '''
56 52 Inputs:
57 53 mode = High resolution (0) or Low resolution (1) data
58 54 snr_threshold = snr filter value
59 55 '''
60 56
61 57 if not self.isConfig:
62 58 self.setup(mode)
63 59 self.isConfig = True
64 60
65 61 if self.dataIn.type == 'Parameters':
66 62 self.dataOut.copy(self.dataIn)
67 63
68 64 self.dataOut.data_param = self.dataOut.data[mode]
69 65 self.dataOut.heightList = self.dataOut.height[0]
70 66 self.dataOut.data_snr = self.dataOut.data_snr[mode]
71
72 data_param = numpy.zeros([4, len(self.dataOut.heightList)])
73
74 67 SNRavg = numpy.average(self.dataOut.data_snr, axis=0)
75 68 SNRavgdB = 10*numpy.log10(SNRavg)
69 self.dataOut.data_snr_avg_db = SNRavgdB.reshape(1, *SNRavgdB.shape)
70
76 71 # Censoring Data
77 72 if snr_threshold is not None:
78 73 for i in range(3):
79 74 self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan
80 # Including AvgSNR in data_param
81 for i in range(data_param.shape[0]):
82 if i == 3:
83 data_param[i] = SNRavgdB
84 else:
85 data_param[i] = self.dataOut.data_param[i]
86
87 self.dataOut.data_param = data_param
88 75
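A standalone, hedged illustration of the censoring step above, with hypothetical numbers: the SNR is averaged over channels, converted to dB, and heights at or below the threshold are blanked in every wind component:

    import numpy

    snr = numpy.array([[2.0, 0.5, 10.0],     # (channels, heights), hypothetical values
                       [4.0, 0.5, 8.0]])
    snr_avg_db = 10 * numpy.log10(numpy.average(snr, axis=0))
    winds = numpy.ones((3, snr.shape[1]))    # zonal, meridional, vertical
    winds[:, snr_avg_db <= 5.0] = numpy.nan  # censor low-SNR heights (threshold = 5 dB)
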
89 76 # TODO
90 77
91 78 class OutliersFilter(Operation):
92 79
93 80 def __init__(self):
94 81 '''
95 82 '''
96 83 Operation.__init__(self)
97 84
98 85 def run(self, svalue, svalue2, method, factor, filter, npoints=9):
99 86 '''
100 87 Inputs:
101 88 svalue - string to select array velocity
102 89 svalue2 - string to choose axis filtering
103 90 method - 0 for SMOOTH or 1 for MEDIAN
104 91 factor - number used to set threshold
105 92 filter - 1 for data filtering using the standard deviation criteria else 0
106 93 npoints - number of points for mask filter
107 94 '''
108 95
109 96 print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor))
110 97
111 98
112 99 yaxis = self.dataOut.heightList
113 100 xaxis = numpy.array([[self.dataOut.utctime]])
114 101
115 102 # Zonal
116 103 value_temp = self.dataOut.data_output[0]
117 104
118 105 # Meridional
119 106 value_temp = self.dataOut.data_output[1]
120 107
121 108 # Vertical
122 109 value_temp = numpy.transpose(self.dataOut.data_output[2])
123 110
124 111 htemp = yaxis
125 112 std = value_temp
126 113 for h in range(len(htemp)):
127 114 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
128 115 minvalid = npoints
129 116
130 117 # only if the number of valid values is greater than the minimum required
131 118 if nvalues_valid > minvalid:
132 119
133 120 if method == 0:
134 121 #SMOOTH
135 122 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
136 123
137 124
138 125 if method == 1:
139 126 #MEDIAN
140 127 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
141 128
142 129 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
143 130
144 131 threshold = dw*factor
145 132 value_temp[numpy.where(w > threshold),h] = numpy.nan
146 133 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
147 134
148 135
149 136 #At the end
150 137 if svalue2 == 'inHeight':
151 138 value_temp = numpy.transpose(value_temp)
152 139 output_array[:,m] = value_temp
153 140
154 141 if svalue == 'zonal':
155 142 self.dataOut.data_output[0] = output_array
156 143
157 144 elif svalue == 'meridional':
158 145 self.dataOut.data_output[1] = output_array
159 146
160 147 elif svalue == 'vertical':
161 148 self.dataOut.data_output[2] = output_array
162 149
163 150 return self.dataOut.data_output
164 151
165 152
166 153 def Median(self,input,width):
167 154 '''
168 155 Inputs:
169 156 input - Velocity array
170 157 width - Number of points for mask filter
171 158
172 159 '''
173 160
174 161 if numpy.mod(width,2) == 1:
175 162 pc = int((width - 1) / 2)
176 163 cont = 0
177 164 output = []
178 165
179 166 for i in range(len(input)):
180 167 if i >= pc and i < len(input) - pc:
181 168 new2 = input[i-pc:i+pc+1]
182 169 temp = numpy.where(numpy.isfinite(new2))
183 170 new = new2[temp]
184 171 value = numpy.median(new)
185 172 output.append(value)
186 173
187 174 output = numpy.array(output)
188 175 output = numpy.hstack((input[0:pc],output))
189 176 output = numpy.hstack((output,input[-pc:len(input)]))
190 177
191 178 return output
192 179
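A self-contained, hedged restatement of the NaN-aware running median implemented above (odd width; edge samples pass through unchanged), handy for checking its behaviour on hypothetical data:

    import numpy

    def running_median(values, width=3):
        pc = (width - 1) // 2
        core = []
        for i in range(pc, len(values) - pc):
            window = values[i - pc:i + pc + 1]
            core.append(numpy.median(window[numpy.isfinite(window)]))
        return numpy.hstack((values[:pc], core, values[-pc:]))

    print(running_median(numpy.array([1., 2., numpy.nan, 40., 5., 6.])))
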
193 180 def Smooth(self,input,width,edge_truncate = None):
194 181 '''
195 182 Inputs:
196 183 input - Velocity array
197 184 width - Number of points for mask filter
199 186 edge_truncate - 1 to use a same-length convolution at the edges; otherwise the edge samples are copied from the input
199 186
200 187 '''
201 188
202 189 if numpy.mod(width,2) == 0:
203 190 real_width = width + 1
204 191 nzeros = width / 2
205 192 else:
206 193 real_width = width
207 194 nzeros = (width - 1) / 2
208 195
209 196 half_width = int(real_width / 2)
210 197 length = len(input)
211 198
212 199 gate = numpy.ones(real_width,dtype='float')
213 200 norm_of_gate = numpy.sum(gate)
214 201
215 202 nan_process = 0
216 203 nan_id = numpy.where(numpy.isnan(input))
217 204 if len(nan_id[0]) > 0:
218 205 nan_process = 1
219 206 pb = numpy.zeros(len(input))
220 207 pb[nan_id] = 1.
221 208 input[nan_id] = 0.
222 209
223 210 if edge_truncate == True:
224 211 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
225 212 elif edge_truncate == False or edge_truncate == None:
226 213 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
227 214 output = numpy.hstack((input[0:half_width],output))
228 215 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
229 216
230 217 if nan_process:
231 218 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
232 219 pb = numpy.hstack((numpy.zeros(half_width),pb))
233 220 pb = numpy.hstack((pb,numpy.zeros(half_width)))
234 221 output[numpy.where(pb > 0.9999)] = numpy.nan
235 222 input[nan_id] = numpy.nan
236 223 return output
237 224
238 225 def Average(self,aver=0,nhaver=1):
239 226 '''
240 227 Inputs:
241 228 aver - Indicates the time period over which is averaged or consensus data
242 229 nhaver - Indicates the decimation factor in heights
243 230
244 231 '''
245 232 nhpoints = 48
246 233
247 234 lat_piura = -5.17
248 235 lat_huancayo = -12.04
249 236 lat_porcuya = -5.8
250 237
251 238 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
252 239 hcm = 3.
253 240 if self.dataOut.year == 2003 :
254 241 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
255 242 nhpoints = 12
256 243
257 244 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
258 245 hcm = 3.
259 246 if self.dataOut.year == 2003 :
260 247 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
261 248 nhpoints = 12
262 249
263 250
264 251 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
265 252 hcm = 5.#2
266 253
267 254 pdata = 0.2
268 255 taver = [1,2,3,4,6,8,12,24]
269 256 t0 = 0
270 257 tf = 24
271 258 ntime =(tf-t0)/taver[aver]
272 259 ti = numpy.arange(ntime)
273 260 tf = numpy.arange(ntime) + taver[aver]
274 261
275 262
276 263 old_height = self.dataOut.heightList
277 264
278 265 if nhaver > 1:
279 266 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
280 267 deltha = 0.05*nhaver
281 268 minhvalid = pdata*nhaver
282 269 for im in range(self.dataOut.nmodes):
283 270 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
284 271
285 272
286 273 data_fHeigths_List = []
287 274 data_fZonal_List = []
288 275 data_fMeridional_List = []
289 276 data_fVertical_List = []
290 277 startDTList = []
291 278
292 279
293 280 for i in range(ntime):
294 281 height = old_height
295 282
296 283 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
297 284 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
298 285
299 286
300 287 limit_sec1 = time.mktime(start.timetuple())
301 288 limit_sec2 = time.mktime(stop.timetuple())
302 289
303 290 t1 = numpy.where(self.f_timesec >= limit_sec1)
304 291 t2 = numpy.where(self.f_timesec < limit_sec2)
305 292 time_select = []
306 293 for val_sec in t1[0]:
307 294 if val_sec in t2[0]:
308 295 time_select.append(val_sec)
309 296
310 297
311 298 time_select = numpy.array(time_select,dtype = 'int')
312 299 minvalid = numpy.ceil(pdata*nhpoints)
313 300
314 301 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
315 302 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
316 303 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
317 304
318 305 if nhaver > 1:
319 306 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
320 307 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
321 308 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
322 309
323 310 if len(time_select) > minvalid:
324 311 time_average = self.f_timesec[time_select]
325 312
326 313 for im in range(self.dataOut.nmodes):
327 314
328 315 for ih in range(self.dataOut.nranges):
329 316 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
330 317 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
331 318
332 319 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
333 320 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
334 321
335 322 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
336 323 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
337 324
338 325 if nhaver > 1:
339 326 for ih in range(num_hei):
340 327 hvalid = numpy.arange(nhaver) + nhaver*ih
341 328
342 329 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
343 330 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
344 331
345 332 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
346 333 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
347 334
348 335 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
349 336 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
350 337 if nhaver > 1:
351 338 zon_aver = new_zon_aver
352 339 mer_aver = new_mer_aver
353 340 ver_aver = new_ver_aver
354 341 height = new_height
355 342
356 343
357 344 tstart = time_average[0]
358 345 tend = time_average[-1]
359 346 startTime = time.gmtime(tstart)
360 347
361 348 year = startTime.tm_year
362 349 month = startTime.tm_mon
363 350 day = startTime.tm_mday
364 351 hour = startTime.tm_hour
365 352 minute = startTime.tm_min
366 353 second = startTime.tm_sec
367 354
368 355 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
369 356
370 357
371 358 o_height = numpy.array([])
372 359 o_zon_aver = numpy.array([])
373 360 o_mer_aver = numpy.array([])
374 361 o_ver_aver = numpy.array([])
375 362 if self.dataOut.nmodes > 1:
376 363 for im in range(self.dataOut.nmodes):
377 364
378 365 if im == 0:
379 366 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
380 367 else:
381 368 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
382 369
383 370
384 371 ht = h_select[0]
385 372
386 373 o_height = numpy.hstack((o_height,height[im,ht]))
387 374 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
388 375 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
389 376 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
390 377
391 378 data_fHeigths_List.append(o_height)
392 379 data_fZonal_List.append(o_zon_aver)
393 380 data_fMeridional_List.append(o_mer_aver)
394 381 data_fVertical_List.append(o_ver_aver)
395 382
396 383
397 384 else:
398 385 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
399 386 ht = h_select[0]
400 387 o_height = numpy.hstack((o_height,height[im,ht]))
401 388 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
402 389 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
403 390 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
404 391
405 392 data_fHeigths_List.append(o_height)
406 393 data_fZonal_List.append(o_zon_aver)
407 394 data_fMeridional_List.append(o_mer_aver)
408 395 data_fVertical_List.append(o_ver_aver)
409 396
410 397
411 398 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
412 399
413 400