PulsePairByblock
rflores - r1438:7e8668e1fc32

@@ -1,1082 +1,1865 @@
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
7 7 from schainpy.utils import log
8 8 # libreria wradlib
9 9 import wradlib as wrl
10 10
11 11 EARTH_RADIUS = 6.3710e3
12 12
13 13
14 14 def ll2xy(lat1, lon1, lat2, lon2):
15 15
16 16 p = 0.017453292519943295
17 17 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
18 18 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
19 19 r = 12742 * numpy.arcsin(numpy.sqrt(a))
20 20 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
21 21 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
22 22 theta = -theta + numpy.pi/2
23 23 return r*numpy.cos(theta), r*numpy.sin(theta)
24 24
25 25
26 26 def km2deg(km):
27 27 '''
28 28 Convert distance in km to degrees
29 29 '''
30 30
31 31 return numpy.rad2deg(km/EARTH_RADIUS)
32 32
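# Editor's note (illustrative sketch, not part of the original module): ll2xy applies the
# haversine formula (12742 km is the Earth's diameter) to get the great-circle distance
# between two lat/lon points and then resolves it into east/north components, while
# km2deg converts a ground distance into degrees of arc using EARTH_RADIUS.
# A minimal usage sketch with made-up coordinates:
#
#   x, y = ll2xy(-12.0, -77.0, -11.9, -76.9)   # km east / km north of point 1
#   deg = km2deg(15.0)                         # about 0.135 degrees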
33 33
34 34
35 35 class SpectralMomentsPlot(SpectraPlot):
36 36 '''
37 37 Plot for Spectral Moments
38 38 '''
39 39 CODE = 'spc_moments'
40 40 # colormap = 'jet'
41 41 # plot_type = 'pcolor'
42 42
43 43 class DobleGaussianPlot(SpectraPlot):
44 44 '''
45 45 Plot for Double Gaussian Plot
46 46 '''
47 47 CODE = 'gaussian_fit'
48 48 # colormap = 'jet'
49 49 # plot_type = 'pcolor'
50 50
51 51 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
52 52 '''
53 53 Plot SpectraCut with Double Gaussian Fit
54 54 '''
55 55 CODE = 'cut_gaussian_fit'
56 56
57 57 class SnrPlot(RTIPlot):
58 58 '''
59 59 Plot for SNR Data
60 60 '''
61 61
62 62 CODE = 'snr'
63 63 colormap = 'jet'
64 64
65 65 def update(self, dataOut):
66 66
67 67 data = {
68 68 'snr': 10*numpy.log10(dataOut.data_snr)
69 69 }
70 70
71 71 return data, {}
72 72
73 73 class DopplerPlot(RTIPlot):
74 74 '''
75 75 Plot for DOPPLER Data (1st moment)
76 76 '''
77 77
78 78 CODE = 'dop'
79 79 colormap = 'jet'
80 80
81 81 def update(self, dataOut):
82 82
83 83 data = {
84 84 'dop': 10*numpy.log10(dataOut.data_dop)
85 85 }
86 86
87 87 return data, {}
88 88
89 89 class PowerPlot(RTIPlot):
90 90 '''
91 91 Plot for Power Data (0 moment)
92 92 '''
93 93
94 94 CODE = 'pow'
95 95 colormap = 'jet'
96 96
97 97 def update(self, dataOut):
98 98 data = {
99 99 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
100 100 }
101 101 return data, {}
102 102
103 103 class SpectralWidthPlot(RTIPlot):
104 104 '''
105 105 Plot for Spectral Width Data (2nd moment)
106 106 '''
107 107
108 108 CODE = 'width'
109 109 colormap = 'jet'
110 110
111 111 def update(self, dataOut):
112 112
113 113 data = {
114 114 'width': dataOut.data_width
115 115 }
116 116
117 117 return data, {}
118 118
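# Editor's note (illustrative sketch, not part of the original module): the RTI subclasses
# above share one pattern: update() maps a dataOut attribute into a dict keyed by CODE
# (power-like quantities are converted to dB with 10*numpy.log10) while the drawing itself
# is inherited from RTIPlot. A hypothetical subclass following the same pattern
# (data_noise is an assumed attribute, not taken from this file):
#
#   class NoisePlot(RTIPlot):
#       CODE = 'noise'
#       colormap = 'jet'
#
#       def update(self, dataOut):
#           return {'noise': 10*numpy.log10(dataOut.data_noise)}, {}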
119 119 class SkyMapPlot(Plot):
120 120 '''
121 121 Plot for meteors detection data
122 122 '''
123 123
124 124 CODE = 'param'
125 125
126 126 def setup(self):
127 127
128 128 self.ncols = 1
129 129 self.nrows = 1
130 130 self.width = 7.2
131 131 self.height = 7.2
132 132 self.nplots = 1
133 133 self.xlabel = 'Zonal Zenith Angle (deg)'
134 134 self.ylabel = 'Meridional Zenith Angle (deg)'
135 135 self.polar = True
136 136 self.ymin = -180
137 137 self.ymax = 180
138 138 self.colorbar = False
139 139
140 140 def plot(self):
141 141
142 142 arrayParameters = numpy.concatenate(self.data['param'])
143 143 error = arrayParameters[:, -1]
144 144 indValid = numpy.where(error == 0)[0]
145 145 finalMeteor = arrayParameters[indValid, :]
146 146 finalAzimuth = finalMeteor[:, 3]
147 147 finalZenith = finalMeteor[:, 4]
148 148
149 149 x = finalAzimuth * numpy.pi / 180
150 150 y = finalZenith
151 151
152 152 ax = self.axes[0]
153 153
154 154 if ax.firsttime:
155 155 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
156 156 else:
157 157 ax.plot.set_data(x, y)
158 158
159 159 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
160 160 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
161 161 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
162 162 dt2,
163 163 len(x))
164 164 self.titles[0] = title
165 165
166 166
167 167 class GenericRTIPlot(Plot):
168 168 '''
169 169 Plot for data_xxxx object
170 170 '''
171 171
172 172 CODE = 'param'
173 173 colormap = 'viridis'
174 174 plot_type = 'pcolorbuffer'
175 175
176 176 def setup(self):
177 177 self.xaxis = 'time'
178 178 self.ncols = 1
179 179 self.nrows = self.data.shape('param')[0]
180 180 self.nplots = self.nrows
181 181 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
182 182
183 183 if not self.xlabel:
184 184 self.xlabel = 'Time'
185 185
186 186 self.ylabel = 'Range [km]'
187 187 if not self.titles:
188 188 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
189 189
190 190 def update(self, dataOut):
191 191
192 192 data = {
193 193 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
194 194 }
195 195
196 196 meta = {}
197 197
198 198 return data, meta
199 199
200 200 def plot(self):
201 201 # self.data.normalize_heights()
202 202 self.x = self.data.times
203 203 self.y = self.data.yrange
204 204 self.z = self.data['param']
205 205 self.z = 10*numpy.log10(self.z)
206 206 self.z = numpy.ma.masked_invalid(self.z)
207 207
208 208 if self.decimation is None:
209 209 x, y, z = self.fill_gaps(self.x, self.y, self.z)
210 210 else:
211 211 x, y, z = self.fill_gaps(*self.decimate())
212 212
213 213 for n, ax in enumerate(self.axes):
214 214
215 215 self.zmax = self.zmax if self.zmax is not None else numpy.max(
216 216 self.z[n])
217 217 self.zmin = self.zmin if self.zmin is not None else numpy.min(
218 218 self.z[n])
219 219
220 220 if ax.firsttime:
221 221 if self.zlimits is not None:
222 222 self.zmin, self.zmax = self.zlimits[n]
223 223
224 224 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
225 225 vmin=self.zmin,
226 226 vmax=self.zmax,
227 227 cmap=self.cmaps[n]
228 228 )
229 229 else:
230 230 if self.zlimits is not None:
231 231 self.zmin, self.zmax = self.zlimits[n]
232 232 ax.collections.remove(ax.collections[0])
233 233 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
234 234 vmin=self.zmin,
235 235 vmax=self.zmax,
236 236 cmap=self.cmaps[n]
237 237 )
238 238
239 239
240 240 class PolarMapPlot(Plot):
241 241 '''
242 242 Plot for weather radar
243 243 '''
244 244
245 245 CODE = 'param'
246 246 colormap = 'seismic'
247 247
248 248 def setup(self):
249 249 self.ncols = 1
250 250 self.nrows = 1
251 251 self.width = 9
252 252 self.height = 8
253 253 self.mode = self.data.meta['mode']
254 254 if self.channels is not None:
255 255 self.nplots = len(self.channels)
256 256 self.nrows = len(self.channels)
257 257 else:
258 258 self.nplots = self.data.shape(self.CODE)[0]
259 259 self.nrows = self.nplots
260 260 self.channels = list(range(self.nplots))
261 261 if self.mode == 'E':
262 262 self.xlabel = 'Longitude'
263 263 self.ylabel = 'Latitude'
264 264 else:
265 265 self.xlabel = 'Range (km)'
266 266 self.ylabel = 'Height (km)'
267 267 self.bgcolor = 'white'
268 268 self.cb_labels = self.data.meta['units']
269 269 self.lat = self.data.meta['latitude']
270 270 self.lon = self.data.meta['longitude']
271 271 self.xmin, self.xmax = float(
272 272 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
273 273 self.ymin, self.ymax = float(
274 274 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
275 275 # self.polar = True
276 276
277 277 def plot(self):
278 278
279 279 for n, ax in enumerate(self.axes):
280 280 data = self.data['param'][self.channels[n]]
281 281
282 282 zeniths = numpy.linspace(
283 283 0, self.data.meta['max_range'], data.shape[1])
284 284 if self.mode == 'E':
285 285 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
286 286 r, theta = numpy.meshgrid(zeniths, azimuths)
287 287 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
288 288 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
289 289 x = km2deg(x) + self.lon
290 290 y = km2deg(y) + self.lat
291 291 else:
292 292 azimuths = numpy.radians(self.data.yrange)
293 293 r, theta = numpy.meshgrid(zeniths, azimuths)
294 294 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
295 295 self.y = zeniths
296 296
297 297 if ax.firsttime:
298 298 if self.zlimits is not None:
299 299 self.zmin, self.zmax = self.zlimits[n]
300 300 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
301 301 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
302 302 vmin=self.zmin,
303 303 vmax=self.zmax,
304 304 cmap=self.cmaps[n])
305 305 else:
306 306 if self.zlimits is not None:
307 307 self.zmin, self.zmax = self.zlimits[n]
308 308 ax.collections.remove(ax.collections[0])
309 309 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
310 310 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
311 311 vmin=self.zmin,
312 312 vmax=self.zmax,
313 313 cmap=self.cmaps[n])
314 314
315 315 if self.mode == 'A':
316 316 continue
317 317
318 318 # plot district names
319 319 f = open('/data/workspace/schain_scripts/distrito.csv')
320 320 for line in f:
321 321 label, lon, lat = [s.strip() for s in line.split(',') if s]
322 322 lat = float(lat)
323 323 lon = float(lon)
324 324 # ax.plot(lon, lat, '.b', ms=2)
325 325 ax.text(lon, lat, label.decode('utf8'), ha='center',
326 326 va='bottom', size='8', color='black')
327 327
328 328 # plot limites
329 329 limites = []
330 330 tmp = []
331 331 for line in open('/data/workspace/schain_scripts/lima.csv'):
332 332 if '#' in line:
333 333 if tmp:
334 334 limites.append(tmp)
335 335 tmp = []
336 336 continue
337 337 values = line.strip().split(',')
338 338 tmp.append((float(values[0]), float(values[1])))
339 339 for points in limites:
340 340 ax.add_patch(
341 341 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
342 342
343 343 # plot Cuencas
344 344 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
345 345 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
346 346 values = [line.strip().split(',') for line in f]
347 347 points = [(float(s[0]), float(s[1])) for s in values]
348 348 ax.add_patch(Polygon(points, ec='b', fc='none'))
349 349
350 350 # plot grid
351 351 for r in (15, 30, 45, 60):
352 352 ax.add_artist(plt.Circle((self.lon, self.lat),
353 353 km2deg(r), color='0.6', fill=False, lw=0.2))
354 354 ax.text(
355 355 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
356 356 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
357 357 '{}km'.format(r),
358 358 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
359 359
360 360 if self.mode == 'E':
361 361 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
362 362 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
363 363 else:
364 364 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
365 365 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
366 366
367 367 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
368 368 self.titles = ['{} {}'.format(
369 369 self.data.parameters[x], title) for x in self.channels]
370 370
371 371 class WeatherPlot(Plot):
372 372 CODE = 'weather'
373 373 plot_name = 'weather'
374 374 plot_type = 'ppistyle'
375 375 buffering = False
376 376
377 377 def setup(self):
378 378 self.ncols = 1
379 379 self.nrows = 1
380 380 self.width =8
381 381 self.height =8
382 382 self.nplots= 1
383 383 self.ylabel= 'Range [Km]'
384 384 self.titles= ['Weather']
385 385 self.colorbar=False
386 386 self.ini =0
387 387 self.len_azi =0
388 388 self.buffer_ini = None
389 389 self.buffer_azi = None
390 390 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
391 391 self.flag =0
392 392 self.indicador= 0
393 393 self.last_data_azi = None
394 394 self.val_mean = None
395 395
396 396 def update(self, dataOut):
397 397
398 398 data = {}
399 399 meta = {}
400 400 if hasattr(dataOut, 'dataPP_POWER'):
401 401 factor = 1
402 402 if hasattr(dataOut, 'nFFTPoints'):
403 403 factor = dataOut.normFactor
404 404 #print("DIME EL SHAPE PORFAVOR",dataOut.data_360.shape)
405 405 data['weather'] = 10*numpy.log10(dataOut.data_360[1]/(factor))
406 406 data['azi'] = dataOut.data_azi
407 407 data['ele'] = dataOut.data_ele
408 408 return data, meta
409 409
410 410 def get2List(self,angulos):
411 411 list1=[]
412 412 list2=[]
413 413 for i in reversed(range(len(angulos))):
414 414 diff_ = angulos[i]-angulos[i-1]
415 415 if diff_ >1.5:
416 416 list1.append(i-1)
417 417 list2.append(diff_)
418 418 return list(reversed(list1)),list(reversed(list2))
419 419
420 420 def fixData360(self,list_,ang_):
421 421 if list_[0]==-1:
422 422 vec = numpy.where(ang_<ang_[0])
423 423 ang_[vec] = ang_[vec]+360
424 424 return ang_
425 425 return ang_
426 426
427 427 def fixData360HL(self,angulos):
428 428 vec = numpy.where(angulos>=360)
429 429 angulos[vec]=angulos[vec]-360
430 430 return angulos
431 431
432 432 def search_pos(self,pos,list_):
433 433 for i in range(len(list_)):
434 434 if pos == list_[i]:
435 435 return True,i
436 436 i=None
437 437 return False,i
438 438
439 439 def fixDataComp(self,ang_,list1_,list2_):
440 440 size = len(ang_)
441 441 size2 = 0
442 442 for i in range(len(list2_)):
443 443 size2=size2+round(list2_[i])-1
444 444 new_size= size+size2
445 445 ang_new = numpy.zeros(new_size)
446 446 ang_new2 = numpy.zeros(new_size)
447 447
448 448 tmp = 0
449 449 c = 0
450 450 for i in range(len(ang_)):
451 451 ang_new[tmp +c] = ang_[i]
452 452 ang_new2[tmp+c] = ang_[i]
453 453 condition , value = self.search_pos(i,list1_)
454 454 if condition:
455 455 pos = tmp + c + 1
456 456 for k in range(round(list2_[value])-1):
457 457 ang_new[pos+k] = ang_new[pos+k-1]+1
458 458 ang_new2[pos+k] = numpy.nan
459 459 tmp = pos +k
460 460 c = 0
461 461 c=c+1
462 462 return ang_new,ang_new2
463 463
464 464 def globalCheckPED(self,angulos):
465 465 l1,l2 = self.get2List(angulos)
466 466 if len(l1)>0:
467 467 angulos2 = self.fixData360(list_=l1,ang_=angulos)
468 468 l1,l2 = self.get2List(angulos2)
469 469
470 470 ang1_,ang2_ = self.fixDataComp(ang_=angulos2,list1_=l1,list2_=l2)
471 471 ang1_ = self.fixData360HL(ang1_)
472 472 ang2_ = self.fixData360HL(ang2_)
473 473 else:
474 474 ang1_= angulos
475 475 ang2_= angulos
476 476 return ang1_,ang2_
477 477
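# Editor's note (summary added for clarity, not part of the original module):
# get2List finds the indices where consecutive azimuth samples jump by more than
# 1.5 degrees, fixDataComp fills each gap with 1-degree interpolated angles while
# returning a parallel copy that holds NaN at the inserted positions, and
# globalCheckPED ties both together, unwrapping angles across the 0/360 boundary
# with fixData360/fixData360HL.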
478 478 def analizeDATA(self,data_azi):
479 479 list1 = []
480 480 list2 = []
481 481 dat = data_azi
482 482 for i in reversed(range(1,len(dat))):
483 483 if dat[i]>dat[i-1]:
484 484 diff = int(dat[i])-int(dat[i-1])
485 485 else:
486 486 diff = 360+int(dat[i])-int(dat[i-1])
487 487 if diff > 1:
488 488 list1.append(i-1)
489 489 list2.append(diff-1)
490 490 return list1,list2
491 491
492 492 def fixDATANEW(self,data_azi,data_weather):
493 493 list1,list2 = self.analizeDATA(data_azi)
494 494 if len(list1)== 0:
495 495 return data_azi,data_weather
496 496 else:
497 497 resize = 0
498 498 for i in range(len(list2)):
499 499 resize= resize + list2[i]
500 500 new_data_azi = numpy.resize(data_azi,resize)
501 501 new_data_weather= numpy.resize(data_weather,resize)
502 502
503 503 for i in range(len(list2)):
504 504 j=0
505 505 position=list1[i]+1
506 506 for j in range(list2[i]):
507 507 new_data_azi[position+j]=new_data_azi[position+j-1]+1
508 508 return new_data_azi
509 509
510 510 def fixDATA(self,data_azi):
511 511 data=data_azi
512 512 for i in range(len(data)):
513 513 if numpy.isnan(data[i]):
514 514 data[i]=data[i-1]+1
515 515 return data
516 516
517 517 def replaceNAN(self,data_weather,data_azi,val):
518 518 data= data_azi
519 519 data_T= data_weather
520 520 if data.shape[0]> data_T.shape[0]:
521 521 data_N = numpy.ones( [data.shape[0],data_T.shape[1]])
522 522 c = 0
523 523 for i in range(len(data)):
524 524 if numpy.isnan(data[i]):
525 525 data_N[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
526 526 else:
527 527 data_N[i,:]=data_T[c,:]
528 528 c=c+1
529 529 return data_N
530 530 else:
531 531 for i in range(len(data)):
532 532 if numpy.isnan(data[i]):
533 533 data_T[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
534 534 return data_T
535 535
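# Editor's note (summary added for clarity, not part of the original module):
# replaceNAN aligns the weather matrix with the (possibly NaN-padded) azimuth
# vector: wherever the azimuth sample is NaN (an inserted gap angle) the matching
# row of the output is filled with NaN, and the real rows keep their original order.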
536 536 def const_ploteo(self,data_weather,data_azi,step,res):
537 537 if self.ini==0:
538 538 #-------
539 539 n = (360/res)-len(data_azi)
540 540 #--------------------- new -------------------------
541 541 data_azi_new ,data_azi_old= self.globalCheckPED(data_azi)
542 542 #------------------------
543 543 start = data_azi_new[-1] + res
544 544 end = data_azi_new[0] - res
545 545 #------ new
546 546 self.last_data_azi = end
547 547 if start>end:
548 548 end = end + 360
549 549 azi_vacia = numpy.linspace(start,end,int(n))
550 550 azi_vacia = numpy.where(azi_vacia>360,azi_vacia-360,azi_vacia)
551 551 data_azi = numpy.hstack((data_azi_new,azi_vacia))
552 552 # RADAR
553 553 val_mean = numpy.mean(data_weather[:,-1])
554 554 self.val_mean = val_mean
555 555 data_weather_cmp = numpy.ones([(360-data_weather.shape[0]),data_weather.shape[1]])*val_mean
556 556 data_weather = self.replaceNAN(data_weather=data_weather,data_azi=data_azi_old,val=self.val_mean)
557 557 data_weather = numpy.vstack((data_weather,data_weather_cmp))
558 558 else:
559 559 # azimuth
560 560 flag=0
561 561 start_azi = self.res_azi[0]
562 562 #-----------new------------
563 563 data_azi ,data_azi_old= self.globalCheckPED(data_azi)
564 564 data_weather = self.replaceNAN(data_weather=data_weather,data_azi=data_azi_old,val=self.val_mean)
565 565 #--------------------------
566 566 start = data_azi[0]
567 567 end = data_azi[-1]
568 568 self.last_data_azi= end
569 569 if start< start_azi:
570 570 start = start +360
571 571 if end <start_azi:
572 572 end = end +360
573 573
574 574 pos_ini = int((start-start_azi)/res)
575 575 len_azi = len(data_azi)
576 576 if (360-pos_ini)<len_azi:
577 577 if pos_ini+1==360:
578 578 pos_ini=0
579 579 else:
580 580 flag=1
581 581 dif= 360-pos_ini
582 582 comp= len_azi-dif
583 583 #-----------------
584 584 if flag==0:
585 585 # AZIMUTH
586 586 self.res_azi[pos_ini:pos_ini+len_azi] = data_azi
587 587 # RADAR
588 588 self.res_weather[pos_ini:pos_ini+len_azi,:] = data_weather
589 589 else:
590 590 # AZIMUTH
591 591 self.res_azi[pos_ini:pos_ini+dif] = data_azi[0:dif]
592 592 self.res_azi[0:comp] = data_azi[dif:]
593 593 # RADAR
594 594 self.res_weather[pos_ini:pos_ini+dif,:] = data_weather[0:dif,:]
595 595 self.res_weather[0:comp,:] = data_weather[dif:,:]
596 596 flag=0
597 597 data_azi = self.res_azi
598 598 data_weather = self.res_weather
599 599
600 600 return data_weather,data_azi
601 601
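# Editor's note (summary added for clarity, not part of the original module):
# const_ploteo maintains a full 360-bin azimuth ring for the PPI display. On the
# first call (self.ini == 0) it pads the partial scan out to 360 degrees, filling
# the missing sector with the mean of the last range gate; on later calls it
# overlays the incoming sector onto self.res_azi / self.res_weather, wrapping
# around 360 degrees when needed.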
602 602 def plot(self):
603 603 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1]).strftime('%Y-%m-%d %H:%M:%S')
604 604 data = self.data[-1]
605 605 r = self.data.yrange
606 606 delta_height = r[1]-r[0]
607 607 r_mask = numpy.where(r>=0)[0]
608 608 r = numpy.arange(len(r_mask))*delta_height
609 609 self.y = 2*r
610 610 # RADAR
611 611 #data_weather = data['weather']
612 612 # PEDESTAL
613 613 #data_azi = data['azi']
614 614 res = 1
615 615 # STEP
616 616 step = (360/(res*data['weather'].shape[0]))
617 617
618 618 self.res_weather, self.res_azi = self.const_ploteo(data_weather=data['weather'][:,r_mask],data_azi=data['azi'],step=step,res=res)
619 619 self.res_ele = numpy.mean(data['ele'])
620 620 ################# PLOTTING ###################
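# Editor's note (not part of the original module): with proj='cg' wradlib's
# wrl.vis.plot_ppi draws the PPI on a curvelinear grid and returns the grid axes
# (cgax) plus the pcolormesh (pm) used for the colorbar; the parasite axes read
# below (caax, paax) carry the Cartesian x/y annotation.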
621 621 for i,ax in enumerate(self.axes):
622 622 if ax.firsttime:
623 623 plt.clf()
624 624 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=20, vmax=80)
625 625 else:
626 626 plt.clf()
627 627 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=20, vmax=80)
628 628 caax = cgax.parasites[0]
629 629 paax = cgax.parasites[1]
630 630 cbar = plt.gcf().colorbar(pm, pad=0.075)
631 631 caax.set_xlabel('x_range [km]')
632 632 caax.set_ylabel('y_range [km]')
633 633 plt.text(1.0, 1.05, 'Azimuth '+str(thisDatetime)+" Step "+str(self.ini)+ " Elev: "+str(round(self.res_ele,2)), transform=caax.transAxes, va='bottom',ha='right')
634 634
635 635 self.ini= self.ini+1
636 636
637 637
638 638 class WeatherRHIPlot(Plot):
639 639 CODE = 'weather'
640 640 plot_name = 'weather'
641 641 plot_type = 'rhistyle'
642 642 buffering = False
643 643 data_ele_tmp = None
644 644
645 645 def setup(self):
646 646 print("********************")
647 647 print("********************")
648 648 print("********************")
649 649 print("SETUP WEATHER PLOT")
650 650 self.ncols = 1
651 651 self.nrows = 1
652 652 self.nplots= 1
653 653 self.ylabel= 'Range [Km]'
654 654 self.titles= ['Weather']
655 655 if self.channels is not None:
656 656 self.nplots = len(self.channels)
657 657 self.nrows = len(self.channels)
658 658 else:
659 659 self.nplots = self.data.shape(self.CODE)[0]
660 660 self.nrows = self.nplots
661 661 self.channels = list(range(self.nplots))
662 662 print("channels",self.channels)
663 663 print("que saldra", self.data.shape(self.CODE)[0])
664 664 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
665 665 print("self.titles",self.titles)
666 666 self.colorbar=False
667 667 self.width =8
668 668 self.height =8
669 669 self.ini =0
670 670 self.len_azi =0
671 671 self.buffer_ini = None
672 672 self.buffer_ele = None
673 673 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
674 674 self.flag =0
675 675 self.indicador= 0
676 676 self.last_data_ele = None
677 677 self.val_mean = None
678 678
679 679 def update(self, dataOut):
680 680
681 681 data = {}
682 682 meta = {}
683 683 if hasattr(dataOut, 'dataPP_POWER'):
684 684 factor = 1
685 685 if hasattr(dataOut, 'nFFTPoints'):
686 686 factor = dataOut.normFactor
687 687 print("dataOut",dataOut.data_360.shape)
688 688 #
689 689 data['weather'] = 10*numpy.log10(dataOut.data_360/(factor))
690 690 #
691 691 #data['weather'] = 10*numpy.log10(dataOut.data_360[1]/(factor))
692 692 data['azi'] = dataOut.data_azi
693 693 data['ele'] = dataOut.data_ele
694 print("UPDATE")
695 print("data[weather]",data['weather'].shape)
696 print("data[azi]",data['azi'])
694 #print("UPDATE")
695 #print("data[weather]",data['weather'].shape)
696 #print("data[azi]",data['azi'])
697 697 return data, meta
698 698
699 699 def get2List(self,angulos):
700 700 list1=[]
701 701 list2=[]
702 702 for i in reversed(range(len(angulos))):
703 703 if not i==0:#the i=0 case would compare the first element of the list against the last one, which is not relevant
704 704 diff_ = angulos[i]-angulos[i-1]
705 705 if abs(diff_) >1.5:
706 706 list1.append(i-1)
707 707 list2.append(diff_)
708 708 return list(reversed(list1)),list(reversed(list2))
709 709
710 710 def fixData90(self,list_,ang_):
711 711 if list_[0]==-1:
712 712 vec = numpy.where(ang_<ang_[0])
713 713 ang_[vec] = ang_[vec]+90
714 714 return ang_
715 715 return ang_
716 716
717 717 def fixData90HL(self,angulos):
718 718 vec = numpy.where(angulos>=90)
719 719 angulos[vec]=angulos[vec]-90
720 720 return angulos
721 721
722 722
723 723 def search_pos(self,pos,list_):
724 724 for i in range(len(list_)):
725 725 if pos == list_[i]:
726 726 return True,i
727 727 i=None
728 728 return False,i
729 729
730 730 def fixDataComp(self,ang_,list1_,list2_,tipo_case):
731 731 size = len(ang_)
732 732 size2 = 0
733 733 for i in range(len(list2_)):
734 734 size2=size2+round(abs(list2_[i]))-1
735 735 new_size= size+size2
736 736 ang_new = numpy.zeros(new_size)
737 737 ang_new2 = numpy.zeros(new_size)
738 738
739 739 tmp = 0
740 740 c = 0
741 741 for i in range(len(ang_)):
742 742 ang_new[tmp +c] = ang_[i]
743 743 ang_new2[tmp+c] = ang_[i]
744 744 condition , value = self.search_pos(i,list1_)
745 745 if condition:
746 746 pos = tmp + c + 1
747 747 for k in range(round(abs(list2_[value]))-1):
748 748 if tipo_case==0 or tipo_case==3:#ascending
749 749 ang_new[pos+k] = ang_new[pos+k-1]+1
750 750 ang_new2[pos+k] = numpy.nan
751 751 elif tipo_case==1 or tipo_case==2:#descending
752 752 ang_new[pos+k] = ang_new[pos+k-1]-1
753 753 ang_new2[pos+k] = numpy.nan
754 754
755 755 tmp = pos +k
756 756 c = 0
757 757 c=c+1
758 758 return ang_new,ang_new2
759 759
760 760 def globalCheckPED(self,angulos,tipo_case):
761 761 l1,l2 = self.get2List(angulos)
762 762 ##print("l1",l1)
763 763 ##print("l2",l2)
764 764 if len(l1)>0:
765 765 #angulos2 = self.fixData90(list_=l1,ang_=angulos)
766 766 #l1,l2 = self.get2List(angulos2)
767 767 ang1_,ang2_ = self.fixDataComp(ang_=angulos,list1_=l1,list2_=l2,tipo_case=tipo_case)
768 768 #ang1_ = self.fixData90HL(ang1_)
769 769 #ang2_ = self.fixData90HL(ang2_)
770 770 else:
771 771 ang1_= angulos
772 772 ang2_= angulos
773 773 return ang1_,ang2_
774 774
775 775
776 776 def replaceNAN(self,data_weather,data_ele,val):
777 777 data= data_ele
778 778 data_T= data_weather
779 779 if data.shape[0]> data_T.shape[0]:
780 780 data_N = numpy.ones( [data.shape[0],data_T.shape[1]])
781 781 c = 0
782 782 for i in range(len(data)):
783 783 if numpy.isnan(data[i]):
784 784 data_N[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
785 785 else:
786 786 data_N[i,:]=data_T[c,:]
787 787 c=c+1
788 788 return data_N
789 789 else:
790 790 for i in range(len(data)):
791 791 if numpy.isnan(data[i]):
792 792 data_T[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
793 793 return data_T
794 794
795 795 def check_case(self,data_ele,ang_max,ang_min):
796 796 start = data_ele[0]
797 797 end = data_ele[-1]
798 798 number = (end-start)
799 799 len_ang=len(data_ele)
800 print("start",start)
801 print("end",end)
802 print("number",number)
803
804 print("len_ang",len_ang)
805
806 #exit(1)
800 807
801 808 if start<end and (round(abs(number)+1)>=len_ang or (numpy.argmin(data_ele)==0)):#ascending case
802 809 return 0
803 810 #elif start>end and (round(abs(number)+1)>=len_ang or(numpy.argmax(data_ele)==0)):#descending case
804 811 # return 1
805 812 elif round(abs(number)+1)>=len_ang and (start>end or(numpy.argmax(data_ele)==0)):#descending case
806 813 return 1
807 814 elif round(abs(number)+1)<len_ang and data_ele[-2]>data_ele[-1]:# descending case, max angle changed
808 815 return 2
809 816 elif round(abs(number)+1)<len_ang and data_ele[-2]<data_ele[-1] :# ascending case, min angle changed
810 817 return 3
811 818
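# Editor's note (summary added for clarity, not part of the original module):
# check_case classifies the elevation sweep: 0 = full ascending sweep, 1 = full
# descending sweep, 2 = descending sweep after the maximum angle changed,
# 3 = ascending sweep after the minimum angle changed. If none of the branches
# match, the method implicitly returns None.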
812 819
813 820 def const_ploteo(self,val_ch,data_weather,data_ele,step,res,ang_max,ang_min):
814 821 ang_max= ang_max
815 822 ang_min= ang_min
816 823 data_weather=data_weather
817 824 val_ch=val_ch
818 825 ##print("*********************DATA WEATHER**************************************")
819 826 ##print(data_weather)
820 827 if self.ini==0:
821 828 '''
822 829 print("**********************************************")
823 830 print("**********************************************")
824 831 print("***************ini**************")
825 832 print("**********************************************")
826 833 print("**********************************************")
827 834 '''
828 835 #print("data_ele",data_ele)
829 836 #----------------------------------------------------------
830 837 tipo_case = self.check_case(data_ele,ang_max,ang_min)
831 838 print("check_case",tipo_case)
839 #exit(1)
832 840 #--------------------- new -------------------------
833 841 data_ele_new ,data_ele_old= self.globalCheckPED(data_ele,tipo_case)
834 842
835 843 #-------------------------RHI CHANGES---------------------------------
836 844 start= ang_min
837 845 end = ang_max
838 846 n= (ang_max-ang_min)/res
839 847 #------ new
840 848 self.start_data_ele = data_ele_new[0]
841 849 self.end_data_ele = data_ele_new[-1]
842 850 if tipo_case==0 or tipo_case==3: # ASCENDING
843 851 n1= round(self.start_data_ele)- start
844 852 n2= end - round(self.end_data_ele)
853 print(self.start_data_ele)
854 print(self.end_data_ele)
845 855 if n1>0:
846 856 ele1= numpy.linspace(ang_min+1,self.start_data_ele-1,n1)
847 857 ele1_nan= numpy.ones(n1)*numpy.nan
848 858 data_ele = numpy.hstack((ele1,data_ele_new))
859 print("ele1_nan",ele1_nan.shape)
860 print("data_ele_old",data_ele_old.shape)
849 861 data_ele_old = numpy.hstack((ele1_nan,data_ele_old))
850 862 if n2>0:
851 863 ele2= numpy.linspace(self.end_data_ele+1,end,n2)
852 864 ele2_nan= numpy.ones(n2)*numpy.nan
853 865 data_ele = numpy.hstack((data_ele,ele2))
866 print("ele2_nan",ele2_nan.shape)
867 print("data_ele_old",data_ele_old.shape)
854 868 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
855 869
856 870 if tipo_case==1 or tipo_case==2: # DESCENDING
857 871 data_ele_new = data_ele_new[::-1] # reversed
858 872 data_ele_old = data_ele_old[::-1]# reversed
859 873 data_weather = data_weather[::-1,:]# reversed
860 874 vec= numpy.where(data_ele_new<ang_max)
861 875 data_ele_new = data_ele_new[vec]
862 876 data_ele_old = data_ele_old[vec]
863 877 data_weather = data_weather[vec[0]]
864 878 vec2= numpy.where(0<data_ele_new)
865 879 data_ele_new = data_ele_new[vec2]
866 880 data_ele_old = data_ele_old[vec2]
867 881 data_weather = data_weather[vec2[0]]
868 882 self.start_data_ele = data_ele_new[0]
869 883 self.end_data_ele = data_ele_new[-1]
870 884
871 885 n1= round(self.start_data_ele)- start
872 886 n2= end - round(self.end_data_ele)-1
873 887 print(self.start_data_ele)
874 888 print(self.end_data_ele)
875 889 if n1>0:
876 890 ele1= numpy.linspace(ang_min+1,self.start_data_ele-1,n1)
877 891 ele1_nan= numpy.ones(n1)*numpy.nan
878 892 data_ele = numpy.hstack((ele1,data_ele_new))
879 893 data_ele_old = numpy.hstack((ele1_nan,data_ele_old))
880 894 if n2>0:
881 895 ele2= numpy.linspace(self.end_data_ele+1,end,n2)
882 896 ele2_nan= numpy.ones(n2)*numpy.nan
883 897 data_ele = numpy.hstack((data_ele,ele2))
884 898 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
885 899 # RADAR
886 900 # NOTE: data_ele and data_weather are the variables that get returned
887 901 val_mean = numpy.mean(data_weather[:,-1])
888 902 self.val_mean = val_mean
889 903 data_weather = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
890 904 self.data_ele_tmp[val_ch]= data_ele_old
891 905 else:
892 906 #print("**********************************************")
893 907 #print("****************VARIABLE**********************")
894 908 #-------------------------RHI CHANGES---------------------------------
895 909 #---------------------------------------------------------------------
896 910 ##print("INPUT data_ele",data_ele)
897 911 flag=0
898 912 start_ele = self.res_ele[0]
899 913 tipo_case = self.check_case(data_ele,ang_max,ang_min)
900 914 #print("TIPO DE DATA",tipo_case)
901 915 #-----------new------------
902 916 data_ele ,data_ele_old = self.globalCheckPED(data_ele,tipo_case)
903 917 data_weather = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
904 918
905 919 #-------------------------------NEW ITERATIVE RHI-------------------------
906 920
907 921 if tipo_case==0 : # ASCENDING
908 922 vec = numpy.where(data_ele<ang_max)
909 923 data_ele = data_ele[vec]
910 924 data_ele_old = data_ele_old[vec]
911 925 data_weather = data_weather[vec[0]]
912 926
913 927 vec2 = numpy.where(0<data_ele)
914 928 data_ele= data_ele[vec2]
915 929 data_ele_old= data_ele_old[vec2]
916 930 ##print(data_ele_new)
917 931 data_weather= data_weather[vec2[0]]
918 932
919 933 new_i_ele = int(round(data_ele[0]))
920 934 new_f_ele = int(round(data_ele[-1]))
921 935 #print(new_i_ele)
922 936 #print(new_f_ele)
923 937 #print(data_ele,len(data_ele))
924 938 #print(data_ele_old,len(data_ele_old))
925 939 if new_i_ele< 2:
926 940 self.data_ele_tmp[val_ch] = numpy.ones(ang_max-ang_min)*numpy.nan
927 941 self.res_weather[val_ch] = self.replaceNAN(data_weather=self.res_weather[val_ch],data_ele=self.data_ele_tmp[val_ch],val=self.val_mean)
928 942 self.data_ele_tmp[val_ch][new_i_ele:new_i_ele+len(data_ele)]=data_ele_old
929 943 self.res_ele[new_i_ele:new_i_ele+len(data_ele)]= data_ele
930 944 self.res_weather[val_ch][new_i_ele:new_i_ele+len(data_ele),:]= data_weather
931 945 data_ele = self.res_ele
932 946 data_weather = self.res_weather[val_ch]
933 947
934 948 elif tipo_case==1 : #DESCENDING
935 949 data_ele = data_ele[::-1] # reversed
936 950 data_ele_old = data_ele_old[::-1]# reversed
937 951 data_weather = data_weather[::-1,:]# reversed
938 952 vec= numpy.where(data_ele<ang_max)
939 953 data_ele = data_ele[vec]
940 954 data_ele_old = data_ele_old[vec]
941 955 data_weather = data_weather[vec[0]]
942 956 vec2= numpy.where(0<data_ele)
943 957 data_ele = data_ele[vec2]
944 958 data_ele_old = data_ele_old[vec2]
945 959 data_weather = data_weather[vec2[0]]
946 960
947 961
948 962 new_i_ele = int(round(data_ele[0]))
949 963 new_f_ele = int(round(data_ele[-1]))
950 964 #print(data_ele)
951 965 #print(ang_max)
952 966 #print(data_ele_old)
953 967 if new_i_ele <= 1:
954 968 new_i_ele = 1
955 969 if round(data_ele[-1])>=ang_max-1:
956 970 self.data_ele_tmp[val_ch] = numpy.ones(ang_max-ang_min)*numpy.nan
957 971 self.res_weather[val_ch] = self.replaceNAN(data_weather=self.res_weather[val_ch],data_ele=self.data_ele_tmp[val_ch],val=self.val_mean)
958 972 self.data_ele_tmp[val_ch][new_i_ele-1:new_i_ele+len(data_ele)-1]=data_ele_old
959 973 self.res_ele[new_i_ele-1:new_i_ele+len(data_ele)-1]= data_ele
960 974 self.res_weather[val_ch][new_i_ele-1:new_i_ele+len(data_ele)-1,:]= data_weather
961 975 data_ele = self.res_ele
962 976 data_weather = self.res_weather[val_ch]
963 977
964 978 elif tipo_case==2: #descending
965 979 vec = numpy.where(data_ele<ang_max)
966 980 data_ele = data_ele[vec]
967 981 data_weather= data_weather[vec[0]]
968 982
969 983 len_vec = len(vec)
970 984 data_ele_new = data_ele[::-1] # reversa
971 985 data_weather = data_weather[::-1,:]
972 986 new_i_ele = int(data_ele_new[0])
973 987 new_f_ele = int(data_ele_new[-1])
974 988
975 989 n1= new_i_ele- ang_min
976 990 n2= ang_max - new_f_ele-1
977 991 if n1>0:
978 992 ele1= numpy.linspace(ang_min+1,new_i_ele-1,n1)
979 993 ele1_nan= numpy.ones(n1)*numpy.nan
980 994 data_ele = numpy.hstack((ele1,data_ele_new))
981 995 data_ele_old = numpy.hstack((ele1_nan,data_ele_new))
982 996 if n2>0:
983 997 ele2= numpy.linspace(new_f_ele+1,ang_max,n2)
984 998 ele2_nan= numpy.ones(n2)*numpy.nan
985 999 data_ele = numpy.hstack((data_ele,ele2))
986 1000 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
987 1001
988 1002 self.data_ele_tmp[val_ch] = data_ele_old
989 1003 self.res_ele = data_ele
990 1004 self.res_weather[val_ch] = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
991 1005 data_ele = self.res_ele
992 1006 data_weather = self.res_weather[val_ch]
993 1007
994 1008 elif tipo_case==3:#ascending
995 1009 vec = numpy.where(0<data_ele)
996 1010 data_ele= data_ele[vec]
997 1011 data_ele_new = data_ele
998 1012 data_ele_old= data_ele_old[vec]
999 1013 data_weather= data_weather[vec[0]]
1000 1014 pos_ini = numpy.argmin(data_ele)
1001 1015 if pos_ini>0:
1002 1016 len_vec= len(data_ele)
1003 1017 vec3 = numpy.linspace(pos_ini,len_vec-1,len_vec-pos_ini).astype(int)
1004 1018 #print(vec3)
1005 1019 data_ele= data_ele[vec3]
1006 1020 data_ele_new = data_ele
1007 1021 data_ele_old= data_ele_old[vec3]
1008 1022 data_weather= data_weather[vec3]
1009 1023
1010 1024 new_i_ele = int(data_ele_new[0])
1011 1025 new_f_ele = int(data_ele_new[-1])
1012 1026 n1= new_i_ele- ang_min
1013 1027 n2= ang_max - new_f_ele-1
1014 1028 if n1>0:
1015 1029 ele1= numpy.linspace(ang_min+1,new_i_ele-1,n1)
1016 1030 ele1_nan= numpy.ones(n1)*numpy.nan
1017 1031 data_ele = numpy.hstack((ele1,data_ele_new))
1018 1032 data_ele_old = numpy.hstack((ele1_nan,data_ele_new))
1019 1033 if n2>0:
1020 1034 ele2= numpy.linspace(new_f_ele+1,ang_max,n2)
1021 1035 ele2_nan= numpy.ones(n2)*numpy.nan
1022 1036 data_ele = numpy.hstack((data_ele,ele2))
1023 1037 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1024 1038
1025 1039 self.data_ele_tmp[val_ch] = data_ele_old
1026 1040 self.res_ele = data_ele
1027 1041 self.res_weather[val_ch] = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1028 1042 data_ele = self.res_ele
1029 1043 data_weather = self.res_weather[val_ch]
1030 1044 #print("self.data_ele_tmp",self.data_ele_tmp)
1031 1045 return data_weather,data_ele
1032 1046
1033 1047
1034 1048 def plot(self):
1035 1049 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1]).strftime('%Y-%m-%d %H:%M:%S')
1036 1050 data = self.data[-1]
1037 1051 r = self.data.yrange
1038 1052 delta_height = r[1]-r[0]
1039 1053 r_mask = numpy.where(r>=0)[0]
1040 1054 ##print("delta_height",delta_height)
1041 1055 #print("r_mask",r_mask,len(r_mask))
1042 1056 r = numpy.arange(len(r_mask))*delta_height
1043 1057 self.y = 2*r
1044 1058 res = 1
1045 1059 ###print("data['weather'].shape[0]",data['weather'].shape[0])
1046 1060 ang_max = self.ang_max
1047 1061 ang_min = self.ang_min
1048 1062 var_ang =ang_max - ang_min
1049 1063 step = (int(var_ang)/(res*data['weather'].shape[0]))
1050 1064 ###print("step",step)
1051 1065 #--------------------------------------------------------
1052 1066 ##print('weather',data['weather'].shape)
1053 1067 ##print('ele',data['ele'].shape)
1054 1068
1055 1069 ###self.res_weather, self.res_ele = self.const_ploteo(data_weather=data['weather'][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min)
1056 1070 ###self.res_azi = numpy.mean(data['azi'])
1057 1071 ###print("self.res_ele",self.res_ele)
1058 1072 plt.clf()
1059 1073 subplots = [121, 122]
1060 1074 if self.ini==0:
1061 1075 self.data_ele_tmp = numpy.ones([self.nplots,int(var_ang)])*numpy.nan
1062 1076 self.res_weather= numpy.ones([self.nplots,int(var_ang),len(r_mask)])*numpy.nan
1063 1077 print("SHAPE",self.data_ele_tmp.shape)
1064 1078
1065 1079 for i,ax in enumerate(self.axes):
1066 1080 self.res_weather[i], self.res_ele = self.const_ploteo(val_ch=i, data_weather=data['weather'][i][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min)
1067 1081 self.res_azi = numpy.mean(data['azi'])
1082 if i==0:
1083 print("*****************************************************************************to plot**************************",self.res_weather[i].shape)
1068 1084 if ax.firsttime:
1069 1085 #plt.clf()
1070 1086 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1071 1087 #fig=self.figures[0]
1072 1088 else:
1073 1089 #plt.clf()
1090 if i==0:
1091 print(self.res_weather[i])
1092 print(self.res_ele)
1093 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1094 caax = cgax.parasites[0]
1095 paax = cgax.parasites[1]
1096 cbar = plt.gcf().colorbar(pm, pad=0.075)
1097 caax.set_xlabel('x_range [km]')
1098 caax.set_ylabel('y_range [km]')
1099 plt.text(1.0, 1.05, 'Elevation '+str(thisDatetime)+" Step "+str(self.ini)+ " Azi: "+str(round(self.res_azi,2)), transform=caax.transAxes, va='bottom',ha='right')
1100 print("***************************self.ini****************************",self.ini)
1101 self.ini= self.ini+1
1102
1103 class WeatherRHI_vRF2_Plot(Plot):
1104 CODE = 'weather'
1105 plot_name = 'weather'
1106 plot_type = 'rhistyle'
1107 buffering = False
1108 data_ele_tmp = None
1109
1110 def setup(self):
1111 print("********************")
1112 print("********************")
1113 print("********************")
1114 print("SETUP WEATHER PLOT")
1115 self.ncols = 1
1116 self.nrows = 1
1117 self.nplots= 1
1118 self.ylabel= 'Range [Km]'
1119 self.titles= ['Weather']
1120 if self.channels is not None:
1121 self.nplots = len(self.channels)
1122 self.nrows = len(self.channels)
1123 else:
1124 self.nplots = self.data.shape(self.CODE)[0]
1125 self.nrows = self.nplots
1126 self.channels = list(range(self.nplots))
1127 print("channels",self.channels)
1128 print("que saldra", self.data.shape(self.CODE)[0])
1129 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
1130 print("self.titles",self.titles)
1131 self.colorbar=False
1132 self.width =8
1133 self.height =8
1134 self.ini =0
1135 self.len_azi =0
1136 self.buffer_ini = None
1137 self.buffer_ele = None
1138 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
1139 self.flag =0
1140 self.indicador= 0
1141 self.last_data_ele = None
1142 self.val_mean = None
1143
1144 def update(self, dataOut):
1145
1146 data = {}
1147 meta = {}
1148 if hasattr(dataOut, 'dataPP_POWER'):
1149 factor = 1
1150 if hasattr(dataOut, 'nFFTPoints'):
1151 factor = dataOut.normFactor
1152 print("dataOut",dataOut.data_360.shape)
1153 #
1154 data['weather'] = 10*numpy.log10(dataOut.data_360/(factor))
1155 #
1156 #data['weather'] = 10*numpy.log10(dataOut.data_360[1]/(factor))
1157 data['azi'] = dataOut.data_azi
1158 data['ele'] = dataOut.data_ele
1159 data['case_flag'] = dataOut.case_flag
1160 #print("UPDATE")
1161 #print("data[weather]",data['weather'].shape)
1162 #print("data[azi]",data['azi'])
1163 return data, meta
1164
1165 def get2List(self,angulos):
1166 list1=[]
1167 list2=[]
1168 for i in reversed(range(len(angulos))):
1169 if not i==0:#the i=0 case would compare the first element of the list against the last one, which is not relevant
1170 diff_ = angulos[i]-angulos[i-1]
1171 if abs(diff_) >1.5:
1172 list1.append(i-1)
1173 list2.append(diff_)
1174 return list(reversed(list1)),list(reversed(list2))
1175
1176 def fixData90(self,list_,ang_):
1177 if list_[0]==-1:
1178 vec = numpy.where(ang_<ang_[0])
1179 ang_[vec] = ang_[vec]+90
1180 return ang_
1181 return ang_
1182
1183 def fixData90HL(self,angulos):
1184 vec = numpy.where(angulos>=90)
1185 angulos[vec]=angulos[vec]-90
1186 return angulos
1187
1188
1189 def search_pos(self,pos,list_):
1190 for i in range(len(list_)):
1191 if pos == list_[i]:
1192 return True,i
1193 i=None
1194 return False,i
1195
1196 def fixDataComp(self,ang_,list1_,list2_,tipo_case):
1197 size = len(ang_)
1198 size2 = 0
1199 for i in range(len(list2_)):
1200 size2=size2+round(abs(list2_[i]))-1
1201 new_size= size+size2
1202 ang_new = numpy.zeros(new_size)
1203 ang_new2 = numpy.zeros(new_size)
1204
1205 tmp = 0
1206 c = 0
1207 for i in range(len(ang_)):
1208 ang_new[tmp +c] = ang_[i]
1209 ang_new2[tmp+c] = ang_[i]
1210 condition , value = self.search_pos(i,list1_)
1211 if condition:
1212 pos = tmp + c + 1
1213 for k in range(round(abs(list2_[value]))-1):
1214 if tipo_case==0 or tipo_case==3:#ascending
1215 ang_new[pos+k] = ang_new[pos+k-1]+1
1216 ang_new2[pos+k] = numpy.nan
1217 elif tipo_case==1 or tipo_case==2:#descending
1218 ang_new[pos+k] = ang_new[pos+k-1]-1
1219 ang_new2[pos+k] = numpy.nan
1220
1221 tmp = pos +k
1222 c = 0
1223 c=c+1
1224 return ang_new,ang_new2
1225
1226 def globalCheckPED(self,angulos,tipo_case):
1227 l1,l2 = self.get2List(angulos)
1228 ##print("l1",l1)
1229 ##print("l2",l2)
1230 if len(l1)>0:
1231 #angulos2 = self.fixData90(list_=l1,ang_=angulos)
1232 #l1,l2 = self.get2List(angulos2)
1233 ang1_,ang2_ = self.fixDataComp(ang_=angulos,list1_=l1,list2_=l2,tipo_case=tipo_case)
1234 #ang1_ = self.fixData90HL(ang1_)
1235 #ang2_ = self.fixData90HL(ang2_)
1236 else:
1237 ang1_= angulos
1238 ang2_= angulos
1239 return ang1_,ang2_
1240
1241
1242 def replaceNAN(self,data_weather,data_ele,val):
1243 data= data_ele
1244 data_T= data_weather
1245 if data.shape[0]> data_T.shape[0]:
1246 data_N = numpy.ones( [data.shape[0],data_T.shape[1]])
1247 c = 0
1248 for i in range(len(data)):
1249 if numpy.isnan(data[i]):
1250 data_N[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
1251 else:
1252 data_N[i,:]=data_T[c,:]
1253 c=c+1
1254 return data_N
1255 else:
1256 for i in range(len(data)):
1257 if numpy.isnan(data[i]):
1258 data_T[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
1259 return data_T
1260
1261 def check_case(self,data_ele,ang_max,ang_min):
1262 start = data_ele[0]
1263 end = data_ele[-1]
1264 number = (end-start)
1265 len_ang=len(data_ele)
1266 print("start",start)
1267 print("end",end)
1268 print("number",number)
1269
1270 print("len_ang",len_ang)
1271
1272 #exit(1)
1273
1274 if start<end and (round(abs(number)+1)>=len_ang or (numpy.argmin(data_ele)==0)):#ascending case
1275 return 0
1276 #elif start>end and (round(abs(number)+1)>=len_ang or(numpy.argmax(data_ele)==0)):#descending case
1277 # return 1
1278 elif round(abs(number)+1)>=len_ang and (start>end or(numpy.argmax(data_ele)==0)):#descending case
1279 return 1
1280 elif round(abs(number)+1)<len_ang and data_ele[-2]>data_ele[-1]:# descending case, max angle changed
1281 return 2
1282 elif round(abs(number)+1)<len_ang and data_ele[-2]<data_ele[-1] :# ascending case, min angle changed
1283 return 3
1284
1285
1286 def const_ploteo(self,val_ch,data_weather,data_ele,step,res,ang_max,ang_min,case_flag):
1287 ang_max= ang_max
1288 ang_min= ang_min
1289 data_weather=data_weather
1290 val_ch=val_ch
1291 ##print("*********************DATA WEATHER**************************************")
1292 ##print(data_weather)
1293 if self.ini==0:
1294 '''
1295 print("**********************************************")
1296 print("**********************************************")
1297 print("***************ini**************")
1298 print("**********************************************")
1299 print("**********************************************")
1300 '''
1301 #print("data_ele",data_ele)
1302 #----------------------------------------------------------
1303 tipo_case = case_flag[-1]
1304 #tipo_case = self.check_case(data_ele,ang_max,ang_min)
1305 print("check_case",tipo_case)
1306 #exit(1)
1307 #--------------------- new -------------------------
1308 data_ele_new ,data_ele_old= self.globalCheckPED(data_ele,tipo_case)
1309
1310 #-------------------------RHI CHANGES---------------------------------
1311 start= ang_min
1312 end = ang_max
1313 n= (ang_max-ang_min)/res
1314 #------ new
1315 self.start_data_ele = data_ele_new[0]
1316 self.end_data_ele = data_ele_new[-1]
1317 if tipo_case==0 or tipo_case==3: # ASCENDING
1318 n1= round(self.start_data_ele)- start
1319 n2= end - round(self.end_data_ele)
1320 print(self.start_data_ele)
1321 print(self.end_data_ele)
1322 if n1>0:
1323 ele1= numpy.linspace(ang_min+1,self.start_data_ele-1,n1)
1324 ele1_nan= numpy.ones(n1)*numpy.nan
1325 data_ele = numpy.hstack((ele1,data_ele_new))
1326 print("ele1_nan",ele1_nan.shape)
1327 print("data_ele_old",data_ele_old.shape)
1328 data_ele_old = numpy.hstack((ele1_nan,data_ele_old))
1329 if n2>0:
1330 ele2= numpy.linspace(self.end_data_ele+1,end,n2)
1331 ele2_nan= numpy.ones(n2)*numpy.nan
1332 data_ele = numpy.hstack((data_ele,ele2))
1333 print("ele2_nan",ele2_nan.shape)
1334 print("data_ele_old",data_ele_old.shape)
1335 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1336
1337 if tipo_case==1 or tipo_case==2: # DESCENDING
1338 data_ele_new = data_ele_new[::-1] # reversed
1339 data_ele_old = data_ele_old[::-1]# reversed
1340 data_weather = data_weather[::-1,:]# reversed
1341 vec= numpy.where(data_ele_new<ang_max)
1342 data_ele_new = data_ele_new[vec]
1343 data_ele_old = data_ele_old[vec]
1344 data_weather = data_weather[vec[0]]
1345 vec2= numpy.where(0<data_ele_new)
1346 data_ele_new = data_ele_new[vec2]
1347 data_ele_old = data_ele_old[vec2]
1348 data_weather = data_weather[vec2[0]]
1349 self.start_data_ele = data_ele_new[0]
1350 self.end_data_ele = data_ele_new[-1]
1351
1352 n1= round(self.start_data_ele)- start
1353 n2= end - round(self.end_data_ele)-1
1354 print(self.start_data_ele)
1355 print(self.end_data_ele)
1356 if n1>0:
1357 ele1= numpy.linspace(ang_min+1,self.start_data_ele-1,n1)
1358 ele1_nan= numpy.ones(n1)*numpy.nan
1359 data_ele = numpy.hstack((ele1,data_ele_new))
1360 data_ele_old = numpy.hstack((ele1_nan,data_ele_old))
1361 if n2>0:
1362 ele2= numpy.linspace(self.end_data_ele+1,end,n2)
1363 ele2_nan= numpy.ones(n2)*numpy.nan
1364 data_ele = numpy.hstack((data_ele,ele2))
1365 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1366 # RADAR
1367 # NOTE: data_ele and data_weather are the variables that get returned
1368 val_mean = numpy.mean(data_weather[:,-1])
1369 self.val_mean = val_mean
1370 data_weather = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1371 print("eleold",data_ele_old)
1372 print(self.data_ele_tmp[val_ch])
1373 print(data_ele_old.shape[0])
1374 print(self.data_ele_tmp[val_ch].shape[0])
1375 if (data_ele_old.shape[0]==91 or self.data_ele_tmp[val_ch].shape[0]==91):
1376 import sys
1377 print("EXIT",self.ini)
1378
1379 sys.exit(1)
1380 self.data_ele_tmp[val_ch]= data_ele_old
1381 else:
1382 #print("**********************************************")
1383 #print("****************VARIABLE**********************")
1384 #-------------------------RHI CHANGES---------------------------------
1385 #---------------------------------------------------------------------
1386 ##print("INPUT data_ele",data_ele)
1387 flag=0
1388 start_ele = self.res_ele[0]
1389 #tipo_case = self.check_case(data_ele,ang_max,ang_min)
1390 tipo_case = case_flag[-1]
1391 #print("TIPO DE DATA",tipo_case)
1392 #-----------new------------
1393 data_ele ,data_ele_old = self.globalCheckPED(data_ele,tipo_case)
1394 data_weather = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1395
1396 #-------------------------------NEW ITERATIVE RHI-------------------------
1397
1398 if tipo_case==0 : # ASCENDING
1399 vec = numpy.where(data_ele<ang_max)
1400 data_ele = data_ele[vec]
1401 data_ele_old = data_ele_old[vec]
1402 data_weather = data_weather[vec[0]]
1403
1404 vec2 = numpy.where(0<data_ele)
1405 data_ele= data_ele[vec2]
1406 data_ele_old= data_ele_old[vec2]
1407 ##print(data_ele_new)
1408 data_weather= data_weather[vec2[0]]
1409
1410 new_i_ele = int(round(data_ele[0]))
1411 new_f_ele = int(round(data_ele[-1]))
1412 #print(new_i_ele)
1413 #print(new_f_ele)
1414 #print(data_ele,len(data_ele))
1415 #print(data_ele_old,len(data_ele_old))
1416 if new_i_ele< 2:
1417 self.data_ele_tmp[val_ch] = numpy.ones(ang_max-ang_min)*numpy.nan
1418 self.res_weather[val_ch] = self.replaceNAN(data_weather=self.res_weather[val_ch],data_ele=self.data_ele_tmp[val_ch],val=self.val_mean)
1419 self.data_ele_tmp[val_ch][new_i_ele:new_i_ele+len(data_ele)]=data_ele_old
1420 self.res_ele[new_i_ele:new_i_ele+len(data_ele)]= data_ele
1421 self.res_weather[val_ch][new_i_ele:new_i_ele+len(data_ele),:]= data_weather
1422 data_ele = self.res_ele
1423 data_weather = self.res_weather[val_ch]
1424
1425 elif tipo_case==1 : #DESCENDING
1426 data_ele = data_ele[::-1] # reversed
1427 data_ele_old = data_ele_old[::-1]# reversed
1428 data_weather = data_weather[::-1,:]# reversed
1429 vec= numpy.where(data_ele<ang_max)
1430 data_ele = data_ele[vec]
1431 data_ele_old = data_ele_old[vec]
1432 data_weather = data_weather[vec[0]]
1433 vec2= numpy.where(0<data_ele)
1434 data_ele = data_ele[vec2]
1435 data_ele_old = data_ele_old[vec2]
1436 data_weather = data_weather[vec2[0]]
1437
1438
1439 new_i_ele = int(round(data_ele[0]))
1440 new_f_ele = int(round(data_ele[-1]))
1441 #print(data_ele)
1442 #print(ang_max)
1443 #print(data_ele_old)
1444 if new_i_ele <= 1:
1445 new_i_ele = 1
1446 if round(data_ele[-1])>=ang_max-1:
1447 self.data_ele_tmp[val_ch] = numpy.ones(ang_max-ang_min)*numpy.nan
1448 self.res_weather[val_ch] = self.replaceNAN(data_weather=self.res_weather[val_ch],data_ele=self.data_ele_tmp[val_ch],val=self.val_mean)
1449 self.data_ele_tmp[val_ch][new_i_ele-1:new_i_ele+len(data_ele)-1]=data_ele_old
1450 self.res_ele[new_i_ele-1:new_i_ele+len(data_ele)-1]= data_ele
1451 self.res_weather[val_ch][new_i_ele-1:new_i_ele+len(data_ele)-1,:]= data_weather
1452 data_ele = self.res_ele
1453 data_weather = self.res_weather[val_ch]
1454
1455 elif tipo_case==2: #descending
1456 vec = numpy.where(data_ele<ang_max)
1457 data_ele = data_ele[vec]
1458 data_weather= data_weather[vec[0]]
1459
1460 len_vec = len(vec)
1461 data_ele_new = data_ele[::-1] # reversa
1462 data_weather = data_weather[::-1,:]
1463 new_i_ele = int(data_ele_new[0])
1464 new_f_ele = int(data_ele_new[-1])
1465
1466 n1= new_i_ele- ang_min
1467 n2= ang_max - new_f_ele-1
1468 if n1>0:
1469 ele1= numpy.linspace(ang_min+1,new_i_ele-1,n1)
1470 ele1_nan= numpy.ones(n1)*numpy.nan
1471 data_ele = numpy.hstack((ele1,data_ele_new))
1472 data_ele_old = numpy.hstack((ele1_nan,data_ele_new))
1473 if n2>0:
1474 ele2= numpy.linspace(new_f_ele+1,ang_max,n2)
1475 ele2_nan= numpy.ones(n2)*numpy.nan
1476 data_ele = numpy.hstack((data_ele,ele2))
1477 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1478
1479 self.data_ele_tmp[val_ch] = data_ele_old
1480 self.res_ele = data_ele
1481 self.res_weather[val_ch] = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1482 data_ele = self.res_ele
1483 data_weather = self.res_weather[val_ch]
1484
1485 elif tipo_case==3:#ascending
1486 vec = numpy.where(0<data_ele)
1487 data_ele= data_ele[vec]
1488 data_ele_new = data_ele
1489 data_ele_old= data_ele_old[vec]
1490 data_weather= data_weather[vec[0]]
1491 pos_ini = numpy.argmin(data_ele)
1492 if pos_ini>0:
1493 len_vec= len(data_ele)
1494 vec3 = numpy.linspace(pos_ini,len_vec-1,len_vec-pos_ini).astype(int)
1495 #print(vec3)
1496 data_ele= data_ele[vec3]
1497 data_ele_new = data_ele
1498 data_ele_old= data_ele_old[vec3]
1499 data_weather= data_weather[vec3]
1500
1501 new_i_ele = int(data_ele_new[0])
1502 new_f_ele = int(data_ele_new[-1])
1503 n1= new_i_ele- ang_min
1504 n2= ang_max - new_f_ele-1
1505 if n1>0:
1506 ele1= numpy.linspace(ang_min+1,new_i_ele-1,n1)
1507 ele1_nan= numpy.ones(n1)*numpy.nan
1508 data_ele = numpy.hstack((ele1,data_ele_new))
1509 data_ele_old = numpy.hstack((ele1_nan,data_ele_new))
1510 if n2>0:
1511 ele2= numpy.linspace(new_f_ele+1,ang_max,n2)
1512 ele2_nan= numpy.ones(n2)*numpy.nan
1513 data_ele = numpy.hstack((data_ele,ele2))
1514 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1515
1516 self.data_ele_tmp[val_ch] = data_ele_old
1517 self.res_ele = data_ele
1518 self.res_weather[val_ch] = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1519 data_ele = self.res_ele
1520 data_weather = self.res_weather[val_ch]
1521 #print("self.data_ele_tmp",self.data_ele_tmp)
1522 return data_weather,data_ele
1523
1524
1525 def plot(self):
1526 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1]).strftime('%Y-%m-%d %H:%M:%S')
1527 data = self.data[-1]
1528 r = self.data.yrange
1529 delta_height = r[1]-r[0]
1530 r_mask = numpy.where(r>=0)[0]
1531 ##print("delta_height",delta_height)
1532 #print("r_mask",r_mask,len(r_mask))
1533 r = numpy.arange(len(r_mask))*delta_height
1534 self.y = 2*r
1535 res = 1
1536 ###print("data['weather'].shape[0]",data['weather'].shape[0])
1537 ang_max = self.ang_max
1538 ang_min = self.ang_min
1539 var_ang =ang_max - ang_min
1540 step = (int(var_ang)/(res*data['weather'].shape[0]))
1541 ###print("step",step)
1542 #--------------------------------------------------------
1543 ##print('weather',data['weather'].shape)
1544 ##print('ele',data['ele'].shape)
1545
1546 ###self.res_weather, self.res_ele = self.const_ploteo(data_weather=data['weather'][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min)
1547 ###self.res_azi = numpy.mean(data['azi'])
1548 ###print("self.res_ele",self.res_ele)
1549 plt.clf()
1550 subplots = [121, 122]
1551 try:
1552 if self.data[-2]['ele'].max()<data['ele'].max():
1553 self.ini=0
1554 except:
1555 pass
1556 if self.ini==0:
1557 self.data_ele_tmp = numpy.ones([self.nplots,int(var_ang)])*numpy.nan
1558 self.res_weather= numpy.ones([self.nplots,int(var_ang),len(r_mask)])*numpy.nan
1559 print("SHAPE",self.data_ele_tmp.shape)
1560
1561 for i,ax in enumerate(self.axes):
1562 self.res_weather[i], self.res_ele = self.const_ploteo(val_ch=i, data_weather=data['weather'][i][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min,case_flag=self.data['case_flag'])
1563 self.res_azi = numpy.mean(data['azi'])
1564
1565 if ax.firsttime:
1566 #plt.clf()
1567 print("Frist Plot")
1568 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1569 #fig=self.figures[0]
1570 else:
1571 #plt.clf()
1572 print("ELSE PLOT")
1573 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1574 caax = cgax.parasites[0]
1575 paax = cgax.parasites[1]
1576 cbar = plt.gcf().colorbar(pm, pad=0.075)
1577 caax.set_xlabel('x_range [km]')
1578 caax.set_ylabel('y_range [km]')
1579 plt.text(1.0, 1.05, 'Elevation '+str(thisDatetime)+" Step "+str(self.ini)+ " Azi: "+str(round(self.res_azi,2)), transform=caax.transAxes, va='bottom',ha='right')
1580 print("***************************self.ini****************************",self.ini)
1581 self.ini= self.ini+1
1582
1583 class WeatherRHI_vRF_Plot(Plot):
1584 CODE = 'weather'
1585 plot_name = 'weather'
1586 plot_type = 'rhistyle'
1587 buffering = False
1588 data_ele_tmp = None
1589
1590 def setup(self):
1591 print("********************")
1592 print("********************")
1593 print("********************")
1594 print("SETUP WEATHER PLOT")
1595 self.ncols = 1
1596 self.nrows = 1
1597 self.nplots= 1
1598 self.ylabel= 'Range [Km]'
1599 self.titles= ['Weather']
1600 if self.channels is not None:
1601 self.nplots = len(self.channels)
1602 self.nrows = len(self.channels)
1603 else:
1604 self.nplots = self.data.shape(self.CODE)[0]
1605 self.nrows = self.nplots
1606 self.channels = list(range(self.nplots))
1607 print("channels",self.channels)
1608         print("what will come out", self.data.shape(self.CODE)[0])
1609 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
1610 print("self.titles",self.titles)
1611 self.colorbar=False
1612 self.width =8
1613 self.height =8
1614 self.ini =0
1615 self.len_azi =0
1616 self.buffer_ini = None
1617 self.buffer_ele = None
1618 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
1619 self.flag =0
1620 self.indicador= 0
1621 self.last_data_ele = None
1622 self.val_mean = None
1623
1624 def update(self, dataOut):
1625
1626 data = {}
1627 meta = {}
1628 if hasattr(dataOut, 'dataPP_POWER'):
1629 factor = 1
1630 if hasattr(dataOut, 'nFFTPoints'):
1631 factor = dataOut.normFactor
1632 print("dataOut",dataOut.data_360.shape)
1633 #
1634 data['weather'] = 10*numpy.log10(dataOut.data_360/(factor))
1635 #
1636 #data['weather'] = 10*numpy.log10(dataOut.data_360[1]/(factor))
1637 data['azi'] = dataOut.data_azi
1638 data['ele'] = dataOut.data_ele
1639 data['case_flag'] = dataOut.case_flag
1640 #print("UPDATE")
1641 #print("data[weather]",data['weather'].shape)
1642 #print("data[azi]",data['azi'])
1643 return data, meta
1644
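    # Note on the conversion in update() above: data_360 is divided by the
    # normalization factor (normFactor when spectra are available, 1 otherwise)
    # and mapped to decibels with 10*log10. As a purely illustrative example,
    # a linear power of 1e4 with factor = 100 gives 10*numpy.log10(1e4/100) = 20 dB.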
1645 def get2List(self,angulos):
1646 list1=[]
1647 list2=[]
1648 #print(angulos)
1649 #exit(1)
1650 for i in reversed(range(len(angulos))):
1651             if not i==0:# the i=0 case would compare the first element of the list with the last one and is not relevant
1652 diff_ = angulos[i]-angulos[i-1]
1653 if abs(diff_) >1.5:
1654 list1.append(i-1)
1655 list2.append(diff_)
1656 return list(reversed(list1)),list(reversed(list2))
1657
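    # Worked example for get2List (input values assumed for illustration): the
    # angle sequence is scanned backwards and every position where consecutive
    # samples differ by more than 1.5 degrees is recorded together with the jump.
    #
    # l1, l2 = self.get2List(numpy.array([1., 2., 3., 7., 8.]))
    # # l1 == [2]   -> the gap starts right after index 2 (value 3.)
    # # l2 == [4.0] -> the elevation jumped by 4 degrees at that point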
1658 def fixData90(self,list_,ang_):
1659 if list_[0]==-1:
1660 vec = numpy.where(ang_<ang_[0])
1661 ang_[vec] = ang_[vec]+90
1662 return ang_
1663 return ang_
1664
1665 def fixData90HL(self,angulos):
1666 vec = numpy.where(angulos>=90)
1667 angulos[vec]=angulos[vec]-90
1668 return angulos
1669
1670
1671 def search_pos(self,pos,list_):
1672 for i in range(len(list_)):
1673 if pos == list_[i]:
1674 return True,i
1675 i=None
1676 return False,i
1677
1678 def fixDataComp(self,ang_,list1_,list2_,tipo_case):
1679 size = len(ang_)
1680 size2 = 0
1681 for i in range(len(list2_)):
1682 size2=size2+round(abs(list2_[i]))-1
1683 new_size= size+size2
1684 ang_new = numpy.zeros(new_size)
1685 ang_new2 = numpy.zeros(new_size)
1686
1687 tmp = 0
1688 c = 0
1689 for i in range(len(ang_)):
1690 ang_new[tmp +c] = ang_[i]
1691 ang_new2[tmp+c] = ang_[i]
1692 condition , value = self.search_pos(i,list1_)
1693 if condition:
1694 pos = tmp + c + 1
1695 for k in range(round(abs(list2_[value]))-1):
1696                     if tipo_case==0 or tipo_case==3:# ascending
1697 ang_new[pos+k] = ang_new[pos+k-1]+1
1698 ang_new2[pos+k] = numpy.nan
1699                     elif tipo_case==1 or tipo_case==2:# descending
1700 ang_new[pos+k] = ang_new[pos+k-1]-1
1701 ang_new2[pos+k] = numpy.nan
1702
1703 tmp = pos +k
1704 c = 0
1705 c=c+1
1706 return ang_new,ang_new2
1707
1708 def globalCheckPED(self,angulos,tipo_case):
1709 l1,l2 = self.get2List(angulos)
1710 print("l1",l1)
1711 print("l2",l2)
1712 if len(l1)>0:
1713 #angulos2 = self.fixData90(list_=l1,ang_=angulos)
1714 #l1,l2 = self.get2List(angulos2)
1715 ang1_,ang2_ = self.fixDataComp(ang_=angulos,list1_=l1,list2_=l2,tipo_case=tipo_case)
1716 #ang1_ = self.fixData90HL(ang1_)
1717 #ang2_ = self.fixData90HL(ang2_)
1718 else:
1719 ang1_= angulos
1720 ang2_= angulos
1721 return ang1_,ang2_
1722
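    # Worked example for globalCheckPED (input values assumed for illustration):
    # for an ascending scan (tipo_case 0 or 3) the 4-degree gap below is filled
    # with 1-degree steps, and a second copy marks the inserted samples as NaN so
    # the corresponding rows of the weather data can be masked later.
    #
    # ang1, ang2 = self.globalCheckPED(numpy.array([1., 2., 3., 7., 8.]), tipo_case=0)
    # # ang1 == [1. 2. 3. 4. 5. 6. 7. 8.]
    # # ang2 == [1. 2. 3. nan nan nan 7. 8.]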
1723
1724 def replaceNAN(self,data_weather,data_ele,val):
1725 data= data_ele
1726 data_T= data_weather
1727 #print(data.shape[0])
1728 #print(data_T.shape[0])
1729 #exit(1)
1730 if data.shape[0]> data_T.shape[0]:
1731 data_N = numpy.ones( [data.shape[0],data_T.shape[1]])
1732 c = 0
1733 for i in range(len(data)):
1734 if numpy.isnan(data[i]):
1735 data_N[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
1736 else:
1737 data_N[i,:]=data_T[c,:]
1738 c=c+1
1739 return data_N
1740 else:
1741 for i in range(len(data)):
1742 if numpy.isnan(data[i]):
1743 data_T[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
1744 return data_T
1745
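    # Worked example for replaceNAN (shapes assumed for illustration): when the
    # angle vector is longer than the data because NaN placeholders were inserted,
    # the data gets NaN rows at the same positions so both arrays stay aligned.
    #
    # data_ele_old = numpy.array([10., 11., numpy.nan, 13.])
    # data_weather = numpy.ones((3, 5))                 # one row per measured angle
    # out = self.replaceNAN(data_weather, data_ele_old, val=None)
    # # out.shape == (4, 5) and out[2] is a row of NaN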
1746
1747 def const_ploteo(self,val_ch,data_weather,data_ele,step,res,ang_max,ang_min,case_flag):
1748 ang_max= ang_max
1749 ang_min= ang_min
1750 data_weather=data_weather
1751 val_ch=val_ch
1752 ##print("*********************DATA WEATHER**************************************")
1753 ##print(data_weather)
1754
1755 '''
1756 print("**********************************************")
1757 print("**********************************************")
1758 print("***************ini**************")
1759 print("**********************************************")
1760 print("**********************************************")
1761 '''
1762 #print("data_ele",data_ele)
1763 #----------------------------------------------------------
1764
1765 #exit(1)
1766 tipo_case = case_flag[-1]
1767 print("tipo_case",tipo_case)
1768 #--------------------- new -------------------------
1769 data_ele_new ,data_ele_old= self.globalCheckPED(data_ele,tipo_case)
1770
1771         #------------------------- RHI CHANGES ---------------------------------
1772
1773 vec = numpy.where(data_ele<ang_max)
1774 data_ele = data_ele[vec]
1775 data_weather= data_weather[vec[0]]
1776
1777 len_vec = len(vec)
1778         data_ele_new = data_ele[::-1] # reversed
1779 data_weather = data_weather[::-1,:]
1780 new_i_ele = int(data_ele_new[0])
1781 new_f_ele = int(data_ele_new[-1])
1782
1783 n1= new_i_ele- ang_min
1784 n2= ang_max - new_f_ele-1
1785 if n1>0:
1786 ele1= numpy.linspace(ang_min+1,new_i_ele-1,n1)
1787 ele1_nan= numpy.ones(n1)*numpy.nan
1788 data_ele = numpy.hstack((ele1,data_ele_new))
1789 data_ele_old = numpy.hstack((ele1_nan,data_ele_new))
1790 if n2>0:
1791 ele2= numpy.linspace(new_f_ele+1,ang_max,n2)
1792 ele2_nan= numpy.ones(n2)*numpy.nan
1793 data_ele = numpy.hstack((data_ele,ele2))
1794 data_ele_old = numpy.hstack((data_ele_old,ele2_nan))
1795
1796
1797 print("ele shape",data_ele.shape)
1798 print(data_ele)
1799
1800 #print("self.data_ele_tmp",self.data_ele_tmp)
1801 val_mean = numpy.mean(data_weather[:,-1])
1802 self.val_mean = val_mean
1803 data_weather = self.replaceNAN(data_weather=data_weather,data_ele=data_ele_old,val=self.val_mean)
1804 self.data_ele_tmp[val_ch]= data_ele_old
1805
1806
1807 print("data_weather shape",data_weather.shape)
1808 print(data_weather)
1809 #exit(1)
1810 return data_weather,data_ele
1811
1812
1813 def plot(self):
1814 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1]).strftime('%Y-%m-%d %H:%M:%S')
1815 data = self.data[-1]
1816 r = self.data.yrange
1817 delta_height = r[1]-r[0]
1818 r_mask = numpy.where(r>=0)[0]
1819 ##print("delta_height",delta_height)
1820 #print("r_mask",r_mask,len(r_mask))
1821 r = numpy.arange(len(r_mask))*delta_height
1822 self.y = 2*r
1823 res = 1
1824 ###print("data['weather'].shape[0]",data['weather'].shape[0])
1825 ang_max = self.ang_max
1826 ang_min = self.ang_min
1827 var_ang =ang_max - ang_min
1828 step = (int(var_ang)/(res*data['weather'].shape[0]))
1829 ###print("step",step)
1830 #--------------------------------------------------------
1831 ##print('weather',data['weather'].shape)
1832 ##print('ele',data['ele'].shape)
1833
1834 ###self.res_weather, self.res_ele = self.const_ploteo(data_weather=data['weather'][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min)
1835 ###self.res_azi = numpy.mean(data['azi'])
1836 ###print("self.res_ele",self.res_ele)
1837 plt.clf()
1838 subplots = [121, 122]
1839 if self.ini==0:
1840 self.data_ele_tmp = numpy.ones([self.nplots,int(var_ang)])*numpy.nan
1841 self.res_weather= numpy.ones([self.nplots,int(var_ang),len(r_mask)])*numpy.nan
1842 print("SHAPE",self.data_ele_tmp.shape)
1843
1844 for i,ax in enumerate(self.axes):
1845 self.res_weather[i], self.res_ele = self.const_ploteo(val_ch=i, data_weather=data['weather'][i][:,r_mask],data_ele=data['ele'],step=step,res=res,ang_max=ang_max,ang_min=ang_min,case_flag=self.data['case_flag'])
1846 self.res_azi = numpy.mean(data['azi'])
1847
1848 print(self.res_ele)
1849 #exit(1)
1850 if ax.firsttime:
1851 #plt.clf()
1852 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1853 #fig=self.figures[0]
1854 else:
1855
1856 #plt.clf()
1074 1857 cgax, pm = wrl.vis.plot_rhi(self.res_weather[i],r=r,th=self.res_ele,ax=subplots[i], proj='cg',vmin=20, vmax=80)
1075 1858 caax = cgax.parasites[0]
1076 1859 paax = cgax.parasites[1]
1077 1860 cbar = plt.gcf().colorbar(pm, pad=0.075)
1078 1861 caax.set_xlabel('x_range [km]')
1079 1862 caax.set_ylabel('y_range [km]')
1080 1863             plt.text(1.0, 1.05, 'Elevation '+str(thisDatetime)+" Step "+str(self.ini)+ " Azi: "+str(round(self.res_azi,2)), transform=caax.transAxes, va='bottom',ha='right')
1081 1864 print("***************************self.ini****************************",self.ini)
1082 1865 self.ini= self.ini+1
@@ -1,834 +1,835
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 # SUBCHANNELS EN VEZ DE CHANNELS
7 7 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
8 8 # ACTUALIZACION DE VERSION
9 9 # HEADERS
10 10 # MODULO DE ESCRITURA
11 11 # METADATA
12 12
13 13 import os
14 14 import time
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19 from time import time
20 20 from time import sleep
21 21
22 22 import schainpy.admin
23 23 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 24 from schainpy.model.data.jrodata import Voltage
25 25 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 26
27 27 import pickle
28 28 try:
29 29 import digital_rf
30 30 except:
31 31 pass
32 32
33 33
34 34 class DigitalRFReader(ProcessingUnit):
35 35 '''
36 36 classdocs
37 37 '''
38 38
39 39 def __init__(self):
40 40 '''
41 41 Constructor
42 42 '''
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 46 self.dataOut = Voltage()
47 47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 50 self.__codeType = 0
51 51 self.__ippKm = None
52 52 self.__nCode = None
53 53 self.__nBaud = None
54 54 self.__code = None
55 55 self.dtype = None
56 56 self.oldAverage = None
57 57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
61 61 return
62 62
63 63 def __getCurrentSecond(self):
64 64
65 65 return self.__thisUnixSample / self.__sample_rate
66 66
67 67 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
68 68
69 69 def __setFileHeader(self):
70 70 '''
71 71 In this method will be initialized every parameter of dataOut object (header, no data)
72 72         Every parameter of the dataOut object (header, no data) is initialized in this method
73 73 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
74 74 if not self.getByBlock:
75 75 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
76 76 else:
77 77 nProfiles = self.nProfileBlocks # Number of profiles in one block
78 78
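        # For reference (example values assumed, not taken from any data set):
        # with a 1 MHz sample rate and 400 samples per IPP, ippSeconds = 400/1e6
        # = 400 us, which gives nProfiles = 2500 profiles per second in profile
        # mode, or nProfileBlocks profiles per read when reading by block.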
79 79 try:
80 80 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
81 81 self.__radarControllerHeader)
82 82 except:
83 83 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
84 84 txA=0,
85 85 txB=0,
86 86 nWindows=1,
87 87 nHeights=self.__nSamples,
88 88 firstHeight=self.__firstHeigth,
89 89 deltaHeight=self.__deltaHeigth,
90 90 codeType=self.__codeType,
91 91 nCode=self.__nCode, nBaud=self.__nBaud,
92 92 code=self.__code)
93 93
94 94 try:
95 95 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
96 96 except:
97 97 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
98 98 nProfiles=nProfiles,
99 99 nChannels=len(
100 100 self.__channelList),
101 101 adcResolution=14)
102 102 self.dataOut.type = "Voltage"
103 103
104 104 self.dataOut.data = None
105 105
106 106 self.dataOut.dtype = self.dtype
107 107
108 108 # self.dataOut.nChannels = 0
109 109
110 110 # self.dataOut.nHeights = 0
111 111
112 112 self.dataOut.nProfiles = int(nProfiles)
113 113
114 114 self.dataOut.heightList = self.__firstHeigth + \
115 115 numpy.arange(self.__nSamples, dtype=numpy.float) * \
116 116 self.__deltaHeigth
117 117
118 118 #self.dataOut.channelList = list(range(self.__num_subchannels))
119 119 self.dataOut.channelList = list(range(len(self.__channelList)))
120 120 if not self.getByBlock:
121 121
122 122 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
123 123 else:
124 124 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights*self.nProfileBlocks
125 125
126 126 # self.dataOut.channelIndexList = None
127 127
128 128 self.dataOut.flagNoData = True
129 129 if not self.getByBlock:
130 130 self.dataOut.flagDataAsBlock = False
131 131 else:
132 132 self.dataOut.flagDataAsBlock = True
133 133 # Set to TRUE if the data is discontinuous
134 134 self.dataOut.flagDiscontinuousBlock = False
135 135
136 136 self.dataOut.utctime = None
137 137
138 138 # timezone like jroheader, difference in minutes between UTC and localtime
139 139 self.dataOut.timeZone = self.__timezone / 60
140 140
141 141 self.dataOut.dstFlag = 0
142 142
143 143 self.dataOut.errorCount = 0
144 144
145 145 try:
146 146 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
147 147 'nCohInt', self.nCohInt)
148 148
149 149             # assume the data is already decoded
150 150 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
151 151 'flagDecodeData', self.flagDecodeData)
152 152
153 153             # assume the data has not been flipped
154 154 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
155 155
156 156 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
157 157
158 158 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
159 159 except:
160 160 pass
161 161
162 162 self.dataOut.ippSeconds = ippSeconds
163 163
164 164 # Time interval between profiles
165 165 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
166 166
167 167 self.dataOut.frequency = self.__frequency
168 168
169 169 self.dataOut.realtime = self.__online
170 170
171 171 def findDatafiles(self, path, startDate=None, endDate=None):
172 172
173 173 if not os.path.isdir(path):
174 174 return []
175 175
176 176 try:
177 177 digitalReadObj = digital_rf.DigitalRFReader(
178 178 path, load_all_metadata=True)
179 179 except:
180 180 digitalReadObj = digital_rf.DigitalRFReader(path)
181 181
182 182 channelNameList = digitalReadObj.get_channels()
183 183
184 184 if not channelNameList:
185 185 return []
186 186
187 187 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
188 188
189 189 sample_rate = metadata_dict['sample_rate'][0]
190 190
191 191 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
192 192
193 193 try:
194 194 timezone = this_metadata_file['timezone'].value
195 195 except:
196 196 timezone = 0
197 197
198 198 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
199 199 channelNameList[0]) / sample_rate - timezone
200 200
201 201 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
202 202 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
203 203
204 204 if not startDate:
205 205 startDate = startDatetime.date()
206 206
207 207 if not endDate:
208 208 endDate = endDatatime.date()
209 209
210 210 dateList = []
211 211
212 212 thisDatetime = startDatetime
213 213
214 214 while(thisDatetime <= endDatatime):
215 215
216 216 thisDate = thisDatetime.date()
217 217
218 218 if thisDate < startDate:
219 219 continue
220 220
221 221 if thisDate > endDate:
222 222 break
223 223
224 224 dateList.append(thisDate)
225 225 thisDatetime += datetime.timedelta(1)
226 226
227 227 return dateList
228 228
229 229 def setup(self, path=None,
230 230 startDate=None,
231 231 endDate=None,
232 232 startTime=datetime.time(0, 0, 0),
233 233 endTime=datetime.time(23, 59, 59),
234 234 channelList=None,
235 235 nSamples=None,
236 236 online=False,
237 237 delay=60,
238 238 buffer_size=1024,
239 239 ippKm=None,
240 240 nCohInt=1,
241 241 nCode=1,
242 242 nBaud=1,
243 243 flagDecodeData=False,
244 244 code=numpy.ones((1, 1), dtype=numpy.int),
245 245 getByBlock=0,
246 246 nProfileBlocks=1,
247 247 **kwargs):
248 248 '''
249 249 In this method we should set all initial parameters.
250 250
251 251 Inputs:
252 252 path
253 253 startDate
254 254 endDate
255 255 startTime
256 256 endTime
257 257 set
258 258 expLabel
259 259 ext
260 260 online
261 261 delay
262 262 '''
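        # A minimal usage sketch based on the signature above; the path, dates
        # and block sizes are placeholder values, not taken from this commit:
        #
        # reader = DigitalRFReader()
        # reader.setup(path='/data/drf_experiment',
        #              startDate=datetime.date(2021, 1, 1),
        #              endDate=datetime.date(2021, 1, 1),
        #              ippKm=600, online=False,
        #              getByBlock=1, nProfileBlocks=100)
        # reader.getData()   # fills reader.dataOut with one block of profiles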
263 263 self.path = path
264 264 self.nCohInt = nCohInt
265 265 self.flagDecodeData = flagDecodeData
266 266 self.i = 0
267 267
268 268 self.getByBlock = getByBlock
269 269 self.nProfileBlocks = nProfileBlocks
270 270 if not os.path.isdir(path):
271 271 raise ValueError("[Reading] Directory %s does not exist" % path)
272 272
273 273 try:
274 274 self.digitalReadObj = digital_rf.DigitalRFReader(
275 275 path, load_all_metadata=True)
276 276 except:
277 277 self.digitalReadObj = digital_rf.DigitalRFReader(path)
278 278
279 279 channelNameList = self.digitalReadObj.get_channels()
280 280
281 281 if not channelNameList:
282 282 raise ValueError("[Reading] Directory %s does not have any files" % path)
283 283
284 284 if not channelList:
285 285 channelList = list(range(len(channelNameList)))
286 286
287 287 ########## Reading metadata ######################
288 288
289 289 top_properties = self.digitalReadObj.get_properties(
290 290 channelNameList[channelList[0]])
291 291
292 292 self.__num_subchannels = top_properties['num_subchannels']
293 293 self.__sample_rate = 1.0 * \
294 294 top_properties['sample_rate_numerator'] / \
295 295 top_properties['sample_rate_denominator']
296 296 # self.__samples_per_file = top_properties['samples_per_file'][0]
297 297         self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # 0.15 km/us = c/2, so this is the range-gate spacing in km for the given sample rate
298 298
299 299 this_metadata_file = self.digitalReadObj.get_digital_metadata(
300 300 channelNameList[channelList[0]])
301 301 metadata_bounds = this_metadata_file.get_bounds()
302 302 self.fixed_metadata_dict = this_metadata_file.read(
303 303 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
304 304
305 305 try:
306 306 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
307 307 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
308 308 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
309 309 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
310 310 except:
311 311 pass
312 312
313 313 self.__frequency = None
314 314
315 315 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
316 316
317 317 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
318 318
319 319 try:
320 320 nSamples = self.fixed_metadata_dict['nSamples']
321 321 except:
322 322 nSamples = None
323 323
324 324 self.__firstHeigth = 0
325 325
326 326 try:
327 327 codeType = self.__radarControllerHeader['codeType']
328 328 except:
329 329 codeType = 0
330 330
331 331 try:
332 332 if codeType:
333 333 nCode = self.__radarControllerHeader['nCode']
334 334 nBaud = self.__radarControllerHeader['nBaud']
335 335 code = self.__radarControllerHeader['code']
336 336 except:
337 337 pass
338 338
339 339 if not ippKm:
340 340 try:
341 341 # seconds to km
342 342 ippKm = self.__radarControllerHeader['ipp']
343 343 except:
344 344 ippKm = None
345 345 ####################################################
346 346 self.__ippKm = ippKm
347 347 startUTCSecond = None
348 348 endUTCSecond = None
349 349
350 350 if startDate:
351 351 startDatetime = datetime.datetime.combine(startDate, startTime)
352 352 startUTCSecond = (
353 353 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
354 354
355 355 if endDate:
356 356 endDatetime = datetime.datetime.combine(endDate, endTime)
357 357 endUTCSecond = (endDatetime - datetime.datetime(1970,
358 358 1, 1)).total_seconds() + self.__timezone
359 359
360 360
361 361 print(startUTCSecond,endUTCSecond)
362 362 start_index, end_index = self.digitalReadObj.get_bounds(
363 363 channelNameList[channelList[0]])
364 364
365 365 ##print("*****",start_index,end_index)
366 366 if not startUTCSecond:
367 367 startUTCSecond = start_index / self.__sample_rate
368 368
369 369 if start_index > startUTCSecond * self.__sample_rate:
370 370 startUTCSecond = start_index / self.__sample_rate
371 371
372 372 if not endUTCSecond:
373 373 endUTCSecond = end_index / self.__sample_rate
374 374
375 375 if end_index < endUTCSecond * self.__sample_rate:
376 376 endUTCSecond = end_index / self.__sample_rate
377 377 if not nSamples:
378 378 if not ippKm:
379 379 raise ValueError("[Reading] nSamples or ippKm should be defined")
380 380 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
381 381
382 382 channelBoundList = []
383 383 channelNameListFiltered = []
384 384
385 385 for thisIndexChannel in channelList:
386 386 thisChannelName = channelNameList[thisIndexChannel]
387 387 start_index, end_index = self.digitalReadObj.get_bounds(
388 388 thisChannelName)
389 389 channelBoundList.append((start_index, end_index))
390 390 channelNameListFiltered.append(thisChannelName)
391 391
392 392 self.profileIndex = 0
393 393 self.i = 0
394 394 self.__delay = delay
395 395
396 396 self.__codeType = codeType
397 397 self.__nCode = nCode
398 398 self.__nBaud = nBaud
399 399 self.__code = code
400 400
401 401 self.__datapath = path
402 402 self.__online = online
403 403 self.__channelList = channelList
404 404 self.__channelNameList = channelNameListFiltered
405 405 self.__channelBoundList = channelBoundList
406 406 self.__nSamples = nSamples
407 407 if self.getByBlock:
408 408 nSamples = nSamples*nProfileBlocks
409 409
410 410
411 411         self.__samples_to_read = int(nSamples) # FIXED: NOW 40
412 412 self.__nChannels = len(self.__channelList)
413 413 #print("------------------------------------------")
414 414 #print("self.__samples_to_read",self.__samples_to_read)
415 415 #print("self.__nSamples",self.__nSamples)
416 416         # they are equal, so the buffer index comes out as 0
417 417 self.__startUTCSecond = startUTCSecond
418 418 self.__endUTCSecond = endUTCSecond
419 419
420 420 self.__timeInterval = 1.0 * self.__samples_to_read / \
421 421 self.__sample_rate # Time interval
422 422
423 423 if online:
424 424 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
425 425 startUTCSecond = numpy.floor(endUTCSecond)
426 426
427 427         # because in the other method the first thing done is to add samples_to_read
428 428 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
429 429
430 430 #self.__data_buffer = numpy.zeros(
431 431 # (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
432 432 self.__data_buffer = numpy.zeros((int(len(channelList)), self.__samples_to_read), dtype=numpy.complex)
433 433
434 434
435 435 self.__setFileHeader()
436 436 self.isConfig = True
437 437
438 438 print("[Reading] Digital RF Data was found from %s to %s " % (
439 439 datetime.datetime.utcfromtimestamp(
440 440 self.__startUTCSecond - self.__timezone),
441 441 datetime.datetime.utcfromtimestamp(
442 442 self.__endUTCSecond - self.__timezone)
443 443 ))
444 444
445 445 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
446 446 datetime.datetime.utcfromtimestamp(
447 447 endUTCSecond - self.__timezone)
448 448 ))
449 449 self.oldAverage = None
450 450 self.count = 0
451 451 self.executionTime = 0
452 452
453 453 def __reload(self):
454 454 # print
455 455 # print "%s not in range [%s, %s]" %(
456 456 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
457 457 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
458 458 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
459 459 # )
460 460 print("[Reading] reloading metadata ...")
461 461
462 462 try:
463 463 self.digitalReadObj.reload(complete_update=True)
464 464 except:
465 465 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
466 466
467 467 start_index, end_index = self.digitalReadObj.get_bounds(
468 468 self.__channelNameList[self.__channelList[0]])
469 469
470 470 if start_index > self.__startUTCSecond * self.__sample_rate:
471 471 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
472 472
473 473 if end_index > self.__endUTCSecond * self.__sample_rate:
474 474 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
475 475 print()
476 476 print("[Reading] New timerange found [%s, %s] " % (
477 477 datetime.datetime.utcfromtimestamp(
478 478 self.__startUTCSecond - self.__timezone),
479 479 datetime.datetime.utcfromtimestamp(
480 480 self.__endUTCSecond - self.__timezone)
481 481 ))
482 482
483 483 return True
484 484
485 485 return False
486 486
487 487 def timeit(self, toExecute):
488 488 t0 = time.time()
489 489 toExecute()
490 490 self.executionTime = time.time() - t0
491 491 if self.oldAverage is None:
492 492 self.oldAverage = self.executionTime
493 493 self.oldAverage = (self.executionTime + self.count *
494 494 self.oldAverage) / (self.count + 1.0)
495 495 self.count = self.count + 1.0
496 496 return
497 497
498 498 def __readNextBlock(self, seconds=30, volt_scale=1):
499 499 '''
500 500 '''
501 501
502 502 # Set the next data
503 503 self.__flagDiscontinuousBlock = False
504 504 self.__thisUnixSample += self.__samples_to_read
505 505
506 506 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
507 507             print ("[Reading] There is no more data in the selected time range")
508 508 if self.__online:
509 509 sleep(3)
510 510 self.__reload()
511 511 else:
512 512 return False
513 513
514 514 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
515 515 return False
516 516 self.__thisUnixSample -= self.__samples_to_read
517 517
518 518 indexChannel = 0
519 519
520 520 dataOk = False
521 521
522 522 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
523 523 for indexSubchannel in range(self.__num_subchannels):
524 524 try:
525 525 t0 = time()
526 526 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
527 527 self.__samples_to_read,
528 528 thisChannelName, sub_channel=indexSubchannel)
529 529 self.executionTime = time() - t0
530 530 if self.oldAverage is None:
531 531 self.oldAverage = self.executionTime
532 532 self.oldAverage = (
533 533 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
534 534 self.count = self.count + 1.0
535 535
536 536 except IOError as e:
537 537 # read next profile
538 538 self.__flagDiscontinuousBlock = True
539 539 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
540 540 break
541 541
542 542 if result.shape[0] != self.__samples_to_read:
543 543 self.__flagDiscontinuousBlock = True
544 544 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
545 545 result.shape[0],
546 546 self.__samples_to_read))
547 547 break
548 548
549 549 self.__data_buffer[indexChannel, :] = result * volt_scale
550 550 indexChannel+=1
551 551
552 552 dataOk = True
553 553
554 554 self.__utctime = self.__thisUnixSample / self.__sample_rate
555 555
556 556 if not dataOk:
557 557 return False
558 558
559 559 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
560 560 self.__samples_to_read,
561 561 self.__timeInterval))
562 562
563 563 self.__bufferIndex = 0
564 564
565 565 return True
566 566
567 567 def __isBufferEmpty(self):
568
568
569 569 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
570 570
571 571 def getData(self, seconds=30, nTries=5):
572 572 '''
573 573         This method gets the data from the files and puts it into the dataOut object
574 574
575 575         In addition, the buffer index is increased by one.
576 576
577 577         Return:
578 578             data : returns one voltage profile (heights x channels) copied from the
579 579                 buffer. If there are no more files to read, it returns None.
580 580
581 581 Affected:
582 582 self.dataOut
583 583 self.profileIndex
584 584 self.flagDiscontinuousBlock
585 585 self.flagIsNewBlock
586 586 '''
587 587 #print("getdata")
588 588 err_counter = 0
589 589 self.dataOut.flagNoData = True
590 590
591 591
592 592 if self.__isBufferEmpty():
593 593 #print("hi")
594 594 self.__flagDiscontinuousBlock = False
595 595
596 596 while True:
597 597 if self.__readNextBlock():
598 598 break
599 599 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
600 600 raise schainpy.admin.SchainError('Error')
601 601 return
602 602
603 603 if self.__flagDiscontinuousBlock:
604 604 raise schainpy.admin.SchainError('discontinuous block found')
605 605 return
606 606
607 607 if not self.__online:
608 608 raise schainpy.admin.SchainError('Online?')
609 609 return
610 610
611 611 err_counter += 1
612 612 if err_counter > nTries:
613 613                     raise schainpy.admin.SchainError('Max retries reached')
614 614 return
615 615
616 616 print('[Reading] waiting %d seconds to read a new block' % seconds)
617 617 sleep(seconds)
618 618
619 619
620 620 if not self.getByBlock:
621 621
622 622             #print("self.__bufferIndex",self.__bufferIndex)# apparently this value is always zero
623 623 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
624 624 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
625 625 self.dataOut.flagNoData = False
626 626 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
627 627 self.dataOut.profileIndex = self.profileIndex
628 628
629 629 self.__bufferIndex += self.__nSamples
630 630 self.profileIndex += 1
631 631
632 632 if self.profileIndex == self.dataOut.nProfiles:
633 633 self.profileIndex = 0
634 634 else:
635 635             # note: readNextBlock and __isBufferEmpty( should still be added here
636 636 self.dataOut.flagNoData = False
637 637 buffer = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex + self.__samples_to_read]
638 638 buffer = buffer.reshape((self.__nChannels, self.nProfileBlocks, int(self.__samples_to_read/self.nProfileBlocks)))
639 self.dataOut.nProfileBlocks = self.nProfileBlocks
639 640 self.dataOut.data = buffer
640 641 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
641 642 self.profileIndex += self.__samples_to_read
642 643 self.__bufferIndex += self.__samples_to_read
643 644 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
644 645 return True
645 646
646 647
647 648 def printInfo(self):
648 649 '''
649 650 '''
650 651 if self.__printInfo == False:
651 652 return
652 653
653 654 # self.systemHeaderObj.printInfo()
654 655 # self.radarControllerHeaderObj.printInfo()
655 656
656 657 self.__printInfo = False
657 658
658 659 def printNumberOfBlock(self):
659 660 '''
660 661 '''
661 662 return
662 663 # print self.profileIndex
663 664
664 665 def run(self, **kwargs):
665 666 '''
666 667 This method will be called many times so here you should put all your code
667 668 '''
668 669
669 670 if not self.isConfig:
670 671 self.setup(**kwargs)
671 672
672 673 self.getData(seconds=self.__delay)
673 674
674 675 return
675 676
676 677 @MPDecorator
677 678 class DigitalRFWriter(Operation):
678 679 '''
679 680 classdocs
680 681 '''
681 682
682 683 def __init__(self, **kwargs):
683 684 '''
684 685 Constructor
685 686 '''
686 687 Operation.__init__(self, **kwargs)
687 688 self.metadata_dict = {}
688 689 self.dataOut = None
689 690 self.dtype = None
690 691 self.oldAverage = 0
691 692
692 693 def setHeader(self):
693 694
694 695 self.metadata_dict['frequency'] = self.dataOut.frequency
695 696 self.metadata_dict['timezone'] = self.dataOut.timeZone
696 697 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
697 698 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
698 699 self.metadata_dict['heightList'] = self.dataOut.heightList
699 700 self.metadata_dict['channelList'] = self.dataOut.channelList
700 701 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
701 702 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
702 703 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
703 704 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
704 705 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
705 706 self.metadata_dict['type'] = self.dataOut.type
706 707 self.metadata_dict['flagDataAsBlock']= getattr(
707 708             self.dataOut, 'flagDataAsBlock', None) # check this
708 709
709 710 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
710 711 '''
711 712 In this method we should set all initial parameters.
712 713 Input:
713 714             dataOut: Input data, which will also be the output data
714 715 '''
715 716 self.setHeader()
716 717 self.__ippSeconds = dataOut.ippSeconds
717 718 self.__deltaH = dataOut.getDeltaH()
718 719 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
719 720 self.__dtype = dataOut.dtype
720 721 if len(dataOut.dtype) == 2:
721 722 self.__dtype = dataOut.dtype[0]
722 723 self.__nSamples = dataOut.systemHeaderObj.nSamples
723 724 self.__nProfiles = dataOut.nProfiles
724 725
725 726 if self.dataOut.type != 'Voltage':
726 727             raise ValueError('Digital RF cannot be used with this data type')
727 728 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
728 729 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
729 730 else:
730 731 self.arr_data = numpy.ones((self.__nSamples, len(
731 732 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
732 733
733 734 file_cadence_millisecs = 1000
734 735
735 736 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
736 737 sample_rate_numerator = int(sample_rate_fraction.numerator)
737 738 sample_rate_denominator = int(sample_rate_fraction.denominator)
738 739 start_global_index = dataOut.utctime * self.__sample_rate
739 740
740 741 uuid = 'prueba'
741 742 compression_level = 0
742 743 checksum = False
743 744 is_complex = True
744 745 num_subchannels = len(dataOut.channelList)
745 746 is_continuous = True
746 747 marching_periods = False
747 748
748 749 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
749 750 fileCadence, start_global_index,
750 751 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
751 752 is_complex, num_subchannels, is_continuous, marching_periods)
752 753 metadata_dir = os.path.join(path, 'metadata')
753 754 os.system('mkdir %s' % (metadata_dir))
754 755 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
755 756 sample_rate_numerator, sample_rate_denominator,
756 757 metadataFile)
757 758 self.isConfig = True
758 759 self.currentSample = 0
759 760 self.oldAverage = 0
760 761 self.count = 0
761 762 return
762 763
763 764 def writeMetadata(self):
764 765 start_idx = self.__sample_rate * self.dataOut.utctime
765 766
766 767 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
767 768 )
768 769 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
769 770 )
770 771 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
771 772 )
772 773 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
773 774 return
774 775
775 776 def timeit(self, toExecute):
776 777 t0 = time()
777 778 toExecute()
778 779 self.executionTime = time() - t0
779 780 if self.oldAverage is None:
780 781 self.oldAverage = self.executionTime
781 782 self.oldAverage = (self.executionTime + self.count *
782 783 self.oldAverage) / (self.count + 1.0)
783 784 self.count = self.count + 1.0
784 785 return
785 786
786 787 def writeData(self):
787 788 if self.dataOut.type != 'Voltage':
788 789             raise ValueError('Digital RF cannot be used with this data type')
789 790 for channel in self.dataOut.channelList:
790 791 for i in range(self.dataOut.nFFTPoints):
791 792 self.arr_data[1][channel * self.dataOut.nFFTPoints +
792 793 i]['r'] = self.dataOut.data[channel][i].real
793 794 self.arr_data[1][channel * self.dataOut.nFFTPoints +
794 795 i]['i'] = self.dataOut.data[channel][i].imag
795 796 else:
796 797 for i in range(self.dataOut.systemHeaderObj.nSamples):
797 798 for channel in self.dataOut.channelList:
798 799 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
799 800 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
800 801
801 802 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
802 803 self.timeit(f)
803 804
804 805 return
805 806
806 807 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
807 808 '''
808 809 This method will be called many times so here you should put all your code
809 810 Inputs:
810 811 dataOut: object with the data
811 812 '''
812 813 # print dataOut.__dict__
813 814 self.dataOut = dataOut
814 815 if not self.isConfig:
815 816 self.setup(dataOut, path, frequency, fileCadence,
816 817 dirCadence, metadataCadence, **kwargs)
817 818 self.writeMetadata()
818 819
819 820 self.writeData()
820 821
821 822 ## self.currentSample += 1
822 823 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
823 824 # self.writeMetadata()
824 825 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
825 826
826 827         return dataOut # this return does not appear in version 2.7
827 828
828 829 def close(self):
829 830 print('[Writing] - Closing files ')
830 831 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
831 832 try:
832 833 self.digitalWriteObj.close()
833 834 except:
834 835 pass
1 NO CONTENT: modified file
@@ -1,1633 +1,1862
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList):
59 59
60 60 channelIndexList = []
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
68 68 self.selectChannelsByIndex(channelIndexList)
69 69 return self.dataOut
70 70
71 71 def selectChannelsByIndex(self, channelIndexList):
72 72 """
73 73 Selecciona un bloque de datos en base a canales segun el channelIndexList
74 74
75 75 Input:
76 76 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
77 77
78 78 Affected:
79 79 self.dataOut.data
80 80 self.dataOut.channelIndexList
81 81 self.dataOut.nChannels
82 82 self.dataOut.m_ProcessingHeader.totalSpectra
83 83 self.dataOut.systemHeaderObj.numChannels
84 84 self.dataOut.m_ProcessingHeader.blockSize
85 85
86 86 Return:
87 87 None
88 88 """
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
98 98 """
99 99 data = self.dataOut.data[channelIndexList,:,:]
100 100 else:
101 101 data = self.dataOut.data[channelIndexList,:]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
105 105 self.dataOut.channelList = range(len(channelIndexList))
106 106
107 107 elif self.dataOut.type == 'Spectra':
108 108 data_spc = self.dataOut.data_spc[channelIndexList, :]
109 109 data_dc = self.dataOut.data_dc[channelIndexList, :]
110 110
111 111 self.dataOut.data_spc = data_spc
112 112 self.dataOut.data_dc = data_dc
113 113
114 114 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
115 115 self.dataOut.channelList = range(len(channelIndexList))
116 116 self.__selectPairsByChannel(channelIndexList)
117 117
118 118 return 1
119 119
120 120 def __selectPairsByChannel(self, channelList=None):
121 121
122 122 if channelList == None:
123 123 return
124 124
125 125 pairsIndexListSelected = []
126 126 for pairIndex in self.dataOut.pairsIndexList:
127 127 # First pair
128 128 if self.dataOut.pairsList[pairIndex][0] not in channelList:
129 129 continue
130 130 # Second pair
131 131 if self.dataOut.pairsList[pairIndex][1] not in channelList:
132 132 continue
133 133
134 134 pairsIndexListSelected.append(pairIndex)
135 135
136 136 if not pairsIndexListSelected:
137 137 self.dataOut.data_cspc = None
138 138 self.dataOut.pairsList = []
139 139 return
140 140
141 141 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
142 142 self.dataOut.pairsList = [self.dataOut.pairsList[i]
143 143 for i in pairsIndexListSelected]
144 144
145 145 return
146 146
147 147 class selectHeights(Operation):
148 148
149 149 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
150 150 """
151 151         Selects a block of data based on a group of height values within the range
152 152         minHei <= height <= maxHei
153 153
154 154         Input:
155 155             minHei : minimum height to consider
156 156             maxHei : maximum height to consider
157 157
158 158         Affected:
159 159             Several values are changed indirectly through the selectHeightsByIndex method
160 160
161 161         Return:
162 162             1 if the method ran successfully, otherwise 0
163 163 """
164 164
165 165 self.dataOut = dataOut
166 166
167 167 if minHei and maxHei:
168 168
169 169 if (minHei < self.dataOut.heightList[0]):
170 170 minHei = self.dataOut.heightList[0]
171 171
172 172 if (maxHei > self.dataOut.heightList[-1]):
173 173 maxHei = self.dataOut.heightList[-1]
174 174
175 175 minIndex = 0
176 176 maxIndex = 0
177 177 heights = self.dataOut.heightList
178 178
179 179 inda = numpy.where(heights >= minHei)
180 180 indb = numpy.where(heights <= maxHei)
181 181
182 182 try:
183 183 minIndex = inda[0][0]
184 184 except:
185 185 minIndex = 0
186 186
187 187 try:
188 188 maxIndex = indb[0][-1]
189 189 except:
190 190 maxIndex = len(heights)
191 191
192 192 self.selectHeightsByIndex(minIndex, maxIndex)
193 193
194 194 return self.dataOut
195 195
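    # Usage note (height values assumed for illustration): run(dataOut,
    # minHei=80, maxHei=600) keeps only the gates whose heights (same units as
    # dataOut.heightList) fall inside that range, clipping the limits to the
    # available height list when they fall outside it.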
196 196 def selectHeightsByIndex(self, minIndex, maxIndex):
197 197 """
198 198         Selects a block of data based on a group of height indexes within the range
199 199         minIndex <= index <= maxIndex
200 200
201 201         Input:
202 202             minIndex : minimum height index to consider
203 203             maxIndex : maximum height index to consider
204 204
205 205         Affected:
206 206             self.dataOut.data
207 207             self.dataOut.heightList
208 208
209 209         Return:
210 210             1 if the method ran successfully, otherwise 0
211 211 """
212 212
213 213 if self.dataOut.type == 'Voltage':
214 214 if (minIndex < 0) or (minIndex > maxIndex):
215 215 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
216 216
217 217 if (maxIndex >= self.dataOut.nHeights):
218 218 maxIndex = self.dataOut.nHeights
219
219 #print("shapeeee",self.dataOut.data.shape)
220 220 #voltage
221 221 if self.dataOut.flagDataAsBlock:
222 222 """
223 223                 If the data is read by blocks, dimension = [nChannels, nProfiles, nHeis]
224 224 """
225 225 data = self.dataOut.data[:,:, minIndex:maxIndex]
226 226 else:
227 227 data = self.dataOut.data[:, minIndex:maxIndex]
228 228
229 229 # firstHeight = self.dataOut.heightList[minIndex]
230 230
231 231 self.dataOut.data = data
232 232 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
233 233
234 234 if self.dataOut.nHeights <= 1:
235 235 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
236 236 elif self.dataOut.type == 'Spectra':
237 237 if (minIndex < 0) or (minIndex > maxIndex):
238 238 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
239 239 minIndex, maxIndex))
240 240
241 241 if (maxIndex >= self.dataOut.nHeights):
242 242 maxIndex = self.dataOut.nHeights - 1
243 243
244 244 # Spectra
245 245 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
246 246
247 247 data_cspc = None
248 248 if self.dataOut.data_cspc is not None:
249 249 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
250 250
251 251 data_dc = None
252 252 if self.dataOut.data_dc is not None:
253 253 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
254 254
255 255 self.dataOut.data_spc = data_spc
256 256 self.dataOut.data_cspc = data_cspc
257 257 self.dataOut.data_dc = data_dc
258 258
259 259 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
260 260
261 261 return 1
262 262
263 263
264 264 class filterByHeights(Operation):
265 265
266 266 def run(self, dataOut, window):
267 267
268 268 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
269 269
270 270 if window == None:
271 271 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
272 272
273 273 newdelta = deltaHeight * window
274 274 r = dataOut.nHeights % window
275 275 newheights = (dataOut.nHeights-r)/window
276 276
277 277 if newheights <= 1:
278 278 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
279 279
280 280 if dataOut.flagDataAsBlock:
281 281 """
282 282 Si la data es obtenida por bloques, dimension = [nChannels, nProfiles, nHeis]
283 283             If the data is read by blocks, dimension = [nChannels, nProfiles, nHeis]
284 284 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
285 285 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
286 286 buffer = numpy.sum(buffer,3)
287 287
288 288 else:
289 289 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
290 290 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
291 291 buffer = numpy.sum(buffer,2)
292 292
293 293 dataOut.data = buffer
294 294 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
295 295 dataOut.windowOfFilter = window
296 296
297 297 return dataOut
298 298
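    # Worked example (numbers assumed for illustration): with 1000 heights
    # spaced 0.15 km apart and window = 4, filterByHeights sums groups of 4
    # consecutive gates, leaving 250 heights spaced 0.6 km apart; any trailing
    # gates beyond a multiple of the window are dropped.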
299 299
300 300 class setH0(Operation):
301 301
302 302 def run(self, dataOut, h0, deltaHeight = None):
303 303
304 304 if not deltaHeight:
305 305 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
306 306
307 307 nHeights = dataOut.nHeights
308 308
309 309 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
310 310
311 311 dataOut.heightList = newHeiRange
312 312
313 313 return dataOut
314 314
315 315
316 316 class deFlip(Operation):
317 317
318 318 def run(self, dataOut, channelList = []):
319 319
320 320 data = dataOut.data.copy()
321 321
322 322 if dataOut.flagDataAsBlock:
323 323 flip = self.flip
324 324 profileList = list(range(dataOut.nProfiles))
325 325
326 326 if not channelList:
327 327 for thisProfile in profileList:
328 328 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
329 329 flip *= -1.0
330 330 else:
331 331 for thisChannel in channelList:
332 332 if thisChannel not in dataOut.channelList:
333 333 continue
334 334
335 335 for thisProfile in profileList:
336 336 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
337 337 flip *= -1.0
338 338
339 339 self.flip = flip
340 340
341 341 else:
342 342 if not channelList:
343 343 data[:,:] = data[:,:]*self.flip
344 344 else:
345 345 for thisChannel in channelList:
346 346 if thisChannel not in dataOut.channelList:
347 347 continue
348 348
349 349 data[thisChannel,:] = data[thisChannel,:]*self.flip
350 350
351 351 self.flip *= -1.
352 352
353 353 dataOut.data = data
354 354
355 355 return dataOut
356 356
357 357
358 358 class setAttribute(Operation):
359 359 '''
360 360 Set an arbitrary attribute(s) to dataOut
361 361 '''
362 362
363 363 def __init__(self):
364 364
365 365 Operation.__init__(self)
366 366 self._ready = False
367 367
368 368 def run(self, dataOut, **kwargs):
369 369
370 370 for key, value in kwargs.items():
371 371 setattr(dataOut, key, value)
372 372
373 373 return dataOut
374 374
375 375
376 376 @MPDecorator
377 377 class printAttribute(Operation):
378 378 '''
379 379 Print an arbitrary attribute of dataOut
380 380 '''
381 381
382 382 def __init__(self):
383 383
384 384 Operation.__init__(self)
385 385
386 386 def run(self, dataOut, attributes):
387 387
388 388 if isinstance(attributes, str):
389 389 attributes = [attributes]
390 390 for attr in attributes:
391 391 if hasattr(dataOut, attr):
392 392 log.log(getattr(dataOut, attr), attr)
393 393
394 394
395 395 class interpolateHeights(Operation):
396 396
397 397 def run(self, dataOut, topLim, botLim):
398 398         #69 to 72 for julia
399 399         #82-84 for meteors
400 400 if len(numpy.shape(dataOut.data))==2:
401 401 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
402 402 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
403 403 #dataOut.data[:,botLim:limSup+1] = sampInterp
404 404 dataOut.data[:,botLim:topLim+1] = sampInterp
405 405 else:
406 406 nHeights = dataOut.data.shape[2]
407 407 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
408 408 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
409 409 f = interpolate.interp1d(x, y, axis = 2)
410 410 xnew = numpy.arange(botLim,topLim+1)
411 411 ynew = f(xnew)
412 412 dataOut.data[:,:,botLim:topLim+1] = ynew
413 413
414 414 return dataOut
415 415
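    # Worked example (indices assumed for illustration): run(dataOut, topLim=72,
    # botLim=69) rebuilds height bins 69..72 from their neighbours; in the 2-D
    # case they get the average of bins 68 and 73, in the 3-D (block) case a
    # linear interpolation along the height axis.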
416 416
417 417 class CohInt(Operation):
418 418
419 419 isConfig = False
420 420 __profIndex = 0
421 421 __byTime = False
422 422 __initime = None
423 423 __lastdatatime = None
424 424 __integrationtime = None
425 425 __buffer = None
426 426 __bufferStride = []
427 427 __dataReady = False
428 428 __profIndexStride = 0
429 429 __dataToPutStride = False
430 430 n = None
431 431
432 432 def __init__(self, **kwargs):
433 433
434 434 Operation.__init__(self, **kwargs)
435 435
436 436 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
437 437 """
438 438 Set the parameters of the integration class.
439 439
440 440 Inputs:
441 441
442 442 n : Number of coherent integrations
443 443             timeInterval : Integration time. If the parameter "n" is given, this one is ignored
444 444 overlapping :
445 445 """
446 446
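        # Usage note (parameter values assumed for illustration): the operation
        # is normally driven through run(), e.g. run(dataOut, n=100) sums 100
        # consecutive profiles before releasing one integrated profile;
        # run(dataOut, timeInterval=1) integrates by elapsed time instead, and
        # overlapping=True keeps a sliding buffer of the last n profiles.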
447 447 self.__initime = None
448 448 self.__lastdatatime = 0
449 449 self.__buffer = None
450 450 self.__dataReady = False
451 451 self.byblock = byblock
452 452 self.stride = stride
453 453
454 454 if n == None and timeInterval == None:
455 455 raise ValueError("n or timeInterval should be specified ...")
456 456
457 457 if n != None:
458 458 self.n = n
459 459 self.__byTime = False
460 460 else:
461 461 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
462 462 self.n = 9999
463 463 self.__byTime = True
464 464
465 465 if overlapping:
466 466 self.__withOverlapping = True
467 467 self.__buffer = None
468 468 else:
469 469 self.__withOverlapping = False
470 470 self.__buffer = 0
471 471
472 472 self.__profIndex = 0
473 473
474 474 def putData(self, data):
475 475
476 476 """
477 477 Add a profile to the __buffer and increase in one the __profileIndex
478 478
479 479 """
480 480
481 481 if not self.__withOverlapping:
482 482 self.__buffer += data.copy()
483 483 self.__profIndex += 1
484 484 return
485 485
486 486 #Overlapping data
487 487 nChannels, nHeis = data.shape
488 488 data = numpy.reshape(data, (1, nChannels, nHeis))
489 489
490 490 #If the buffer is empty then it takes the data value
491 491 if self.__buffer is None:
492 492 self.__buffer = data
493 493 self.__profIndex += 1
494 494 return
495 495
496 496         #If the buffer length is lower than n then stack the data value
497 497 if self.__profIndex < self.n:
498 498 self.__buffer = numpy.vstack((self.__buffer, data))
499 499 self.__profIndex += 1
500 500 return
501 501
502 502 #If the buffer length is equal to n then replacing the last buffer value with the data value
503 503 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
504 504 self.__buffer[self.n-1] = data
505 505 self.__profIndex = self.n
506 506 return
507 507
508 508
509 509 def pushData(self):
510 510 """
511 511 Return the sum of the last profiles and the profiles used in the sum.
512 512
513 513 Affected:
514 514
515 515 self.__profileIndex
516 516
517 517 """
518 518
519 519 if not self.__withOverlapping:
520 520 data = self.__buffer
521 521 n = self.__profIndex
522 522
523 523 self.__buffer = 0
524 524 self.__profIndex = 0
525 525
526 526 return data, n
527 527
528 528 #Integration with Overlapping
529 529 data = numpy.sum(self.__buffer, axis=0)
530 530 # print data
531 531 # raise
532 532 n = self.__profIndex
533 533
534 534 return data, n
535 535
536 536 def byProfiles(self, data):
537 537
538 538 self.__dataReady = False
539 539 avgdata = None
540 540 # n = None
541 541 # print data
542 542 # raise
543 543 self.putData(data)
544 544
545 545 if self.__profIndex == self.n:
546 546 avgdata, n = self.pushData()
547 547 self.__dataReady = True
548 548
549 549 return avgdata
550 550
551 551 def byTime(self, data, datatime):
552 552
553 553 self.__dataReady = False
554 554 avgdata = None
555 555 n = None
556 556
557 557 self.putData(data)
558 558
559 559 if (datatime - self.__initime) >= self.__integrationtime:
560 560 avgdata, n = self.pushData()
561 561 self.n = n
562 562 self.__dataReady = True
563 563
564 564 return avgdata
565 565
566 566 def integrateByStride(self, data, datatime):
567 567 # print data
568 568 if self.__profIndex == 0:
569 569 self.__buffer = [[data.copy(), datatime]]
570 570 else:
571 571 self.__buffer.append([data.copy(),datatime])
572 572 self.__profIndex += 1
573 573 self.__dataReady = False
574 574
575 575 if self.__profIndex == self.n * self.stride :
576 576 self.__dataToPutStride = True
577 577 self.__profIndexStride = 0
578 578 self.__profIndex = 0
579 579 self.__bufferStride = []
580 580 for i in range(self.stride):
581 581 current = self.__buffer[i::self.stride]
582 582 data = numpy.sum([t[0] for t in current], axis=0)
583 583 avgdatatime = numpy.average([t[1] for t in current])
584 584 # print data
585 585 self.__bufferStride.append((data, avgdatatime))
586 586
587 587 if self.__dataToPutStride:
588 588 self.__dataReady = True
589 589 self.__profIndexStride += 1
590 590 if self.__profIndexStride == self.stride:
591 591 self.__dataToPutStride = False
592 592 # print self.__bufferStride[self.__profIndexStride - 1]
593 593 # raise
594 594 return self.__bufferStride[self.__profIndexStride - 1]
595 595
596 596
597 597 return None, None
598 598
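The interleaving used by integrateByStride can be hard to visualize; the following standalone sketch (made-up values) reproduces the grouping: with stride s, profile k belongs to group k mod s, and each group of n profiles is summed while its timestamps are averaged.

import numpy

# Illustrative regrouping: n=3, stride=2 splits profiles [p0..p5] into the
# interleaved groups [p0, p2, p4] and [p1, p3, p5].
profiles = [(numpy.full((2, 4), k, dtype=complex), 100.0 + k) for k in range(6)]
n, stride = 3, 2
for i in range(stride):
    group = profiles[i::stride]
    data = numpy.sum([t[0] for t in group], axis=0)
    avgtime = numpy.average([t[1] for t in group])
    print(i, data[0, 0], avgtime)    # group 0: (6+0j) at 102.0, group 1: (9+0j) at 103.0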
599 599 def integrate(self, data, datatime=None):
600 600
601 601 if self.__initime == None:
602 602 self.__initime = datatime
603 603
604 604 if self.__byTime:
605 605 avgdata = self.byTime(data, datatime)
606 606 else:
607 607 avgdata = self.byProfiles(data)
608 608
609 609
610 610 self.__lastdatatime = datatime
611 611
612 612 if avgdata is None:
613 613 return None, None
614 614
615 615 avgdatatime = self.__initime
616 616
617 617 deltatime = datatime - self.__lastdatatime
618 618
619 619 if not self.__withOverlapping:
620 620 self.__initime = datatime
621 621 else:
622 622 self.__initime += deltatime
623 623
624 624 return avgdata, avgdatatime
625 625
626 626 def integrateByBlock(self, dataOut):
627 627
628 628 times = int(dataOut.data.shape[1]/self.n)
629 629 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
630 630
631 631 id_min = 0
632 632 id_max = self.n
633 633
634 634 for i in range(times):
635 635 junk = dataOut.data[:,id_min:id_max,:]
636 636 avgdata[:,i,:] = junk.sum(axis=1)
637 637 id_min += self.n
638 638 id_max += self.n
639 639
640 640 timeInterval = dataOut.ippSeconds*self.n
641 641 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
642 642 self.__dataReady = True
643 643 return avgdata, avgdatatime
644 644
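A vectorized sketch of the same block integration, assuming illustrative shapes (it is not the method itself, just the equivalent reshape-and-sum over groups of n profiles):

import numpy

# Group the profile axis into chunks of n and sum each chunk.
nChannels, nProfiles, nHeights, n = 2, 8, 5, 4
data = numpy.ones((nChannels, nProfiles, nHeights), dtype=complex)
times = nProfiles // n
avgdata = data[:, :times * n, :].reshape(nChannels, times, n, nHeights).sum(axis=2)
print(avgdata.shape)   # (2, 2, 5); the timestamp would be utctime + (times-1)*ippSeconds*n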
645 645 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
646 646
647 647 if not self.isConfig:
648 648 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
649 649 self.isConfig = True
650 650
651 651 if dataOut.flagDataAsBlock:
652 652 """
653 653             If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
654 654 """
655 655 avgdata, avgdatatime = self.integrateByBlock(dataOut)
656 656 dataOut.nProfiles /= self.n
657 657 else:
658 658 if stride is None:
659 659 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
660 660 else:
661 661 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
662 662
663 663
664 664 # dataOut.timeInterval *= n
665 665 dataOut.flagNoData = True
666 666
667 667 if self.__dataReady:
668 668 dataOut.data = avgdata
669 669 if not dataOut.flagCohInt:
670 670 dataOut.nCohInt *= self.n
671 671 dataOut.flagCohInt = True
672 672 dataOut.utctime = avgdatatime
673 673 # print avgdata, avgdatatime
674 674 # raise
675 675 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
676 676 dataOut.flagNoData = False
677 677 return dataOut
678 678
679 679 class Decoder(Operation):
680 680
681 681 isConfig = False
682 682 __profIndex = 0
683 683
684 684 code = None
685 685
686 686 nCode = None
687 687 nBaud = None
688 688
689 689 def __init__(self, **kwargs):
690 690
691 691 Operation.__init__(self, **kwargs)
692 692
693 693 self.times = None
694 694 self.osamp = None
695 695 # self.__setValues = False
696 696 self.isConfig = False
697 697 self.setupReq = False
698 698 def setup(self, code, osamp, dataOut):
699 699
700 700 self.__profIndex = 0
701 701
702 702 self.code = code
703 703
704 704 self.nCode = len(code)
705 705 self.nBaud = len(code[0])
706 706
707 707 if (osamp != None) and (osamp >1):
708 708 self.osamp = osamp
709 709 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
710 710 self.nBaud = self.nBaud*self.osamp
711 711
712 712 self.__nChannels = dataOut.nChannels
713 713 self.__nProfiles = dataOut.nProfiles
714 714 self.__nHeis = dataOut.nHeights
715 715
716 716 if self.__nHeis < self.nBaud:
717 717 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
718 718
719 719 #Frequency
720 720 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
721 721
722 722 __codeBuffer[:,0:self.nBaud] = self.code
723 723
724 724 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
725 725
726 726 if dataOut.flagDataAsBlock:
727 727
728 728 self.ndatadec = self.__nHeis #- self.nBaud + 1
729 729
730 730 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
731 731
732 732 else:
733 733
734 734 #Time
735 735 self.ndatadec = self.__nHeis #- self.nBaud + 1
736 736
737 737 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
738 738
739 739 def __convolutionInFreq(self, data):
740 740
741 741 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
742 742
743 743 fft_data = numpy.fft.fft(data, axis=1)
744 744
745 745 conv = fft_data*fft_code
746 746
747 747 data = numpy.fft.ifft(conv,axis=1)
748 748
749 749 return data
750 750
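For reference, frequency-domain decoding as used here is a circular cross-correlation with the zero-padded code: ifft(fft(data) * conj(fft(code))). A self-contained sketch with made-up values:

import numpy

# Decode one profile against a zero-padded code in the frequency domain.
nHeis, nBaud = 16, 4
code = numpy.array([1, 1, -1, 1], dtype=complex)
data = numpy.random.randn(nHeis) + 1j * numpy.random.randn(nHeis)

code_buffer = numpy.zeros(nHeis, dtype=complex)
code_buffer[:nBaud] = code
fft_code = numpy.conj(numpy.fft.fft(code_buffer))
decoded = numpy.fft.ifft(numpy.fft.fft(data) * fft_code)
print(decoded.shape)   # (16,)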
751 751 def __convolutionInFreqOpt(self, data):
752 752
753 753 raise NotImplementedError
754 754
755 755 def __convolutionInTime(self, data):
756 756
757 757 code = self.code[self.__profIndex]
758 758 for i in range(self.__nChannels):
759 759 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
760 760
761 761 return self.datadecTime
762 762
763 763 def __convolutionByBlockInTime(self, data):
764 764
765 765 repetitions = int(self.__nProfiles / self.nCode)
766 766 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
767 767 junk = junk.flatten()
768 768 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
769 769 profilesList = range(self.__nProfiles)
770 770
771 771 for i in range(self.__nChannels):
772 772 for j in profilesList:
773 773 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
774 774 return self.datadecTime
775 775
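The as_strided call above builds `repetitions` zero-copy views of the code matrix; flattening and reshaping then yields one code row per profile. A small sketch (illustrative sizes; in the operation, repetitions = nProfiles // nCode) showing it is equivalent to numpy.tile:

import numpy

nCode, nBaud, repetitions = 2, 4, 3
code = numpy.array([[1, 1, -1, 1], [1, -1, 1, 1]], dtype=complex)
junk = numpy.lib.stride_tricks.as_strided(code, (repetitions, code.size), (0, code.itemsize))
code_block = junk.flatten().reshape(nCode * repetitions, nBaud)
assert numpy.array_equal(code_block, numpy.tile(code, (repetitions, 1)))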
776 776 def __convolutionByBlockInFreq(self, data):
777 777
778 778         raise NotImplementedError("Decoder by frequency for blocks not implemented")
779 779
780 780
781 781 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
782 782
783 783 fft_data = numpy.fft.fft(data, axis=2)
784 784
785 785 conv = fft_data*fft_code
786 786
787 787 data = numpy.fft.ifft(conv,axis=2)
788 788
789 789 return data
790 790
791 791
792 792 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
793 793
794 794 if dataOut.flagDecodeData:
795 795             print("This data is already decoded, decoding it again ...")
796 796
797 797 if not self.isConfig:
798 798
799 799 if code is None:
800 800 if dataOut.code is None:
801 801 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
802 802
803 803 code = dataOut.code
804 804 else:
805 805 code = numpy.array(code).reshape(nCode,nBaud)
806 806 self.setup(code, osamp, dataOut)
807 807
808 808 self.isConfig = True
809 809
810 810 if mode == 3:
811 811 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
812 812
813 813 if times != None:
814 814             sys.stderr.write("Decoder Warning: Argument 'times' is not used anymore\n")
815 815
816 816 if self.code is None:
817 817 print("Fail decoding: Code is not defined.")
818 818 return
819 819
820 820 self.__nProfiles = dataOut.nProfiles
821 821 datadec = None
822 822
823 823 if mode == 3:
824 824 mode = 0
825 825
826 826 if dataOut.flagDataAsBlock:
827 827 """
828 828             Decoding when data has been read as a block,
829 829 """
830 830
831 831 if mode == 0:
832 832 datadec = self.__convolutionByBlockInTime(dataOut.data)
833 833 if mode == 1:
834 834 datadec = self.__convolutionByBlockInFreq(dataOut.data)
835 835 else:
836 836 """
837 837             Decoding when data has been read profile by profile
838 838 """
839 839 if mode == 0:
840 840 datadec = self.__convolutionInTime(dataOut.data)
841 841
842 842 if mode == 1:
843 843 datadec = self.__convolutionInFreq(dataOut.data)
844 844
845 845 if mode == 2:
846 846 datadec = self.__convolutionInFreqOpt(dataOut.data)
847 847
848 848 if datadec is None:
849 849 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
850 850
851 851 dataOut.code = self.code
852 852 dataOut.nCode = self.nCode
853 853 dataOut.nBaud = self.nBaud
854 854
855 855 dataOut.data = datadec
856 856
857 857 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
858 858
859 859         dataOut.flagDecodeData = True #assume the data is already decoded
860 860
861 861 if self.__profIndex == self.nCode-1:
862 862 self.__profIndex = 0
863 863 return dataOut
864 864
865 865 self.__profIndex += 1
866 866
867 867 return dataOut
868 868 # dataOut.flagDeflipData = True #assume the data is already flipped
869 869
870 870
871 871 class ProfileConcat(Operation):
872 872
873 873 isConfig = False
874 874 buffer = None
875 875
876 876 def __init__(self, **kwargs):
877 877
878 878 Operation.__init__(self, **kwargs)
879 879 self.profileIndex = 0
880 880
881 881 def reset(self):
882 882 self.buffer = numpy.zeros_like(self.buffer)
883 883 self.start_index = 0
884 884 self.times = 1
885 885
886 886 def setup(self, data, m, n=1):
887 887 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
888 888 self.nHeights = data.shape[1]#.nHeights
889 889 self.start_index = 0
890 890 self.times = 1
891 891
892 892 def concat(self, data):
893 893
894 894 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
895 895 self.start_index = self.start_index + self.nHeights
896 896
897 897 def run(self, dataOut, m):
898 898 dataOut.flagNoData = True
899 899
900 900 if not self.isConfig:
901 901 self.setup(dataOut.data, m, 1)
902 902 self.isConfig = True
903 903
904 904 if dataOut.flagDataAsBlock:
905 905 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
906 906
907 907 else:
908 908 self.concat(dataOut.data)
909 909 self.times += 1
910 910 if self.times > m:
911 911 dataOut.data = self.buffer
912 912 self.reset()
913 913 dataOut.flagNoData = False
914 914                 # more header and dataOut properties should be updated here, e.g. the heights
915 915 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
916 916 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
917 917 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
918 918 dataOut.ippSeconds *= m
919 919 return dataOut
920 920
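A short sketch (made-up numbers) of how the height list grows after concatenating m profiles, mirroring the update in run():

import numpy

m, nHeights, deltaHeight, h0 = 3, 4, 0.25, 1.0
heightList = h0 + deltaHeight * numpy.arange(nHeights)
xf = heightList[0] + nHeights * deltaHeight * m
newHeightList = numpy.arange(heightList[0], xf, deltaHeight)
print(newHeightList.size)   # 12 == nHeights * m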
921 921 class ProfileSelector(Operation):
922 922
923 923 profileIndex = None
924 924     # Total number of profiles
925 925 nProfiles = None
926 926
927 927 def __init__(self, **kwargs):
928 928
929 929 Operation.__init__(self, **kwargs)
930 930 self.profileIndex = 0
931 931
932 932 def incProfileIndex(self):
933 933
934 934 self.profileIndex += 1
935 935
936 936 if self.profileIndex >= self.nProfiles:
937 937 self.profileIndex = 0
938 938
939 939 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
940 940
941 941 if profileIndex < minIndex:
942 942 return False
943 943
944 944 if profileIndex > maxIndex:
945 945 return False
946 946
947 947 return True
948 948
949 949 def isThisProfileInList(self, profileIndex, profileList):
950 950
951 951 if profileIndex not in profileList:
952 952 return False
953 953
954 954 return True
955 955
956 956 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
957 957
958 958 """
959 959 ProfileSelector:
960 960
961 961 Inputs:
962 962 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
963 963
964 964 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
965 965
966 966 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
967 967
968 968 """
969 969
970 970 if rangeList is not None:
971 971 if type(rangeList[0]) not in (tuple, list):
972 972 rangeList = [rangeList]
973 973
974 974 dataOut.flagNoData = True
975 975
976 976 if dataOut.flagDataAsBlock:
977 977 """
978 978 data dimension = [nChannels, nProfiles, nHeis]
979 979 """
980 980 if profileList != None:
981 981 dataOut.data = dataOut.data[:,profileList,:]
982 982
983 983 if profileRangeList != None:
984 984 minIndex = profileRangeList[0]
985 985 maxIndex = profileRangeList[1]
986 986 profileList = list(range(minIndex, maxIndex+1))
987 987
988 988 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
989 989
990 990 if rangeList != None:
991 991
992 992 profileList = []
993 993
994 994 for thisRange in rangeList:
995 995 minIndex = thisRange[0]
996 996 maxIndex = thisRange[1]
997 997
998 998 profileList.extend(list(range(minIndex, maxIndex+1)))
999 999
1000 1000 dataOut.data = dataOut.data[:,profileList,:]
1001 1001
1002 1002 dataOut.nProfiles = len(profileList)
1003 1003 dataOut.profileIndex = dataOut.nProfiles - 1
1004 1004 dataOut.flagNoData = False
1005 1005
1006 1006 return dataOut
1007 1007
1008 1008 """
1009 1009 data dimension = [nChannels, nHeis]
1010 1010 """
1011 1011
1012 1012 if profileList != None:
1013 1013
1014 1014 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1015 1015
1016 1016 self.nProfiles = len(profileList)
1017 1017 dataOut.nProfiles = self.nProfiles
1018 1018 dataOut.profileIndex = self.profileIndex
1019 1019 dataOut.flagNoData = False
1020 1020
1021 1021 self.incProfileIndex()
1022 1022 return dataOut
1023 1023
1024 1024 if profileRangeList != None:
1025 1025
1026 1026 minIndex = profileRangeList[0]
1027 1027 maxIndex = profileRangeList[1]
1028 1028
1029 1029 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1030 1030
1031 1031 self.nProfiles = maxIndex - minIndex + 1
1032 1032 dataOut.nProfiles = self.nProfiles
1033 1033 dataOut.profileIndex = self.profileIndex
1034 1034 dataOut.flagNoData = False
1035 1035
1036 1036 self.incProfileIndex()
1037 1037 return dataOut
1038 1038
1039 1039 if rangeList != None:
1040 1040
1041 1041 nProfiles = 0
1042 1042
1043 1043 for thisRange in rangeList:
1044 1044 minIndex = thisRange[0]
1045 1045 maxIndex = thisRange[1]
1046 1046
1047 1047 nProfiles += maxIndex - minIndex + 1
1048 1048
1049 1049 for thisRange in rangeList:
1050 1050
1051 1051 minIndex = thisRange[0]
1052 1052 maxIndex = thisRange[1]
1053 1053
1054 1054 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1055 1055
1056 1056 self.nProfiles = nProfiles
1057 1057 dataOut.nProfiles = self.nProfiles
1058 1058 dataOut.profileIndex = self.profileIndex
1059 1059 dataOut.flagNoData = False
1060 1060
1061 1061 self.incProfileIndex()
1062 1062
1063 1063 break
1064 1064
1065 1065 return dataOut
1066 1066
1067 1067
1068 1068 if beam != None: #beam is only for AMISR data
1069 1069 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1070 1070 dataOut.flagNoData = False
1071 1071 dataOut.profileIndex = self.profileIndex
1072 1072
1073 1073 self.incProfileIndex()
1074 1074
1075 1075 return dataOut
1076 1076
1077 1077 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1078 1078
1079 1079
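A standalone sketch (illustrative indexes only) of how rangeList is expanded into a flat profile list and how a single profile index is tested against it:

# Expand a list of (min, max) ranges into the flat list of selected profiles.
rangeList = ((4, 30), (32, 64), (128, 256))
profileList = []
for minIndex, maxIndex in rangeList:
    profileList.extend(range(minIndex, maxIndex + 1))

profileIndex = 40
print(profileIndex in profileList)   # True: 40 falls inside (32, 64)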
1080 1080 class Reshaper(Operation):
1081 1081
1082 1082 def __init__(self, **kwargs):
1083 1083
1084 1084 Operation.__init__(self, **kwargs)
1085 1085
1086 1086 self.__buffer = None
1087 1087 self.__nitems = 0
1088 1088
1089 1089 def __appendProfile(self, dataOut, nTxs):
1090 1090
1091 1091 if self.__buffer is None:
1092 1092 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1093 1093 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1094 1094
1095 1095 ini = dataOut.nHeights * self.__nitems
1096 1096 end = ini + dataOut.nHeights
1097 1097
1098 1098 self.__buffer[:, ini:end] = dataOut.data
1099 1099
1100 1100 self.__nitems += 1
1101 1101
1102 1102 return int(self.__nitems*nTxs)
1103 1103
1104 1104 def __getBuffer(self):
1105 1105
1106 1106 if self.__nitems == int(1./self.__nTxs):
1107 1107
1108 1108 self.__nitems = 0
1109 1109
1110 1110 return self.__buffer.copy()
1111 1111
1112 1112 return None
1113 1113
1114 1114 def __checkInputs(self, dataOut, shape, nTxs):
1115 1115
1116 1116 if shape is None and nTxs is None:
1117 1117             raise ValueError("Reshaper: shape or nTxs factor should be defined")
1118 1118
1119 1119 if nTxs:
1120 1120 if nTxs < 0:
1121 1121 raise ValueError("nTxs should be greater than 0")
1122 1122
1123 1123 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1124 1124                 raise ValueError("nProfiles= %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1125 1125
1126 1126 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1127 1127
1128 1128 return shape, nTxs
1129 1129
1130 1130 if len(shape) != 2 and len(shape) != 3:
1131 1131             raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actual shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1132 1132
1133 1133 if len(shape) == 2:
1134 1134 shape_tuple = [dataOut.nChannels]
1135 1135 shape_tuple.extend(shape)
1136 1136 else:
1137 1137 shape_tuple = list(shape)
1138 1138
1139 1139 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1140 1140
1141 1141 return shape_tuple, nTxs
1142 1142
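The relation between nTxs and the target shape can be summarized in a small sketch (made-up dimensions): giving nTxs derives the shape, and giving the shape recovers nTxs.

nChannels, nProfiles, nHeights = 2, 100, 1000
nTxs = 0.5
shape = [nChannels, int(nProfiles * nTxs), int(nHeights / nTxs)]
print(shape)                   # [2, 50, 2000]
print(shape[1] / nProfiles)    # 0.5, i.e. nTxs recovered from the shape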
1143 1143 def run(self, dataOut, shape=None, nTxs=None):
1144 1144
1145 1145 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1146 1146
1147 1147 dataOut.flagNoData = True
1148 1148 profileIndex = None
1149 1149
1150 1150 if dataOut.flagDataAsBlock:
1151 1151
1152 1152 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1153 1153 dataOut.flagNoData = False
1154 1154
1155 1155 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1156 1156
1157 1157 else:
1158 1158
1159 1159 if self.__nTxs < 1:
1160 1160
1161 1161 self.__appendProfile(dataOut, self.__nTxs)
1162 1162 new_data = self.__getBuffer()
1163 1163
1164 1164 if new_data is not None:
1165 1165 dataOut.data = new_data
1166 1166 dataOut.flagNoData = False
1167 1167
1168 1168 profileIndex = dataOut.profileIndex*nTxs
1169 1169
1170 1170 else:
1171 1171 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1172 1172
1173 1173 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1174 1174
1175 1175 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1176 1176
1177 1177 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1178 1178
1179 1179 dataOut.profileIndex = profileIndex
1180 1180
1181 1181 dataOut.ippSeconds /= self.__nTxs
1182 1182
1183 1183 return dataOut
1184 1184
1185 1185 class SplitProfiles(Operation):
1186 1186
1187 1187 def __init__(self, **kwargs):
1188 1188
1189 1189 Operation.__init__(self, **kwargs)
1190 1190
1191 1191 def run(self, dataOut, n):
1192 1192
1193 1193 dataOut.flagNoData = True
1194 1194 profileIndex = None
1195 1195
1196 1196 if dataOut.flagDataAsBlock:
1197 1197
1198 1198 #nchannels, nprofiles, nsamples
1199 1199 shape = dataOut.data.shape
1200 1200
1201 1201 if shape[2] % n != 0:
1202 1202                 raise ValueError("Could not split the data, the number of samples (%d) has to be a multiple of n=%d" %(shape[2], n))
1203 1203
1204 1204 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1205 1205
1206 1206 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1207 1207 dataOut.flagNoData = False
1208 1208
1209 1209 profileIndex = int(dataOut.nProfiles/n) - 1
1210 1210
1211 1211 else:
1212 1212
1213 1213             raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1214 1214
1215 1215 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1216 1216
1217 1217 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1218 1218
1219 1219 dataOut.nProfiles = int(dataOut.nProfiles*n)
1220 1220
1221 1221 dataOut.profileIndex = profileIndex
1222 1222
1223 1223 dataOut.ippSeconds /= n
1224 1224
1225 1225 return dataOut
1226 1226
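A minimal sketch of the SplitProfiles reshape with illustrative dimensions: each profile of nsamples heights becomes n shorter profiles.

import numpy

nchannels, nprofiles, nsamples, n = 2, 4, 12, 3
data = numpy.zeros((nchannels, nprofiles, nsamples), dtype=complex)
split = data.reshape(nchannels, nprofiles * n, nsamples // n)
print(split.shape)   # (2, 12, 4)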
1227 1227 class CombineProfiles(Operation):
1228 1228 def __init__(self, **kwargs):
1229 1229
1230 1230 Operation.__init__(self, **kwargs)
1231 1231
1232 1232 self.__remData = None
1233 1233 self.__profileIndex = 0
1234 1234
1235 1235 def run(self, dataOut, n):
1236 1236
1237 1237 dataOut.flagNoData = True
1238 1238 profileIndex = None
1239 1239
1240 1240 if dataOut.flagDataAsBlock:
1241 1241
1242 1242 #nchannels, nprofiles, nsamples
1243 1243 shape = dataOut.data.shape
1244 1244 new_shape = shape[0], shape[1]/n, shape[2]*n
1245 1245
1246 1246 if shape[1] % n != 0:
1247 1247                 raise ValueError("Could not combine the data, the number of profiles (%d) has to be a multiple of n=%d" %(shape[1], n))
1248 1248
1249 1249 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1250 1250 dataOut.flagNoData = False
1251 1251
1252 1252 profileIndex = int(dataOut.nProfiles*n) - 1
1253 1253
1254 1254 else:
1255 1255
1256 1256 #nchannels, nsamples
1257 1257 if self.__remData is None:
1258 1258 newData = dataOut.data
1259 1259 else:
1260 1260 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1261 1261
1262 1262 self.__profileIndex += 1
1263 1263
1264 1264 if self.__profileIndex < n:
1265 1265 self.__remData = newData
1266 1266 #continue
1267 1267 return
1268 1268
1269 1269 self.__profileIndex = 0
1270 1270 self.__remData = None
1271 1271
1272 1272 dataOut.data = newData
1273 1273 dataOut.flagNoData = False
1274 1274
1275 1275 profileIndex = dataOut.profileIndex/n
1276 1276
1277 1277
1278 1278 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1279 1279
1280 1280 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1281 1281
1282 1282 dataOut.nProfiles = int(dataOut.nProfiles/n)
1283 1283
1284 1284 dataOut.profileIndex = profileIndex
1285 1285
1286 1286 dataOut.ippSeconds *= n
1287 1287
1288 1288 return dataOut
1289 1289
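And the inverse operation, CombineProfiles, merges n consecutive profiles into one longer profile; a sketch with illustrative dimensions:

import numpy

nchannels, nprofiles, nsamples, n = 2, 12, 4, 3
data = numpy.zeros((nchannels, nprofiles, nsamples), dtype=complex)
combined = data.reshape(nchannels, nprofiles // n, nsamples * n)
print(combined.shape)   # (2, 4, 12)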
1290 1290 class PulsePair(Operation):
1291 1291 '''
1292 1292 Function PulsePair(Signal Power, Velocity)
1293 1293     The real part of lag[0] provides intensity (power) information
1294 1294     The phase of lag[1] provides velocity information
1295 1295
1296 1296 Configuration Parameters:
1297 1297     nPRF = Number of PRFs to integrate
1298 1298     theta = Azimuth angle boundaries in degrees
1299 1299
1300 1300 Input:
1301 1301 self.dataOut
1302 1302 lag[N]
1303 1303 Affected:
1304 1304 self.dataOut.spc
1305 1305 '''
1306 1306 isConfig = False
1307 1307 __profIndex = 0
1308 1308 __initime = None
1309 1309 __lastdatatime = None
1310 1310 __buffer = None
1311 1311 noise = None
1312 1312 __dataReady = False
1313 1313 n = None
1314 1314 __nch = 0
1315 1315 __nHeis = 0
1316 1316 removeDC = False
1317 1317 ipp = None
1318 1318 lambda_ = 0
1319 1319
1320 1320 def __init__(self,**kwargs):
1321 1321 Operation.__init__(self,**kwargs)
1322 1322
1323 1323 def setup(self, dataOut, n = None, removeDC=False):
1324 1324 '''
1325 1325         n = Number of input PRFs
1326 1326 '''
1327 1327 self.__initime = None
1328 1328 ####print("[INICIO]-setup del METODO PULSE PAIR")
1329 1329 self.__lastdatatime = 0
1330 1330 self.__dataReady = False
1331 1331 self.__buffer = 0
1332 1332 self.__profIndex = 0
1333 1333 self.noise = None
1334 1334 self.__nch = dataOut.nChannels
1335 1335 self.__nHeis = dataOut.nHeights
1336 1336 self.removeDC = removeDC
1337 1337 self.lambda_ = 3.0e8/(9345.0e6)
1338 1338 self.ippSec = dataOut.ippSeconds
1339 1339 self.nCohInt = dataOut.nCohInt
1340 1340 ####print("IPPseconds",dataOut.ippSeconds)
1341 1341 ####print("ELVALOR DE n es:", n)
1342 1342 if n == None:
1343 1343 raise ValueError("n should be specified.")
1344 1344
1345 1345 if n != None:
1346 1346 if n<2:
1347 1347             raise ValueError("n should be at least 2")
1348 1348
1349 1349 self.n = n
1350 1350 self.__nProf = n
1351 1351
1352 1352 self.__buffer = numpy.zeros((dataOut.nChannels,
1353 1353 n,
1354 1354 dataOut.nHeights),
1355 1355 dtype='complex')
1356 1356
1357 1357 def putData(self,data):
1358 1358 '''
1359 1359         Add a profile to the __buffer and increase the __profIndex by one
1360 1360 '''
1361 1361 self.__buffer[:,self.__profIndex,:]= data
1362 1362 self.__profIndex += 1
1363 1363 return
1364 1364
1365 1365 def pushData(self,dataOut):
1366 1366 '''
1367 1367 Return the PULSEPAIR and the profiles used in the operation
1368 1368         Affected : self.__profIndex
1369 1369 '''
1370 1370 #----------------- Remove DC-----------------------------------
1371 1371 if self.removeDC==True:
1372 1372 mean = numpy.mean(self.__buffer,1)
1373 1373 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1374 1374 dc= numpy.tile(tmp,[1,self.__nProf,1])
1375 1375 self.__buffer = self.__buffer - dc
1376 1376         #------------------ Power computation --------------------------
1377 1377 pair0 = self.__buffer*numpy.conj(self.__buffer)
1378 1378 pair0 = pair0.real
1379 1379 lag_0 = numpy.sum(pair0,1)
1380 1380         #----------------- Cross-spectrum (CSPC) computation ----------- New
1381 1381 cspc_pair01 = self.__buffer[0]*self.__buffer[1]
1382 1382         #------------------ Noise computation per channel ----------------
1383 1383 self.noise = numpy.zeros(self.__nch)
1384 1384 for i in range(self.__nch):
1385 1385 daux = numpy.sort(pair0[i,:,:],axis= None)
1386 1386 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1387 1387
1388 1388 self.noise = self.noise.reshape(self.__nch,1)
1389 1389 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1390 1390 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1391 1391 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1392 1392         #------------------ Received power = P, signal power = S, noise = N --
1393 1393         #------------------ P = S + N, P = lag_0/N ----------------------------
1394 1394 #-------------------- Power --------------------------------------------------
1395 1395 data_power = lag_0/(self.n*self.nCohInt)
1396 1396 #--------------------CCF------------------------------------------------------
1397 1397 data_ccf =numpy.sum(cspc_pair01,axis=0)/(self.n*self.nCohInt)
1398 1398         #------------------ Signal ---------------------------------------------
1399 1399 data_intensity = pair0 - noise_buffer
1400 1400 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1401 1401 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1402 1402 for i in range(self.__nch):
1403 1403 for j in range(self.__nHeis):
1404 1404 if data_intensity[i][j] < 0:
1405 1405 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1406 1406
1407 1407         #----------------- Doppler frequency and velocity computation -------
1408 1408 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1409 1409 lag_1 = numpy.sum(pair1,1)
1410 1410 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1411 1411 data_velocity = (self.lambda_/2.0)*data_freq
1412 1412
1413 1413         #---------------- Estimated average signal power -------------------
1414 1414 lag_0 = lag_0/self.n
1415 1415 S = lag_0-self.noise
1416 1416
1417 1417         #---------------- Average Doppler frequency ------------------------
1418 1418 lag_1 = lag_1/(self.n-1)
1419 1419 R1 = numpy.abs(lag_1)
1420 1420
1421 1421         #---------------- SNR computation ----------------------------------
1422 1422 data_snrPP = S/self.noise
1423 1423 for i in range(self.__nch):
1424 1424 for j in range(self.__nHeis):
1425 1425 if data_snrPP[i][j] < 1.e-20:
1426 1426 data_snrPP[i][j] = 1.e-20
1427 1427
1428 1428         #----------------- Spectral width computation -----------------------
1429 1429 L = S/R1
1430 1430 L = numpy.where(L<0,1,L)
1431 1431 L = numpy.log(L)
1432 1432 tmp = numpy.sqrt(numpy.absolute(L))
1433 1433 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1434 1434 n = self.__profIndex
1435 1435
1436 1436 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1437 1437 self.__profIndex = 0
1438 1438 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,data_ccf,n
1439 1439
1440 1440
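A condensed numerical sketch of the pulse-pair estimators computed in pushData, for a single channel and height; the IPP value is made up and noise handling is simplified (noise is ignored here), so the numbers are illustrative only:

import math
import numpy

# Pulse-pair moments for one channel/height from n complex voltage samples.
numpy.random.seed(0)
n, ippSec, nCohInt = 64, 1e-3, 1
lambda_ = 3.0e8 / 9345.0e6                       # radar wavelength (as in setup)
v = numpy.random.randn(n) + 1j * numpy.random.randn(n)

lag0 = numpy.sum(v * numpy.conj(v)).real         # total power (lag 0)
lag1 = numpy.sum(v[:-1] * numpy.conj(v[1:]))     # lag-1 autocovariance

power = lag0 / (n * nCohInt)
freq = (-1.0 / (2.0 * math.pi * ippSec * nCohInt)) * numpy.angle(lag1)
velocity = (lambda_ / 2.0) * freq                # Doppler velocity from the lag-1 phase

S = lag0 / n                                     # per-pulse power (noise ignored here)
R1 = numpy.abs(lag1 / (n - 1))
L = numpy.log(max(S / R1, 1.0))
width = (lambda_ / (2 * math.sqrt(2) * math.pi * ippSec * nCohInt)) * math.sqrt(L)
print(power, velocity, width)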
1441 1441 def pulsePairbyProfiles(self,dataOut):
1442 1442
1443 1443 self.__dataReady = False
1444 1444 data_power = None
1445 1445 data_intensity = None
1446 1446 data_velocity = None
1447 1447 data_specwidth = None
1448 1448 data_snrPP = None
1449 1449 data_ccf = None
1450 1450 self.putData(data=dataOut.data)
1451 1451 if self.__profIndex == self.n:
1452 1452 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth,data_ccf, n = self.pushData(dataOut=dataOut)
1453 1453 self.__dataReady = True
1454 1454
1455 1455 return data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf
1456 1456
1457 1457
1458 1458 def pulsePairOp(self, dataOut, datatime= None):
1459 1459
1460 1460 if self.__initime == None:
1461 1461 self.__initime = datatime
1462 1462 data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf = self.pulsePairbyProfiles(dataOut)
1463 1463 self.__lastdatatime = datatime
1464 1464
1465 1465 if data_power is None:
1466 1466 return None, None, None,None,None,None,None
1467 1467
1468 1468 avgdatatime = self.__initime
1469 1469 deltatime = datatime - self.__lastdatatime
1470 1470 self.__initime = datatime
1471 1471
1472 1472 return data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf, avgdatatime
1473 1473
1474 1474 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1475
1475 #print("hey")
1476 #print(dataOut.data.shape)
1477 #exit(1)
1478 #print(self.__profIndex)
1476 1479 if not self.isConfig:
1477 1480 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1478 1481 self.isConfig = True
1479 1482 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth,data_ccf, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1480 1483 dataOut.flagNoData = True
1481 1484
1482 1485 if self.__dataReady:
1483 1486 ###print("READY ----------------------------------")
1484 1487 dataOut.nCohInt *= self.n
1485 1488 dataOut.dataPP_POW = data_intensity # S
1486 1489             dataOut.dataPP_POWER = data_power # P, value corresponding to the power moment
1487 1490 dataOut.dataPP_DOP = data_velocity
1488 1491 dataOut.dataPP_SNR = data_snrPP
1489 1492 dataOut.dataPP_WIDTH = data_specwidth
1490 1493 dataOut.dataPP_CCF = data_ccf
1491 1494             dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time step.
1492 1495 dataOut.nProfiles = int(dataOut.nProfiles/n)
1493 1496 dataOut.utctime = avgdatatime
1494 1497 dataOut.flagNoData = False
1495 1498 return dataOut
1496 1499
1500 class PulsePair_vRF(Operation):
1501 '''
1502 Function PulsePair(Signal Power, Velocity)
1503     The real part of lag[0] provides intensity (power) information
1504     The phase of lag[1] provides velocity information
1505
1506 Configuration Parameters:
1507     nPRF = Number of PRFs to integrate
1508     theta = Azimuth angle boundaries in degrees
1509
1510 Input:
1511 self.dataOut
1512 lag[N]
1513 Affected:
1514 self.dataOut.spc
1515 '''
1516 isConfig = False
1517 __profIndex = 0
1518 __initime = None
1519 __lastdatatime = None
1520 __buffer = None
1521 noise = None
1522 __dataReady = False
1523 n = None
1524 __nch = 0
1525 __nHeis = 0
1526 removeDC = False
1527 ipp = None
1528 lambda_ = 0
1529
1530 def __init__(self,**kwargs):
1531 Operation.__init__(self,**kwargs)
1497 1532
1533 def setup(self, dataOut, n = None, removeDC=False):
1534 '''
1535         n = Number of input PRFs
1536 '''
1537 self.__initime = None
1538 ####print("[INICIO]-setup del METODO PULSE PAIR")
1539 self.__lastdatatime = 0
1540 self.__dataReady = False
1541 self.__buffer = 0
1542 self.__profIndex = 0
1543 self.noise = None
1544 self.__nch = dataOut.nChannels
1545 self.__nHeis = dataOut.nHeights
1546 self.removeDC = removeDC
1547 self.lambda_ = 3.0e8/(9345.0e6)
1548 self.ippSec = dataOut.ippSeconds
1549 self.nCohInt = dataOut.nCohInt
1550 ####print("IPPseconds",dataOut.ippSeconds)
1551 ####print("ELVALOR DE n es:", n)
1552 if n == None:
1553 raise ValueError("n should be specified.")
1554
1555 if n != None:
1556 if n<2:
1557             raise ValueError("n should be at least 2")
1558
1559 self.n = n
1560 self.__nProf = n
1561
1562 self.__buffer = numpy.zeros((dataOut.nChannels,
1563 n,
1564 dataOut.nHeights),
1565 dtype='complex')
1566
1567 def putData(self,data):
1568 '''
1569         Add a profile to the __buffer and increase the __profIndex by one
1570 '''
1571 self.__buffer[:,self.__profIndex,:]= data
1572 self.__profIndex += 1
1573 return
1574
1575 def putDataByBlock(self,data,n):
1576 '''
1577         Add a whole block of profiles to the __buffer and set the __profIndex to n
1578 '''
1579 self.__buffer[:]= data
1580 self.__profIndex = n
1581 return
1582
1583 def pushData(self,dataOut):
1584 '''
1585 Return the PULSEPAIR and the profiles used in the operation
1586         Affected : self.__profIndex
1587 '''
1588 #----------------- Remove DC-----------------------------------
1589 if self.removeDC==True:
1590 mean = numpy.mean(self.__buffer,1)
1591 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1592 dc= numpy.tile(tmp,[1,self.__nProf,1])
1593 self.__buffer = self.__buffer - dc
1594         #------------------ Power computation --------------------------
1595 pair0 = self.__buffer*numpy.conj(self.__buffer)
1596 pair0 = pair0.real
1597 lag_0 = numpy.sum(pair0,1)
1598         #----------------- Cross-spectrum (CSPC) computation ----------- New
1599 cspc_pair01 = self.__buffer[0]*self.__buffer[1]
1600         #------------------ Noise computation per channel ----------------
1601 self.noise = numpy.zeros(self.__nch)
1602 for i in range(self.__nch):
1603 daux = numpy.sort(pair0[i,:,:],axis= None)
1604 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1605
1606 self.noise = self.noise.reshape(self.__nch,1)
1607 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1608 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1609 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1610         #------------------ Received power = P, signal power = S, noise = N --
1611         #------------------ P = S + N, P = lag_0/N ----------------------------
1612 #-------------------- Power --------------------------------------------------
1613 data_power = lag_0/(self.n*self.nCohInt)
1614 #--------------------CCF------------------------------------------------------
1615 data_ccf =numpy.sum(cspc_pair01,axis=0)/(self.n*self.nCohInt)
1616         #------------------ Signal ---------------------------------------------
1617 data_intensity = pair0 - noise_buffer
1618 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1619 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1620 for i in range(self.__nch):
1621 for j in range(self.__nHeis):
1622 if data_intensity[i][j] < 0:
1623 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1624
1625         #----------------- Doppler frequency and velocity computation -------
1626 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1627 lag_1 = numpy.sum(pair1,1)
1628 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1629 data_velocity = (self.lambda_/2.0)*data_freq
1630
1631         #---------------- Estimated average signal power -------------------
1632 lag_0 = lag_0/self.n
1633 S = lag_0-self.noise
1634
1635         #---------------- Average Doppler frequency ------------------------
1636 lag_1 = lag_1/(self.n-1)
1637 R1 = numpy.abs(lag_1)
1638
1639         #---------------- SNR computation ----------------------------------
1640 data_snrPP = S/self.noise
1641 for i in range(self.__nch):
1642 for j in range(self.__nHeis):
1643 if data_snrPP[i][j] < 1.e-20:
1644 data_snrPP[i][j] = 1.e-20
1645
1646         #----------------- Spectral width computation -----------------------
1647 L = S/R1
1648 L = numpy.where(L<0,1,L)
1649 L = numpy.log(L)
1650 tmp = numpy.sqrt(numpy.absolute(L))
1651 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1652 n = self.__profIndex
1653
1654 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1655 self.__profIndex = 0
1656 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,data_ccf,n
1657
1658
1659 def pulsePairbyProfiles(self,dataOut,n):
1660
1661 self.__dataReady = False
1662 data_power = None
1663 data_intensity = None
1664 data_velocity = None
1665 data_specwidth = None
1666 data_snrPP = None
1667 data_ccf = None
1668
1669 if dataOut.flagDataAsBlock:
1670 self.putDataByBlock(data=dataOut.data,n=n)
1671 else:
1672 self.putData(data=dataOut.data)
1673 if self.__profIndex == self.n:
1674 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth,data_ccf, n = self.pushData(dataOut=dataOut)
1675 self.__dataReady = True
1676
1677 return data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf
1678
1679
1680 def pulsePairOp(self, dataOut, n, datatime= None):
1681
1682 if self.__initime == None:
1683 self.__initime = datatime
1684 data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf = self.pulsePairbyProfiles(dataOut,n)
1685 self.__lastdatatime = datatime
1686
1687 if data_power is None:
1688 return None, None, None,None,None,None,None
1689
1690 avgdatatime = self.__initime
1691 deltatime = datatime - self.__lastdatatime
1692 self.__initime = datatime
1693
1694 return data_power, data_intensity, data_velocity, data_snrPP,data_specwidth,data_ccf, avgdatatime
1695
1696 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1697 #print("hey")
1698 #print(dataOut.data.shape)
1699 #exit(1)
1700 if dataOut.flagDataAsBlock:
1701 n = dataOut.nProfileBlocks
1702 #print(self.__profIndex)
1703 if not self.isConfig:
1704 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1705 self.isConfig = True
1706
1707
1708 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth,data_ccf, avgdatatime = self.pulsePairOp(dataOut, n, dataOut.utctime)
1709
1710
1711 dataOut.flagNoData = True
1712
1713 if self.__dataReady:
1714 ###print("READY ----------------------------------")
1715 dataOut.nCohInt *= self.n
1716 dataOut.dataPP_POW = data_intensity # S
1717             dataOut.dataPP_POWER = data_power # P, value corresponding to the power moment
1718 dataOut.dataPP_DOP = data_velocity
1719 dataOut.dataPP_SNR = data_snrPP
1720 dataOut.dataPP_WIDTH = data_specwidth
1721 dataOut.dataPP_CCF = data_ccf
1722             dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time step.
1723 dataOut.nProfiles = int(dataOut.nProfiles/n)
1724 dataOut.utctime = avgdatatime
1725 dataOut.flagNoData = False
1726 return dataOut
1498 1727
1499 1728 # import collections
1500 1729 # from scipy.stats import mode
1501 1730 #
1502 1731 # class Synchronize(Operation):
1503 1732 #
1504 1733 # isConfig = False
1505 1734 # __profIndex = 0
1506 1735 #
1507 1736 # def __init__(self, **kwargs):
1508 1737 #
1509 1738 # Operation.__init__(self, **kwargs)
1510 1739 # # self.isConfig = False
1511 1740 # self.__powBuffer = None
1512 1741 # self.__startIndex = 0
1513 1742 # self.__pulseFound = False
1514 1743 #
1515 1744 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1516 1745 #
1517 1746 # #Read data
1518 1747 #
1519 1748 # powerdB = dataOut.getPower(channel = channel)
1520 1749 # noisedB = dataOut.getNoise(channel = channel)[0]
1521 1750 #
1522 1751 # self.__powBuffer.extend(powerdB.flatten())
1523 1752 #
1524 1753 # dataArray = numpy.array(self.__powBuffer)
1525 1754 #
1526 1755 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1527 1756 #
1528 1757 # maxValue = numpy.nanmax(filteredPower)
1529 1758 #
1530 1759 # if maxValue < noisedB + 10:
1531 1760 # #No transmission pulse was found
1532 1761 # return None
1533 1762 #
1534 1763 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1535 1764 #
1536 1765 # if len(maxValuesIndex) < 2:
1537 1766 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1538 1767 # return None
1539 1768 #
1540 1769 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1541 1770 #
1542 1771 # #Select only values spaced by nSamples
1543 1772 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1544 1773 #
1545 1774 # if len(pulseIndex) < 2:
1546 1775 # #Only one transmission pulse with width greater than 1 was found
1547 1776 # return None
1548 1777 #
1549 1778 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1550 1779 #
1551 1780 # #remove signals spaced less than 10 units or samples apart
1552 1781 # #(IPPs smaller than 10 units should not exist)
1553 1782 #
1554 1783 # realIndex = numpy.where(spacing > 10 )[0]
1555 1784 #
1556 1785 # if len(realIndex) < 2:
1557 1786 # #Only one transmission pulse with width greater than 1 was found
1558 1787 # return None
1559 1788 #
1560 1789 # #Remove wide pulses (leaves only the difference between IPPs)
1561 1790 # realPulseIndex = pulseIndex[realIndex]
1562 1791 #
1563 1792 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1564 1793 #
1565 1794 # print "IPP = %d samples" %period
1566 1795 #
1567 1796 # self.__newNSamples = dataOut.nHeights #int(period)
1568 1797 # self.__startIndex = int(realPulseIndex[0])
1569 1798 #
1570 1799 # return 1
1571 1800 #
1572 1801 #
1573 1802 # def setup(self, nSamples, nChannels, buffer_size = 4):
1574 1803 #
1575 1804 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1576 1805 # maxlen = buffer_size*nSamples)
1577 1806 #
1578 1807 # bufferList = []
1579 1808 #
1580 1809 # for i in range(nChannels):
1581 1810 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1582 1811 # maxlen = buffer_size*nSamples)
1583 1812 #
1584 1813 # bufferList.append(bufferByChannel)
1585 1814 #
1586 1815 # self.__nSamples = nSamples
1587 1816 # self.__nChannels = nChannels
1588 1817 # self.__bufferList = bufferList
1589 1818 #
1590 1819 # def run(self, dataOut, channel = 0):
1591 1820 #
1592 1821 # if not self.isConfig:
1593 1822 # nSamples = dataOut.nHeights
1594 1823 # nChannels = dataOut.nChannels
1595 1824 # self.setup(nSamples, nChannels)
1596 1825 # self.isConfig = True
1597 1826 #
1598 1827 # #Append new data to internal buffer
1599 1828 # for thisChannel in range(self.__nChannels):
1600 1829 # bufferByChannel = self.__bufferList[thisChannel]
1601 1830 # bufferByChannel.extend(dataOut.data[thisChannel])
1602 1831 #
1603 1832 # if self.__pulseFound:
1604 1833 # self.__startIndex -= self.__nSamples
1605 1834 #
1606 1835 # #Finding Tx Pulse
1607 1836 # if not self.__pulseFound:
1608 1837 # indexFound = self.__findTxPulse(dataOut, channel)
1609 1838 #
1610 1839 # if indexFound == None:
1611 1840 # dataOut.flagNoData = True
1612 1841 # return
1613 1842 #
1614 1843 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1615 1844 # self.__pulseFound = True
1616 1845 # self.__startIndex = indexFound
1617 1846 #
1618 1847 # #If pulse was found ...
1619 1848 # for thisChannel in range(self.__nChannels):
1620 1849 # bufferByChannel = self.__bufferList[thisChannel]
1621 1850 # #print self.__startIndex
1622 1851 # x = numpy.array(bufferByChannel)
1623 1852 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1624 1853 #
1625 1854 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1626 1855 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1627 1856 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1628 1857 #
1629 1858 # dataOut.data = self.__arrayBuffer
1630 1859 #
1631 1860 # self.__startIndex += self.__newNSamples
1632 1861 #
1633 1862 # return