Danny Scipión - r1346:a455424d10b0 merge
@@ -1,8 +1,8
1 1 """Signal chain python package"""
2 2
3 3 try:
4 4 from schainpy.controller import Project
5 5 except:
6 6 pass
7 7
8 __version__ = '3.0.0b5'
8 __version__ = '3.0.0b6'
@@ -1,1193 +1,1066
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of diferent Data objects for different types of data
6 6
7 7 Here you will find the different data objects for the different types
8 8 of data. These data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
24 24 def getNumpyDtype(dataTypeCode):
25 25
26 26 if dataTypeCode == 0:
27 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
28 28 elif dataTypeCode == 1:
29 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
30 30 elif dataTypeCode == 2:
31 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
32 32 elif dataTypeCode == 3:
33 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
34 34 elif dataTypeCode == 4:
35 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36 elif dataTypeCode == 5:
37 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
38 38 else:
39 39 raise ValueError('dataTypeCode was not defined')
40 40
41 41 return numpyDtype
42 42
43 43
44 44 def getDataTypeCode(numpyDtype):
45 45
46 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
47 47 datatype = 0
48 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
49 49 datatype = 1
50 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
51 51 datatype = 2
52 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
53 53 datatype = 3
54 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
55 55 datatype = 4
56 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
57 57 datatype = 5
58 58 else:
59 59 datatype = None
60 60
61 61 return datatype
62 62
63 63
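A quick, illustrative round trip between getNumpyDtype and getDataTypeCode (not part of the changeset; the sample values are assumptions):

    dt = getNumpyDtype(4)                # complex samples stored as float32 real/imag pairs
    assert getDataTypeCode(dt) == 4
    samples = numpy.zeros(3, dtype=dt)   # each sample has 'real' and 'imag' fields
    samples['real'][0] = 1.5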
64 64 def hildebrand_sekhon(data, navg):
65 65 """
66 66 This method performs an objective determination of the noise level in Doppler spectra. The
67 67 technique is based on the fact that, for white Gaussian noise, the standard deviation of the
68 68 spectral densities is equal to the mean spectral density.
69 69
70 70 Inputs:
71 71 data : array of power/spectral density values (sorted internally)
72 72 navg : number of averages
73 73
74 74 Return:
75 75 mean : noise level
76 76 """
77 77
78 78 sortdata = numpy.sort(data, axis=None)
79 79 '''
80 80 lenOfData = len(sortdata)
81 81 nums_min = lenOfData*0.2
82 82
83 83 if nums_min <= 5:
84 84
85 85 nums_min = 5
86 86
87 87 sump = 0.
88 88 sumq = 0.
89 89
90 90 j = 0
91 91 cont = 1
92 92
93 93 while((cont == 1)and(j < lenOfData)):
94 94
95 95 sump += sortdata[j]
96 96 sumq += sortdata[j]**2
97 97
98 98 if j > nums_min:
99 99 rtest = float(j)/(j-1) + 1.0/navg
100 100 if ((sumq*j) > (rtest*sump**2)):
101 101 j = j - 1
102 102 sump = sump - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 j += 1
107 107
108 108 lnoise = sump / j
109 109 '''
110 110 return _noise.hildebrand_sekhon(sortdata, navg)
111 111
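For reference, a pure-Python sketch of the Hildebrand-Sekhon criterion implemented by the compiled _noise.hildebrand_sekhon call, reconstructed from the commented-out block above (illustrative only; it relies on the module-level numpy import):

    def hildebrand_sekhon_py(data, navg):
        # Sort spectral densities and accumulate the smallest values until the
        # sample variance exceeds what white Gaussian noise averaged navg times allows.
        sortdata = numpy.sort(data, axis=None)
        lenOfData = len(sortdata)
        nums_min = max(int(lenOfData * 0.2), 5)
        sump = 0.
        sumq = 0.
        j = 0
        cont = True
        while cont and (j < lenOfData):
            sump += sortdata[j]
            sumq += sortdata[j] ** 2
            if j > nums_min:
                rtest = float(j) / (j - 1) + 1.0 / navg
                if (sumq * j) > (rtest * sump ** 2):   # variance test failed: stop
                    j -= 1
                    sump -= sortdata[j]
                    sumq -= sortdata[j] ** 2
                    cont = False
            j += 1
        return sump / j                                # mean of the noise-only samples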
112 112
113 113 class Beam:
114 114
115 115 def __init__(self):
116 116 self.codeList = []
117 117 self.azimuthList = []
118 118 self.zenithList = []
119 119
120 120
121 121 class GenericData(object):
122 122
123 123 flagNoData = True
124 124
125 125 def copy(self, inputObj=None):
126 126
127 127 if inputObj == None:
128 128 return copy.deepcopy(self)
129 129
130 130 for key in list(inputObj.__dict__.keys()):
131 131
132 132 attribute = inputObj.__dict__[key]
133 133
134 134 # If this attribute is a tuple or list
135 135 if type(inputObj.__dict__[key]) in (tuple, list):
136 136 self.__dict__[key] = attribute[:]
137 137 continue
138 138
139 139 # If this attribute is another object or instance
140 140 if hasattr(attribute, '__dict__'):
141 141 self.__dict__[key] = attribute.copy()
142 142 continue
143 143
144 144 self.__dict__[key] = inputObj.__dict__[key]
145 145
146 146 def deepcopy(self):
147 147
148 148 return copy.deepcopy(self)
149 149
150 150 def isEmpty(self):
151 151
152 152 return self.flagNoData
153 153
154 154 def isReady(self):
155 155
156 156 return not self.flagNoData
157 157
158 158
159 159 class JROData(GenericData):
160 160
161 161 systemHeaderObj = SystemHeader()
162 162 radarControllerHeaderObj = RadarControllerHeader()
163 163 type = None
164 164 datatype = None # dtype but in string
165 165 nProfiles = None
166 166 heightList = None
167 167 channelList = None
168 168 flagDiscontinuousBlock = False
169 169 useLocalTime = False
170 170 utctime = None
171 171 timeZone = None
172 172 dstFlag = None
173 173 errorCount = None
174 174 blocksize = None
175 175 flagDecodeData = False # assume the data has not been decoded
176 176 flagDeflipData = False # assume the data has not been deflipped
177 177 flagShiftFFT = False
178 178 nCohInt = None
179 179 windowOfFilter = 1
180 180 C = 3e8
181 181 frequency = 49.92e6
182 182 realtime = False
183 183 beacon_heiIndexList = None
184 184 last_block = None
185 185 blocknow = None
186 186 azimuth = None
187 187 zenith = None
188 188 beam = Beam()
189 189 profileIndex = None
190 190 error = None
191 191 data = None
192 192 nmodes = None
193 193 metadata_list = ['heightList', 'timeZone', 'type']
194 194
195 195 def __str__(self):
196 196
197 197 return '{} - {}'.format(self.type, self.datatime)
198 198
199 199 def getNoise(self):
200 200
201 201 raise NotImplementedError
202 202
203 203 @property
204 204 def nChannels(self):
205 205
206 206 return len(self.channelList)
207 207
208 208 @property
209 209 def channelIndexList(self):
210 210
211 211 return list(range(self.nChannels))
212 212
213 213 @property
214 214 def nHeights(self):
215 215
216 216 return len(self.heightList)
217 217
218 218 def getDeltaH(self):
219 219
220 220 return self.heightList[1] - self.heightList[0]
221 221
222 222 @property
223 223 def ltctime(self):
224 224
225 225 if self.useLocalTime:
226 226 return self.utctime - self.timeZone * 60
227 227
228 228 return self.utctime
229 229
230 230 @property
231 231 def datatime(self):
232 232
233 233 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
234 234 return datatimeValue
235 235
236 236 def getTimeRange(self):
237 237
238 238 datatime = []
239 239
240 240 datatime.append(self.ltctime)
241 241 datatime.append(self.ltctime + self.timeInterval + 1)
242 242
243 243 datatime = numpy.array(datatime)
244 244
245 245 return datatime
246 246
247 247 def getFmaxTimeResponse(self):
248 248
249 249 period = (10**-6) * self.getDeltaH() / (0.15)
250 250
251 251 PRF = 1. / (period * self.nCohInt)
252 252
253 253 fmax = PRF
254 254
255 255 return fmax
256 256
257 257 def getFmax(self):
258 258 PRF = 1. / (self.ippSeconds * self.nCohInt)
259 259
260 260 fmax = PRF
261 261 return fmax
262 262
263 263 def getVmax(self):
264 264
265 265 _lambda = self.C / self.frequency
266 266
267 267 vmax = self.getFmax() * _lambda / 2
268 268
269 269 return vmax
270 270
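A worked example of the two formulas above, with assumed values for ippSeconds and nCohInt (illustrative only):

    C = 3e8
    frequency = 49.92e6                  # default carrier frequency in JROData
    ippSeconds = 1e-3                    # assumed inter-pulse period
    nCohInt = 1                          # assumed number of coherent integrations
    PRF = 1. / (ippSeconds * nCohInt)    # getFmax() -> 1000 Hz
    _lambda = C / frequency              # ~6.01 m
    vmax = PRF * _lambda / 2             # getVmax() -> ~3005 m/s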
271 271 @property
272 272 def ippSeconds(self):
273 273 '''
274 274 '''
275 275 return self.radarControllerHeaderObj.ippSeconds
276 276
277 277 @ippSeconds.setter
278 278 def ippSeconds(self, ippSeconds):
279 279 '''
280 280 '''
281 281 self.radarControllerHeaderObj.ippSeconds = ippSeconds
282 282
283 283 @property
284 284 def code(self):
285 285 '''
286 286 '''
287 287 return self.radarControllerHeaderObj.code
288 288
289 289 @code.setter
290 290 def code(self, code):
291 291 '''
292 292 '''
293 293 self.radarControllerHeaderObj.code = code
294 294
295 295 @property
296 296 def nCode(self):
297 297 '''
298 298 '''
299 299 return self.radarControllerHeaderObj.nCode
300 300
301 301 @nCode.setter
302 302 def nCode(self, ncode):
303 303 '''
304 304 '''
305 305 self.radarControllerHeaderObj.nCode = ncode
306 306
307 307 @property
308 308 def nBaud(self):
309 309 '''
310 310 '''
311 311 return self.radarControllerHeaderObj.nBaud
312 312
313 313 @nBaud.setter
314 314 def nBaud(self, nbaud):
315 315 '''
316 316 '''
317 317 self.radarControllerHeaderObj.nBaud = nbaud
318 318
319 319 @property
320 320 def ipp(self):
321 321 '''
322 322 '''
323 323 return self.radarControllerHeaderObj.ipp
324 324
325 325 @ipp.setter
326 326 def ipp(self, ipp):
327 327 '''
328 328 '''
329 329 self.radarControllerHeaderObj.ipp = ipp
330 330
331 331 @property
332 332 def metadata(self):
333 333 '''
334 334 '''
335 335
336 336 return {attr: getattr(self, attr) for attr in self.metadata_list}
337 337
338 338
339 339 class Voltage(JROData):
340 340
341 341 dataPP_POW = None
342 342 dataPP_DOP = None
343 343 dataPP_WIDTH = None
344 344 dataPP_SNR = None
345 345
346 346 def __init__(self):
347 347 '''
348 348 Constructor
349 349 '''
350 350
351 351 self.useLocalTime = True
352 352 self.radarControllerHeaderObj = RadarControllerHeader()
353 353 self.systemHeaderObj = SystemHeader()
354 354 self.type = "Voltage"
355 355 self.data = None
356 356 self.nProfiles = None
357 357 self.heightList = None
358 358 self.channelList = None
359 359 self.flagNoData = True
360 360 self.flagDiscontinuousBlock = False
361 361 self.utctime = None
362 362 self.timeZone = 0
363 363 self.dstFlag = None
364 364 self.errorCount = None
365 365 self.nCohInt = None
366 366 self.blocksize = None
367 367 self.flagCohInt = False
368 368 self.flagDecodeData = False # assume the data has not been decoded
369 369 self.flagDeflipData = False # assume the data has not been deflipped
370 370 self.flagShiftFFT = False
371 371 self.flagDataAsBlock = False # assume the data is read profile by profile
372 372 self.profileIndex = 0
373 373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
374 374 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
375 375
376 376 def getNoisebyHildebrand(self, channel=None):
377 377 """
378 378 Determine the noise level using the Hildebrand-Sekhon method
379 379
380 380 Return:
381 381 noiselevel
382 382 """
383 383
384 384 if channel != None:
385 385 data = self.data[channel]
386 386 nChannels = 1
387 387 else:
388 388 data = self.data
389 389 nChannels = self.nChannels
390 390
391 391 noise = numpy.zeros(nChannels)
392 392 power = data * numpy.conjugate(data)
393 393
394 394 for thisChannel in range(nChannels):
395 395 if nChannels == 1:
396 396 daux = power[:].real
397 397 else:
398 398 daux = power[thisChannel, :].real
399 399 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
400 400
401 401 return noise
402 402
403 403 def getNoise(self, type=1, channel=None):
404 404
405 405 if type == 1:
406 406 noise = self.getNoisebyHildebrand(channel)
407 407
408 408 return noise
409 409
410 410 def getPower(self, channel=None):
411 411
412 412 if channel != None:
413 413 data = self.data[channel]
414 414 else:
415 415 data = self.data
416 416
417 417 power = data * numpy.conjugate(data)
418 418 powerdB = 10 * numpy.log10(power.real)
419 419 powerdB = numpy.squeeze(powerdB)
420 420
421 421 return powerdB
422 422
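An illustrative check of the power computation performed by getPower(), on a few synthetic complex samples (values are assumptions):

    v = numpy.array([1 + 1j, 2 + 0j, 0 + 3j])
    p = (v * numpy.conjugate(v)).real    # |v|**2 -> [2., 4., 9.]
    print(10 * numpy.log10(p))           # ~[3.01, 6.02, 9.54] dB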
423 423 @property
424 424 def timeInterval(self):
425 425
426 426 return self.ippSeconds * self.nCohInt
427 427
428 428 noise = property(getNoise, doc="I'm the 'noise' property.")
429 429
430 430
431 431 class Spectra(JROData):
432 432
433 433 def __init__(self):
434 434 '''
435 435 Constructor
436 436 '''
437 437
438 438 self.useLocalTime = True
439 439 self.radarControllerHeaderObj = RadarControllerHeader()
440 440 self.systemHeaderObj = SystemHeader()
441 441 self.type = "Spectra"
442 442 self.timeZone = 0
443 443 self.nProfiles = None
444 444 self.heightList = None
445 445 self.channelList = None
446 446 self.pairsList = None
447 447 self.flagNoData = True
448 448 self.flagDiscontinuousBlock = False
449 449 self.utctime = None
450 450 self.nCohInt = None
451 451 self.nIncohInt = None
452 452 self.blocksize = None
453 453 self.nFFTPoints = None
454 454 self.wavelength = None
455 455 self.flagDecodeData = False # assume the data has not been decoded
456 456 self.flagDeflipData = False # assume the data has not been deflipped
457 457 self.flagShiftFFT = False
458 458 self.ippFactor = 1
459 459 self.beacon_heiIndexList = []
460 460 self.noise_estimation = None
461 461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 462 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
463 463
464 464 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
465 465 """
466 466 Determine the noise level using the Hildebrand-Sekhon method
467 467
468 468 Return:
469 469 noiselevel
470 470 """
471 471
472 472 noise = numpy.zeros(self.nChannels)
473 473
474 474 for channel in range(self.nChannels):
475 475 daux = self.data_spc[channel,
476 476 xmin_index:xmax_index, ymin_index:ymax_index]
477 477 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
478 478
479 479 return noise
480 480
481 481 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
482 482
483 483 if self.noise_estimation is not None:
484 484 # this was estimated by getNoise Operation defined in jroproc_spectra.py
485 485 return self.noise_estimation
486 486 else:
487 487 noise = self.getNoisebyHildebrand(
488 488 xmin_index, xmax_index, ymin_index, ymax_index)
489 489 return noise
490 490
491 491 def getFreqRangeTimeResponse(self, extrapoints=0):
492 492
493 493 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
494 494 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
495 495
496 496 return freqrange
497 497
498 498 def getAcfRange(self, extrapoints=0):
499 499
500 500 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
501 501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
502 502
503 503 return freqrange
504 504
505 505 def getFreqRange(self, extrapoints=0):
506 506
507 507 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
508 508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
509 509
510 510 return freqrange
511 511
512 512 def getVelRange(self, extrapoints=0):
513 513
514 514 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
515 515 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
516 516
517 517 if self.nmodes:
518 518 return velrange/self.nmodes
519 519 else:
520 520 return velrange
521 521
522 522 @property
523 523 def nPairs(self):
524 524
525 525 return len(self.pairsList)
526 526
527 527 @property
528 528 def pairsIndexList(self):
529 529
530 530 return list(range(self.nPairs))
531 531
532 532 @property
533 533 def normFactor(self):
534 534
535 535 pwcode = 1
536 536
537 537 if self.flagDecodeData:
538 538 pwcode = numpy.sum(self.code[0]**2)
539 539 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
540 540 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
541 541
542 542 return normFactor
543 543
544 544 @property
545 545 def flag_cspc(self):
546 546
547 547 if self.data_cspc is None:
548 548 return True
549 549
550 550 return False
551 551
552 552 @property
553 553 def flag_dc(self):
554 554
555 555 if self.data_dc is None:
556 556 return True
557 557
558 558 return False
559 559
560 560 @property
561 561 def timeInterval(self):
562 562
563 563 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
564 564 if self.nmodes:
565 565 return self.nmodes*timeInterval
566 566 else:
567 567 return timeInterval
568 568
569 569 def getPower(self):
570 570
571 571 factor = self.normFactor
572 572 z = self.data_spc / factor
573 573 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
574 574 avg = numpy.average(z, axis=1)
575 575
576 576 return 10 * numpy.log10(avg)
577 577
578 578 def getCoherence(self, pairsList=None, phase=False):
579 579
580 580 z = []
581 581 if pairsList is None:
582 582 pairsIndexList = self.pairsIndexList
583 583 else:
584 584 pairsIndexList = []
585 585 for pair in pairsList:
586 586 if pair not in self.pairsList:
587 587 raise ValueError("Pair %s is not in dataOut.pairsList" % (
588 588 pair))
589 589 pairsIndexList.append(self.pairsList.index(pair))
590 590 for i in range(len(pairsIndexList)):
591 591 pair = self.pairsList[pairsIndexList[i]]
592 592 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
593 593 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
594 594 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
595 595 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
596 596 if phase:
597 597 data = numpy.arctan2(avgcoherenceComplex.imag,
598 598 avgcoherenceComplex.real) * 180 / numpy.pi
599 599 else:
600 600 data = numpy.abs(avgcoherenceComplex)
601 601
602 602 z.append(data)
603 603
604 604 return numpy.array(z)
605 605
606 606 def setValue(self, value):
607 607
608 608 print("This property should not be initialized")
609 609
610 610 return
611 611
612 612 noise = property(getNoise, setValue, "I'm the 'noise' property.")
613 613
614 614
615 615 class SpectraHeis(Spectra):
616 616
617 617 def __init__(self):
618 618
619 619 self.radarControllerHeaderObj = RadarControllerHeader()
620 620 self.systemHeaderObj = SystemHeader()
621 621 self.type = "SpectraHeis"
622 622 self.nProfiles = None
623 623 self.heightList = None
624 624 self.channelList = None
625 625 self.flagNoData = True
626 626 self.flagDiscontinuousBlock = False
627 627 self.utctime = None
628 628 self.blocksize = None
629 629 self.profileIndex = 0
630 630 self.nCohInt = 1
631 631 self.nIncohInt = 1
632 632
633 633 @property
634 634 def normFactor(self):
635 635 pwcode = 1
636 636 if self.flagDecodeData:
637 637 pwcode = numpy.sum(self.code[0]**2)
638 638
639 639 normFactor = self.nIncohInt * self.nCohInt * pwcode
640 640
641 641 return normFactor
642 642
643 643 @property
644 644 def timeInterval(self):
645 645
646 646 return self.ippSeconds * self.nCohInt * self.nIncohInt
647 647
648 648
649 649 class Fits(JROData):
650 650
651 651 def __init__(self):
652 652
653 653 self.type = "Fits"
654 654 self.nProfiles = None
655 655 self.heightList = None
656 656 self.channelList = None
657 657 self.flagNoData = True
658 658 self.utctime = None
659 659 self.nCohInt = 1
660 660 self.nIncohInt = 1
661 661 self.useLocalTime = True
662 662 self.profileIndex = 0
663 663 self.timeZone = 0
664 664
665 665 def getTimeRange(self):
666 666
667 667 datatime = []
668 668
669 669 datatime.append(self.ltctime)
670 670 datatime.append(self.ltctime + self.timeInterval)
671 671
672 672 datatime = numpy.array(datatime)
673 673
674 674 return datatime
675 675
676 676 def getChannelIndexList(self):
677 677
678 678 return list(range(self.nChannels))
679 679
680 680 def getNoise(self, type=1):
681 681
682 682
683 683 if type == 1:
684 684 noise = self.getNoisebyHildebrand()
685 685
686 686 if type == 2:
687 687 noise = self.getNoisebySort()
688 688
689 689 if type == 3:
690 690 noise = self.getNoisebyWindow()
691 691
692 692 return noise
693 693
694 694 @property
695 695 def timeInterval(self):
696 696
697 697 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
698 698
699 699 return timeInterval
700 700
701 701 @property
702 702 def ippSeconds(self):
703 703 '''
704 704 '''
705 705 return self.ipp_sec
706 706
707 707 noise = property(getNoise, doc="I'm the 'noise' property.")
708 708
709 709
710 710 class Correlation(JROData):
711 711
712 712 def __init__(self):
713 713 '''
714 714 Constructor
715 715 '''
716 716 self.radarControllerHeaderObj = RadarControllerHeader()
717 717 self.systemHeaderObj = SystemHeader()
718 718 self.type = "Correlation"
719 719 self.data = None
720 720 self.dtype = None
721 721 self.nProfiles = None
722 722 self.heightList = None
723 723 self.channelList = None
724 724 self.flagNoData = True
725 725 self.flagDiscontinuousBlock = False
726 726 self.utctime = None
727 727 self.timeZone = 0
728 728 self.dstFlag = None
729 729 self.errorCount = None
730 730 self.blocksize = None
731 731 self.flagDecodeData = False # assume the data has not been decoded
732 732 self.flagDeflipData = False # assume the data has not been deflipped
733 733 self.pairsList = None
734 734 self.nPoints = None
735 735
736 736 def getPairsList(self):
737 737
738 738 return self.pairsList
739 739
740 740 def getNoise(self, mode=2):
741 741
742 742 indR = numpy.where(self.lagR == 0)[0][0]
743 743 indT = numpy.where(self.lagT == 0)[0][0]
744 744
745 745 jspectra0 = self.data_corr[:, :, indR, :]
746 746 jspectra = copy.copy(jspectra0)
747 747
748 748 num_chan = jspectra.shape[0]
749 749 num_hei = jspectra.shape[2]
750 750
751 751 freq_dc = jspectra.shape[1] / 2
752 752 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
753 753
754 754 if ind_vel[0] < 0:
755 755 ind_vel[list(range(0, 1))] = ind_vel[list(
756 756 range(0, 1))] + self.num_prof
757 757
758 758 if mode == 1:
759 759 jspectra[:, freq_dc, :] = (
760 760 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
761 761
762 762 if mode == 2:
763 763
764 764 vel = numpy.array([-2, -1, 1, 2])
765 765 xx = numpy.zeros([4, 4])
766 766
767 767 for fil in range(4):
768 768 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
769 769
770 770 xx_inv = numpy.linalg.inv(xx)
771 771 xx_aux = xx_inv[0, :]
772 772
773 773 for ich in range(num_chan):
774 774 yy = jspectra[ich, ind_vel, :]
775 775 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
776 776
777 777 junkid = jspectra[ich, freq_dc, :] <= 0
778 778 cjunkid = sum(junkid)
779 779
780 780 if cjunkid.any():
781 781 jspectra[ich, freq_dc, junkid.nonzero()] = (
782 782 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
783 783
784 784 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
785 785
786 786 return noise
787 787
788 788 @property
789 789 def timeInterval(self):
790 790
791 791 return self.ippSeconds * self.nCohInt * self.nProfiles
792 792
793 793 def splitFunctions(self):
794 794
795 795 pairsList = self.pairsList
796 796 ccf_pairs = []
797 797 acf_pairs = []
798 798 ccf_ind = []
799 799 acf_ind = []
800 800 for l in range(len(pairsList)):
801 801 chan0 = pairsList[l][0]
802 802 chan1 = pairsList[l][1]
803 803
804 804 # Obtaining autocorrelation pairs
805 805 if chan0 == chan1:
806 806 acf_pairs.append(chan0)
807 807 acf_ind.append(l)
808 808 else:
809 809 ccf_pairs.append(pairsList[l])
810 810 ccf_ind.append(l)
811 811
812 812 data_acf = self.data_cf[acf_ind]
813 813 data_ccf = self.data_cf[ccf_ind]
814 814
815 815 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
816 816
817 817 @property
818 818 def normFactor(self):
819 819 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
820 820 acf_pairs = numpy.array(acf_pairs)
821 821 normFactor = numpy.zeros((self.nPairs, self.nHeights))
822 822
823 823 for p in range(self.nPairs):
824 824 pair = self.pairsList[p]
825 825
826 826 ch0 = pair[0]
827 827 ch1 = pair[1]
828 828
829 829 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
830 830 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
831 831 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
832 832
833 833 return normFactor
834 834
835 835
836 836 class Parameters(Spectra):
837 837
838 838 groupList = None # List of Pairs, Groups, etc
839 839 data_param = None # Parameters obtained
840 840 data_pre = None # Data Pre Parametrization
841 841 data_SNR = None # Signal to Noise Ratio
842 842 abscissaList = None # Abscissa, can be velocities, lags or time
843 843 utctimeInit = None # Initial UTC time
844 844 paramInterval = None # Time interval to calculate Parameters in seconds
845 845 useLocalTime = True
846 846 # Fitting
847 847 data_error = None # Error of the estimation
848 848 constants = None
849 849 library = None
850 850 # Output signal
851 851 outputInterval = None # Time interval to calculate output signal in seconds
852 852 data_output = None # Out signal
853 853 nAvg = None
854 854 noise_estimation = None
855 855 GauSPC = None # Fit gaussian SPC
856 856
857 857 def __init__(self):
858 858 '''
859 859 Constructor
860 860 '''
861 861 self.radarControllerHeaderObj = RadarControllerHeader()
862 862 self.systemHeaderObj = SystemHeader()
863 863 self.type = "Parameters"
864 864 self.timeZone = 0
865 865
866 866 def getTimeRange1(self, interval):
867 867
868 868 datatime = []
869 869
870 870 if self.useLocalTime:
871 871 time1 = self.utctimeInit - self.timeZone * 60
872 872 else:
873 873 time1 = self.utctimeInit
874 874
875 875 datatime.append(time1)
876 876 datatime.append(time1 + interval)
877 877 datatime = numpy.array(datatime)
878 878
879 879 return datatime
880 880
881 881 @property
882 882 def timeInterval(self):
883 883
884 884 if hasattr(self, 'timeInterval1'):
885 885 return self.timeInterval1
886 886 else:
887 887 return self.paramInterval
888 888
889 889 def setValue(self, value):
890 890
891 891 print("This property should not be initialized")
892 892
893 893 return
894 894
895 895 def getNoise(self):
896 896
897 897 return self.spc_noise
898 898
899 899 noise = property(getNoise, setValue, "I'm the 'Noise' property.")
900 900
901 901
902 902 class PlotterData(object):
903 903 '''
904 904 Object to hold data to be plotted
905 905 '''
906 906
907 907 MAXNUMX = 200
908 908 MAXNUMY = 200
909 909
910 def __init__(self, code, throttle_value, exp_code, localtime=True, buffering=True, snr=False):
910 def __init__(self, code, exp_code, localtime=True):
911 911
912 912 self.key = code
913 self.throttle = throttle_value
914 913 self.exp_code = exp_code
915 self.buffering = buffering
916 914 self.ready = False
917 915 self.flagNoData = False
918 916 self.localtime = localtime
919 917 self.data = {}
920 918 self.meta = {}
921 919 self.__heights = []
922 920
923 if 'snr' in code:
924 self.plottypes = ['snr']
925 elif code == 'spc':
926 self.plottypes = ['spc', 'noise', 'rti']
927 elif code == 'cspc':
928 self.plottypes = ['cspc', 'spc', 'noise', 'rti']
929 elif code == 'rti':
930 self.plottypes = ['noise', 'rti']
931 else:
932 self.plottypes = [code]
933
934 if 'snr' not in self.plottypes and snr:
935 self.plottypes.append('snr')
936
937 for plot in self.plottypes:
938 self.data[plot] = {}
939
940 921 def __str__(self):
941 922 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
942 923 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
943 924
944 925 def __len__(self):
945 return len(self.data[self.key])
926 return len(self.data)
946 927
947 928 def __getitem__(self, key):
948
949 if key not in self.data:
950 raise KeyError(log.error('Missing key: {}'.format(key)))
951 if 'spc' in key or not self.buffering:
952 ret = self.data[key][self.tm]
953 elif 'scope' in key:
954 ret = numpy.array(self.data[key][float(self.tm)])
955 else:
956 ret = numpy.array([self.data[key][x] for x in self.times])
929 if isinstance(key, int):
930 return self.data[self.times[key]]
931 elif isinstance(key, str):
932 ret = numpy.array([self.data[x][key] for x in self.times])
957 933 if ret.ndim > 1:
958 934 ret = numpy.swapaxes(ret, 0, 1)
959 return ret
935 return ret
960 936
961 937 def __contains__(self, key):
962 return key in self.data
938 return key in self.data[self.min_time]
963 939
964 940 def setup(self):
965 941 '''
966 942 Configure object
967 943 '''
968 944 self.type = ''
969 945 self.ready = False
970 946 del self.data
971 947 self.data = {}
972 948 self.__heights = []
973 949 self.__all_heights = set()
974 for plot in self.plottypes:
975 if 'snr' in plot:
976 plot = 'snr'
977 elif 'spc_moments' == plot:
978 plot = 'moments'
979 self.data[plot] = {}
980
981 if 'spc' in self.data or 'rti' in self.data or 'cspc' in self.data or 'moments' in self.data:
982 self.data['noise'] = {}
983 self.data['rti'] = {}
984 if 'noise' not in self.plottypes:
985 self.plottypes.append('noise')
986 if 'rti' not in self.plottypes:
987 self.plottypes.append('rti')
988 950
989 951 def shape(self, key):
990 952 '''
991 953 Get the shape of the one-element data for the given key
992 954 '''
993 955
994 if len(self.data[key]):
995 if 'spc' in key or not self.buffering:
996 return self.data[key].shape
997 return self.data[key][self.times[0]].shape
956 if len(self.data[self.min_time][key]):
957 return self.data[self.min_time][key].shape
998 958 return (0,)
999 959
1000 def update(self, dataOut, tm):
960 def update(self, data, tm, meta={}):
1001 961 '''
1002 962 Update data object with new data for the given time
1003 963 '''
1004 964
1005 self.profileIndex = dataOut.profileIndex
1006 self.tm = tm
1007 self.type = dataOut.type
1008 self.parameters = getattr(dataOut, 'parameters', [])
1009
1010 if hasattr(dataOut, 'meta'):
1011 self.meta.update(dataOut.meta)
1012
1013 if hasattr(dataOut, 'pairsList'):
1014 self.pairs = dataOut.pairsList
1015
1016 self.interval = dataOut.timeInterval
1017 if True in ['spc' in ptype for ptype in self.plottypes]:
1018 self.xrange = (dataOut.getFreqRange(1)/1000.,
1019 dataOut.getAcfRange(1), dataOut.getVelRange(1))
1020 self.__heights.append(dataOut.heightList)
1021 self.__all_heights.update(dataOut.heightList)
1022
1023 for plot in self.plottypes:
1024 if plot in ('spc', 'spc_moments', 'spc_cut'):
1025 z = dataOut.data_spc/dataOut.normFactor
1026 buffer = 10*numpy.log10(z)
1027 if plot == 'cspc':
1028 buffer = (dataOut.data_spc, dataOut.data_cspc)
1029 if plot == 'noise':
1030 buffer = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
1031 if plot in ('rti', 'spcprofile'):
1032 buffer = dataOut.getPower()
1033 if plot == 'snr_db':
1034 buffer = dataOut.data_SNR
1035 if plot == 'snr':
1036 buffer = 10*numpy.log10(dataOut.data_SNR)
1037 if plot == 'dop':
1038 buffer = dataOut.data_DOP
1039 if plot == 'pow':
1040 buffer = 10*numpy.log10(dataOut.data_POW)
1041 if plot == 'width':
1042 buffer = dataOut.data_WIDTH
1043 if plot == 'coh':
1044 buffer = dataOut.getCoherence()
1045 if plot == 'phase':
1046 buffer = dataOut.getCoherence(phase=True)
1047 if plot == 'output':
1048 buffer = dataOut.data_output
1049 if plot == 'param':
1050 buffer = dataOut.data_param
1051 if plot == 'scope':
1052 buffer = dataOut.data
1053 self.flagDataAsBlock = dataOut.flagDataAsBlock
1054 self.nProfiles = dataOut.nProfiles
1055 if plot == 'pp_power':
1056 buffer = dataOut.dataPP_POWER
1057 self.flagDataAsBlock = dataOut.flagDataAsBlock
1058 self.nProfiles = dataOut.nProfiles
1059 if plot == 'pp_signal':
1060 buffer = dataOut.dataPP_POW
1061 self.flagDataAsBlock = dataOut.flagDataAsBlock
1062 self.nProfiles = dataOut.nProfiles
1063 if plot == 'pp_velocity':
1064 buffer = dataOut.dataPP_DOP
1065 self.flagDataAsBlock = dataOut.flagDataAsBlock
1066 self.nProfiles = dataOut.nProfiles
1067 if plot == 'pp_specwidth':
1068 buffer = dataOut.dataPP_WIDTH
1069 self.flagDataAsBlock = dataOut.flagDataAsBlock
1070 self.nProfiles = dataOut.nProfiles
1071
1072 if plot == 'spc':
1073 self.data['spc'][tm] = buffer
1074 elif plot == 'cspc':
1075 self.data['cspc'][tm] = buffer
1076 elif plot == 'spc_moments':
1077 self.data['spc'][tm] = buffer
1078 self.data['moments'][tm] = dataOut.moments
1079 else:
1080 if self.buffering:
1081 self.data[plot][tm] = buffer
1082 else:
1083 self.data[plot][tm] = buffer
1084
1085 if dataOut.channelList is None:
1086 self.channels = range(buffer.shape[0])
1087 else:
1088 self.channels = dataOut.channelList
1089
1090 if buffer is None:
1091 self.flagNoData = True
1092 raise schainpy.admin.SchainWarning('Attribute data_{} is empty'.format(self.key))
965 self.data[tm] = data
966
967 for key, value in meta.items():
968 setattr(self, key, value)
1093 969
1094 970 def normalize_heights(self):
1095 971 '''
1096 972 Ensure the same dimensions of the data across different heightList values
1097 973 '''
1098 974
1099 975 H = numpy.array(list(self.__all_heights))
1100 976 H.sort()
1101 977 for key in self.data:
1102 978 shape = self.shape(key)[:-1] + H.shape
1103 979 for tm, obj in list(self.data[key].items()):
1104 980 h = self.__heights[self.times.tolist().index(tm)]
1105 981 if H.size == h.size:
1106 982 continue
1107 983 index = numpy.where(numpy.in1d(H, h))[0]
1108 984 dummy = numpy.zeros(shape) + numpy.nan
1109 985 if len(shape) == 2:
1110 986 dummy[:, index] = obj
1111 987 else:
1112 988 dummy[index] = obj
1113 989 self.data[key][tm] = dummy
1114 990
1115 991 self.__heights = [H for tm in self.times]
1116 992
1117 993 def jsonify(self, tm, plot_name, plot_type, decimate=False):
1118 994 '''
1119 995 Convert data to json
1120 996 '''
1121 997
1122 dy = int(self.heights.size/self.MAXNUMY) + 1
1123 if self.key in ('spc', 'cspc'):
1124 dx = int(self.data[self.key][tm].shape[1]/self.MAXNUMX) + 1
998 meta = {}
999 meta['xrange'] = []
1000 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1001 tmp = self.data[tm][self.key]
1002 shape = tmp.shape
1003 if len(shape) == 2:
1004 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1005 elif len(shape) == 3:
1006 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1125 1007 data = self.roundFloats(
1126 self.data[self.key][tm][::, ::dx, ::dy].tolist())
1008 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1009 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1127 1010 else:
1128 if self.key is 'noise':
1129 data = [[x] for x in self.roundFloats(self.data[self.key][tm].tolist())]
1130 else:
1131 data = self.roundFloats(self.data[self.key][tm][::, ::dy].tolist())
1132
1133 meta = {}
1011 data = self.roundFloats(self.data[tm][self.key].tolist())
1012
1134 1013 ret = {
1135 1014 'plot': plot_name,
1136 1015 'code': self.exp_code,
1137 1016 'time': float(tm),
1138 1017 'data': data,
1139 1018 }
1140 1019 meta['type'] = plot_type
1141 1020 meta['interval'] = float(self.interval)
1142 1021 meta['localtime'] = self.localtime
1143 meta['yrange'] = self.roundFloats(self.heights[::dy].tolist())
1144 if 'spc' in self.data or 'cspc' in self.data:
1145 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1146 else:
1147 meta['xrange'] = []
1148
1022 meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
1149 1023 meta.update(self.meta)
1150 1024 ret['metadata'] = meta
1151 1025 return json.dumps(ret)
1152 1026
1153 1027 @property
1154 1028 def times(self):
1155 1029 '''
1156 1030 Return the list of times of the current data
1157 1031 '''
1158 1032
1159 ret = numpy.array([t for t in self.data[self.key]])
1160 if self:
1161 ret.sort()
1162 return ret
1033 ret = [t for t in self.data]
1034 ret.sort()
1035 return numpy.array(ret)
1163 1036
1164 1037 @property
1165 1038 def min_time(self):
1166 1039 '''
1167 1040 Return the minimum time value
1168 1041 '''
1169 1042
1170 1043 return self.times[0]
1171 1044
1172 1045 @property
1173 1046 def max_time(self):
1174 1047 '''
1175 1048 Return the maximum time value
1176 1049 '''
1177 1050
1178 1051 return self.times[-1]
1179 1052
1180 @property
1181 def heights(self):
1182 '''
1183 Return the list of heights of the current data
1184 '''
1053 # @property
1054 # def heights(self):
1055 # '''
1056 # Return the list of heights of the current data
1057 # '''
1185 1058
1186 return numpy.array(self.__heights[-1])
1059 # return numpy.array(self.__heights[-1])
1187 1060
1188 1061 @staticmethod
1189 1062 def roundFloats(obj):
1190 1063 if isinstance(obj, list):
1191 1064 return list(map(PlotterData.roundFloats, obj))
1192 1065 elif isinstance(obj, float):
1193 1066 return round(obj, 2)
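A minimal usage sketch of the refactored PlotterData buffer (the key, exp_code and time values are assumptions; the per-time dict layout follows the new update() signature):

    pd = PlotterData('rti', exp_code=100)
    pd.update({'rti': numpy.zeros((2, 100))}, tm=1.0)
    pd.update({'rti': numpy.ones((2, 100))}, tm=2.0, meta={'interval': 10})
    print(len(pd), pd.times)     # 2 [1. 2.]
    print('rti' in pd)           # True, keys are checked at min_time
    profiles = pd['rti']         # stacked over times, channel axis first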
@@ -1,906 +1,906
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import inspect
11 11 from schainpy.utils import log
12 12
13 13 SPEED_OF_LIGHT = 299792458
14 14 SPEED_OF_LIGHT = 3e8 # approximate value actually used below (overrides the exact constant above)
15 15
16 16 BASIC_STRUCTURE = numpy.dtype([
17 17 ('nSize', '<u4'),
18 18 ('nVersion', '<u2'),
19 19 ('nDataBlockId', '<u4'),
20 20 ('nUtime', '<u4'),
21 21 ('nMilsec', '<u2'),
22 22 ('nTimezone', '<i2'),
23 23 ('nDstflag', '<i2'),
24 24 ('nErrorCount', '<u4')
25 25 ])
26 26
27 27 SYSTEM_STRUCTURE = numpy.dtype([
28 28 ('nSize', '<u4'),
29 29 ('nNumSamples', '<u4'),
30 30 ('nNumProfiles', '<u4'),
31 31 ('nNumChannels', '<u4'),
32 32 ('nADCResolution', '<u4'),
33 33 ('nPCDIOBusWidth', '<u4'),
34 34 ])
35 35
36 36 RADAR_STRUCTURE = numpy.dtype([
37 37 ('nSize', '<u4'),
38 38 ('nExpType', '<u4'),
39 39 ('nNTx', '<u4'),
40 40 ('fIpp', '<f4'),
41 41 ('fTxA', '<f4'),
42 42 ('fTxB', '<f4'),
43 43 ('nNumWindows', '<u4'),
44 44 ('nNumTaus', '<u4'),
45 45 ('nCodeType', '<u4'),
46 46 ('nLine6Function', '<u4'),
47 47 ('nLine5Function', '<u4'),
48 48 ('fClock', '<f4'),
49 49 ('nPrePulseBefore', '<u4'),
50 50 ('nPrePulseAfter', '<u4'),
51 51 ('sRangeIPP', '<a20'),
52 52 ('sRangeTxA', '<a20'),
53 53 ('sRangeTxB', '<a20'),
54 54 ])
55 55
56 56 SAMPLING_STRUCTURE = numpy.dtype(
57 57 [('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')])
58 58
59 59
60 60 PROCESSING_STRUCTURE = numpy.dtype([
61 61 ('nSize', '<u4'),
62 62 ('nDataType', '<u4'),
63 63 ('nSizeOfDataBlock', '<u4'),
64 64 ('nProfilesperBlock', '<u4'),
65 65 ('nDataBlocksperFile', '<u4'),
66 66 ('nNumWindows', '<u4'),
67 67 ('nProcessFlags', '<u4'),
68 68 ('nCoherentIntegrations', '<u4'),
69 69 ('nIncoherentIntegrations', '<u4'),
70 70 ('nTotalSpectra', '<u4')
71 71 ])
72 72
73 73
74 74 class Header(object):
75 75
76 76 def __init__(self):
77 77 raise NotImplementedError
78 78
79 79 def copy(self):
80 80 return copy.deepcopy(self)
81 81
82 82 def read(self):
83 83
84 84 raise NotImplementedError
85 85
86 86 def write(self):
87 87
88 88 raise NotImplementedError
89 89
90 90 def getAllowedArgs(self):
91 91 args = inspect.getargspec(self.__init__).args
92 92 try:
93 93 args.remove('self')
94 94 except:
95 95 pass
96 96 return args
97 97
98 98 def getAsDict(self):
99 99 args = self.getAllowedArgs()
100 100 asDict = {}
101 101 for x in args:
102 102 asDict[x] = self[x]
103 103 return asDict
104 104
105 105 def __getitem__(self, name):
106 106 return getattr(self, name)
107 107
108 108 def printInfo(self):
109 109
110 110 message = "#" * 50 + "\n"
111 111 message += self.__class__.__name__.upper() + "\n"
112 112 message += "#" * 50 + "\n"
113 113
114 114 keyList = list(self.__dict__.keys())
115 115 keyList.sort()
116 116
117 117 for key in keyList:
118 118 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
119 119
120 120 if "size" not in keyList:
121 121 attr = getattr(self, "size")
122 122
123 123 if attr:
124 124 message += "%s = %s" % ("size", attr) + "\n"
125 125
126 126 print(message)
127 127
128 128
129 129 class BasicHeader(Header):
130 130
131 131 size = None
132 132 version = None
133 133 dataBlock = None
134 134 utc = None
135 135 ltc = None
136 136 miliSecond = None
137 137 timeZone = None
138 138 dstFlag = None
139 139 errorCount = None
140 140 datatime = None
141 141 structure = BASIC_STRUCTURE
142 142 __LOCALTIME = None
143 143
144 144 def __init__(self, useLocalTime=True):
145 145
146 146 self.size = 24
147 147 self.version = 0
148 148 self.dataBlock = 0
149 149 self.utc = 0
150 150 self.miliSecond = 0
151 151 self.timeZone = 0
152 152 self.dstFlag = 0
153 153 self.errorCount = 0
154 154
155 155 self.useLocalTime = useLocalTime
156 156
157 157 def read(self, fp):
158 158
159 159 self.length = 0
160 160 try:
161 161 if hasattr(fp, 'read'):
162 162 header = numpy.fromfile(fp, BASIC_STRUCTURE, 1)
163 163 else:
164 164 header = numpy.fromstring(fp, BASIC_STRUCTURE, 1)
165 165 except Exception as e:
166 166 print("BasicHeader: ")
167 167 print(e)
168 168 return 0
169 169
170 170 self.size = int(header['nSize'][0])
171 171 self.version = int(header['nVersion'][0])
172 172 self.dataBlock = int(header['nDataBlockId'][0])
173 173 self.utc = int(header['nUtime'][0])
174 174 self.miliSecond = int(header['nMilsec'][0])
175 175 self.timeZone = int(header['nTimezone'][0])
176 176 self.dstFlag = int(header['nDstflag'][0])
177 177 self.errorCount = int(header['nErrorCount'][0])
178 178
179 179 if self.size < 24:
180 180 return 0
181 181
182 182 self.length = header.nbytes
183 183 return 1
184 184
185 185 def write(self, fp):
186 186
187 187 headerTuple = (self.size, self.version, self.dataBlock, self.utc,
188 188 self.miliSecond, self.timeZone, self.dstFlag, self.errorCount)
189 189 header = numpy.array(headerTuple, BASIC_STRUCTURE)
190 190 header.tofile(fp)
191 191
192 192 return 1
193 193
194 194 def get_ltc(self):
195 195
196 196 return self.utc - self.timeZone * 60
197 197
198 198 def set_ltc(self, value):
199 199
200 200 self.utc = value + self.timeZone * 60
201 201
202 202 def get_datatime(self):
203 203
204 204 return datetime.datetime.utcfromtimestamp(self.ltc)
205 205
206 206 ltc = property(get_ltc, set_ltc)
207 207 datatime = property(get_datatime)
208 208
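A quick illustration of the ltc/datatime properties above (the utc and timeZone values are assumptions; timeZone is expressed in minutes):

    bh = BasicHeader()
    bh.utc = 1600000000
    bh.timeZone = 300            # e.g. UTC-5 expressed in minutes
    print(bh.ltc)                # 1600000000 - 300*60 = 1599982000
    print(bh.datatime)           # datetime built from the corrected timestamp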
209 209
210 210 class SystemHeader(Header):
211 211
212 212 size = None
213 213 nSamples = None
214 214 nProfiles = None
215 215 nChannels = None
216 216 adcResolution = None
217 217 pciDioBusWidth = None
218 218 structure = SYSTEM_STRUCTURE
219 219
220 220 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0):
221 221
222 222 self.size = 24
223 223 self.nSamples = nSamples
224 224 self.nProfiles = nProfiles
225 225 self.nChannels = nChannels
226 226 self.adcResolution = adcResolution
227 227 self.pciDioBusWidth = pciDioBusWidth
228 228
229 229 def read(self, fp):
230 230 self.length = 0
231 231 try:
232 232 startFp = fp.tell()
233 233 except Exception as e:
234 234 startFp = None
235 235 pass
236 236
237 237 try:
238 238 if hasattr(fp, 'read'):
239 239 header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
240 240 else:
241 241 header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1)
242 242 except Exception as e:
243 243 print("System Header: " + str(e))
244 244 return 0
245 245
246 246 self.size = header['nSize'][0]
247 247 self.nSamples = header['nNumSamples'][0]
248 248 self.nProfiles = header['nNumProfiles'][0]
249 249 self.nChannels = header['nNumChannels'][0]
250 250 self.adcResolution = header['nADCResolution'][0]
251 251 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
252 252
253 253 if startFp is not None:
254 254 endFp = self.size + startFp
255 255
256 256 if fp.tell() > endFp:
257 257 sys.stderr.write(
258 258 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
259 259 return 0
260 260
261 261 if fp.tell() < endFp:
262 262 sys.stderr.write(
263 263 "Warning %s: Size value read from System Header is greater than it has to be\n" % fp.name)
264 264 return 0
265 265
266 266 self.length = header.nbytes
267 267 return 1
268 268
269 269 def write(self, fp):
270 270
271 271 headerTuple = (self.size, self.nSamples, self.nProfiles,
272 272 self.nChannels, self.adcResolution, self.pciDioBusWidth)
273 273 header = numpy.array(headerTuple, SYSTEM_STRUCTURE)
274 274 header.tofile(fp)
275 275
276 276 return 1
277 277
278 278
279 279 class RadarControllerHeader(Header):
280 280
281 281 expType = None
282 282 nTx = None
283 283 ipp = None
284 284 txA = None
285 285 txB = None
286 286 nWindows = None
287 287 numTaus = None
288 288 codeType = None
289 289 line6Function = None
290 290 line5Function = None
291 291 fClock = None
292 292 prePulseBefore = None
293 293 prePulseAfter = None
294 294 rangeIpp = None
295 295 rangeTxA = None
296 296 rangeTxB = None
297 297 structure = RADAR_STRUCTURE
298 298 __size = None
299 299
300 300 def __init__(self, expType=2, nTx=1,
301 301 ipp=None, txA=0, txB=0,
302 302 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
303 303 numTaus=0, line6Function=0, line5Function=0, fClock=None,
304 304 prePulseBefore=0, prePulseAfter=0,
305 codeType=0, nCode=0, nBaud=0, code=None,
305 codeType=0, nCode=0, nBaud=0, code=[],
306 306 flip1=0, flip2=0):
307 307
308 308 # self.size = 116
309 309 self.expType = expType
310 310 self.nTx = nTx
311 311 self.ipp = ipp
312 312 self.txA = txA
313 313 self.txB = txB
314 314 self.rangeIpp = ipp
315 315 self.rangeTxA = txA
316 316 self.rangeTxB = txB
317 317
318 318 self.nWindows = nWindows
319 319 self.numTaus = numTaus
320 320 self.codeType = codeType
321 321 self.line6Function = line6Function
322 322 self.line5Function = line5Function
323 323 self.fClock = fClock
324 324 self.prePulseBefore = prePulseBefore
325 325 self.prePulseAfter = prePulseAfter
326 326
327 327 self.nHeights = nHeights
328 328 self.firstHeight = firstHeight
329 329 self.deltaHeight = deltaHeight
330 330 self.samplesWin = nHeights
331 331
332 332 self.nCode = nCode
333 333 self.nBaud = nBaud
334 334 self.code = code
335 335 self.flip1 = flip1
336 336 self.flip2 = flip2
337 337
338 338 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
339 339 # self.dynamic = numpy.array([],numpy.dtype('byte'))
340 340
341 341 if self.fClock is None and self.deltaHeight is not None:
342 342 self.fClock = 0.15 / (deltaHeight * 1e-6) # 0.15Km / (height * 1u)
343 343
344 344 def read(self, fp):
345 345 self.length = 0
346 346 try:
347 347 startFp = fp.tell()
348 348 except Exception as e:
349 349 startFp = None
350 350 pass
351 351
352 352 try:
353 353 if hasattr(fp, 'read'):
354 354 header = numpy.fromfile(fp, RADAR_STRUCTURE, 1)
355 355 else:
356 356 header = numpy.fromstring(fp, RADAR_STRUCTURE, 1)
357 357 self.length += header.nbytes
358 358 except Exception as e:
359 359 print("RadarControllerHeader: " + str(e))
360 360 return 0
361 361
362 362 size = int(header['nSize'][0])
363 363 self.expType = int(header['nExpType'][0])
364 364 self.nTx = int(header['nNTx'][0])
365 365 self.ipp = float(header['fIpp'][0])
366 366 self.txA = float(header['fTxA'][0])
367 367 self.txB = float(header['fTxB'][0])
368 368 self.nWindows = int(header['nNumWindows'][0])
369 369 self.numTaus = int(header['nNumTaus'][0])
370 370 self.codeType = int(header['nCodeType'][0])
371 371 self.line6Function = int(header['nLine6Function'][0])
372 372 self.line5Function = int(header['nLine5Function'][0])
373 373 self.fClock = float(header['fClock'][0])
374 374 self.prePulseBefore = int(header['nPrePulseBefore'][0])
375 375 self.prePulseAfter = int(header['nPrePulseAfter'][0])
376 376 self.rangeIpp = header['sRangeIPP'][0]
377 377 self.rangeTxA = header['sRangeTxA'][0]
378 378 self.rangeTxB = header['sRangeTxB'][0]
379 379
380 380 try:
381 381 if hasattr(fp, 'read'):
382 382 samplingWindow = numpy.fromfile(
383 383 fp, SAMPLING_STRUCTURE, self.nWindows)
384 384 else:
385 385 samplingWindow = numpy.fromstring(
386 386 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
387 387 self.length += samplingWindow.nbytes
388 388 except Exception as e:
389 389 print("RadarControllerHeader: " + str(e))
390 390 return 0
391 391 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
392 392 self.firstHeight = samplingWindow['h0']
393 393 self.deltaHeight = samplingWindow['dh']
394 394 self.samplesWin = samplingWindow['nsa']
395 395
396 396 try:
397 397 if hasattr(fp, 'read'):
398 398 self.Taus = numpy.fromfile(fp, '<f4', self.numTaus)
399 399 else:
400 400 self.Taus = numpy.fromstring(
401 401 fp[self.length:], '<f4', self.numTaus)
402 402 self.length += self.Taus.nbytes
403 403 except Exception as e:
404 404 print("RadarControllerHeader: " + str(e))
405 405 return 0
406 406
407 407 self.code_size = 0
408 408 if self.codeType != 0:
409 409
410 410 try:
411 411 if hasattr(fp, 'read'):
412 412 self.nCode = numpy.fromfile(fp, '<u4', 1)[0]
413 413 self.length += self.nCode.nbytes
414 414 self.nBaud = numpy.fromfile(fp, '<u4', 1)[0]
415 415 self.length += self.nBaud.nbytes
416 416 else:
417 417 self.nCode = numpy.fromstring(
418 418 fp[self.length:], '<u4', 1)[0]
419 419 self.length += self.nCode.nbytes
420 420 self.nBaud = numpy.fromstring(
421 421 fp[self.length:], '<u4', 1)[0]
422 422 self.length += self.nBaud.nbytes
423 423 except Exception as e:
424 424 print("RadarControllerHeader: " + str(e))
425 425 return 0
426 426 code = numpy.empty([self.nCode, self.nBaud], dtype='i1')
427 427
428 428 for ic in range(self.nCode):
429 429 try:
430 430 if hasattr(fp, 'read'):
431 431 temp = numpy.fromfile(fp, 'u4', int(
432 432 numpy.ceil(self.nBaud / 32.)))
433 433 else:
434 434 temp = numpy.fromstring(
435 435 fp, 'u4', int(numpy.ceil(self.nBaud / 32.)))
436 436 self.length += temp.nbytes
437 437 except Exception as e:
438 438 print("RadarControllerHeader: " + str(e))
439 439 return 0
440 440
441 441 for ib in range(self.nBaud - 1, -1, -1):
442 442 code[ic, ib] = temp[int(ib / 32)] % 2
443 443 temp[int(ib / 32)] = temp[int(ib / 32)] / 2
444 444
445 445 self.code = 2.0 * code - 1.0
446 446 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
447 447
448 448 # if self.line5Function == RCfunction.FLIP:
449 449 # self.flip1 = numpy.fromfile(fp,'<u4',1)
450 450 #
451 451 # if self.line6Function == RCfunction.FLIP:
452 452 # self.flip2 = numpy.fromfile(fp,'<u4',1)
453 453 if startFp is not None:
454 454 endFp = size + startFp
455 455
456 456 if fp.tell() != endFp:
457 457 # fp.seek(endFp)
458 458 print("%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" % (fp.name, fp.tell() - startFp, size))
459 459 # return 0
460 460
461 461 if fp.tell() > endFp:
462 462 sys.stderr.write(
463 463 "Warning %s: Size value read from Radar Controller header is lower than it has to be\n" % fp.name)
464 464 # return 0
465 465
466 466 if fp.tell() < endFp:
467 467 sys.stderr.write(
468 468 "Warning %s: Size value read from Radar Controller header is greater than it has to be\n" % fp.name)
469 469
470 470 return 1
471 471
472 472 def write(self, fp):
473 473
474 474 headerTuple = (self.size,
475 475 self.expType,
476 476 self.nTx,
477 477 self.ipp,
478 478 self.txA,
479 479 self.txB,
480 480 self.nWindows,
481 481 self.numTaus,
482 482 self.codeType,
483 483 self.line6Function,
484 484 self.line5Function,
485 485 self.fClock,
486 486 self.prePulseBefore,
487 487 self.prePulseAfter,
488 488 self.rangeIpp,
489 489 self.rangeTxA,
490 490 self.rangeTxB)
491 491
492 492 header = numpy.array(headerTuple, RADAR_STRUCTURE)
493 493 header.tofile(fp)
494 494
495 495 sampleWindowTuple = (
496 496 self.firstHeight, self.deltaHeight, self.samplesWin)
497 497 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
498 498 samplingWindow.tofile(fp)
499 499
500 500 if self.numTaus > 0:
501 501 self.Taus.tofile(fp)
502 502
503 503 if self.codeType != 0:
504 504 nCode = numpy.array(self.nCode, '<u4')
505 505 nCode.tofile(fp)
506 506 nBaud = numpy.array(self.nBaud, '<u4')
507 507 nBaud.tofile(fp)
508 508 code1 = (self.code + 1.0) / 2.
509 509
510 510 for ic in range(self.nCode):
511 511 tempx = numpy.zeros(int(numpy.ceil(self.nBaud / 32.)))
512 512 start = 0
513 513 end = 32
514 514 for i in range(len(tempx)):
515 515 code_selected = code1[ic, start:end]
516 516 for j in range(len(code_selected) - 1, -1, -1):
517 517 if code_selected[j] == 1:
518 518 tempx[i] = tempx[i] + \
519 519 2**(len(code_selected) - 1 - j)
520 520 start = start + 32
521 521 end = end + 32
522 522
523 523 tempx = tempx.astype('u4')
524 524 tempx.tofile(fp)
525 525
526 526 # if self.line5Function == RCfunction.FLIP:
527 527 # self.flip1.tofile(fp)
528 528 #
529 529 # if self.line6Function == RCfunction.FLIP:
530 530 # self.flip2.tofile(fp)
531 531
532 532 return 1
533 533
534 534 def get_ippSeconds(self):
535 535 '''
536 536 '''
537 537 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
538 538
539 539 return ippSeconds
540 540
541 541 def set_ippSeconds(self, ippSeconds):
542 542 '''
543 543 '''
544 544
545 545 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0 * 1000)
546 546
547 547 return
548 548
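A worked example of the ipp (km) to ippSeconds (s) conversion implemented by the property pair above (the 150 km value is an assumption):

    hdr = RadarControllerHeader(ipp=150.0)   # 150 km inter-pulse period
    print(hdr.ippSeconds)                    # 2.0 * 1000 * 150 / 3e8 = 1e-3 s
    hdr.ippSeconds = 2e-3
    print(hdr.ipp)                           # 300.0 km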
549 549 def get_size(self):
550 550
551 551 self.__size = 116 + 12 * self.nWindows + 4 * self.numTaus
552 552
553 553 if self.codeType != 0:
554 554 self.__size += 4 + 4 + 4 * self.nCode * \
555 555 numpy.ceil(self.nBaud / 32.)
556 556
557 557 return self.__size
558 558
559 559 def set_size(self, value):
560 560
561 561 raise IOError("size is a property and it cannot be set, just read")
562 562
563 563 return
564 564
565 565 ippSeconds = property(get_ippSeconds, set_ippSeconds)
566 566 size = property(get_size, set_size)
567 567
568 568
569 569 class ProcessingHeader(Header):
570 570
571 571 # size = None
572 572 dtype = None
573 573 blockSize = None
574 574 profilesPerBlock = None
575 575 dataBlocksPerFile = None
576 576 nWindows = None
577 577 processFlags = None
578 578 nCohInt = None
579 579 nIncohInt = None
580 580 totalSpectra = None
581 581 structure = PROCESSING_STRUCTURE
582 582 flag_dc = None
583 583 flag_cspc = None
584 584
585 585 def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
586 586 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
587 587 code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False
588 588 ):
589 589
590 590 # self.size = 0
591 591 self.dtype = dtype
592 592 self.blockSize = blockSize
593 593 self.profilesPerBlock = 0
594 594 self.dataBlocksPerFile = 0
595 595 self.nWindows = 0
596 596 self.processFlags = 0
597 597 self.nCohInt = 0
598 598 self.nIncohInt = 0
599 599 self.totalSpectra = 0
600 600
601 601 self.nHeights = 0
602 602 self.firstHeight = 0
603 603 self.deltaHeight = 0
604 604 self.samplesWin = 0
605 605 self.spectraComb = 0
606 606 self.nCode = None
607 607 self.code = None
608 608 self.nBaud = None
609 609
610 610 self.shif_fft = False
611 611 self.flag_dc = False
612 612 self.flag_cspc = False
613 613 self.flag_decode = False
614 614 self.flag_deflip = False
615 615 self.length = 0
616 616
617 617 def read(self, fp):
618 618 self.length = 0
619 619 try:
620 620 startFp = fp.tell()
621 621 except Exception as e:
622 622 startFp = None
623 623 pass
624 624
625 625 try:
626 626 if hasattr(fp, 'read'):
627 627 header = numpy.fromfile(fp, PROCESSING_STRUCTURE, 1)
628 628 else:
629 629 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
630 630 self.length += header.nbytes
631 631 except Exception as e:
632 632 print("ProcessingHeader: " + str(e))
633 633 return 0
634 634
635 635 size = int(header['nSize'][0])
636 636 self.dtype = int(header['nDataType'][0])
637 637 self.blockSize = int(header['nSizeOfDataBlock'][0])
638 638 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
639 639 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
640 640 self.nWindows = int(header['nNumWindows'][0])
641 641 self.processFlags = header['nProcessFlags']
642 642 self.nCohInt = int(header['nCoherentIntegrations'][0])
643 643 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
644 644 self.totalSpectra = int(header['nTotalSpectra'][0])
645 645
646 646 try:
647 647 if hasattr(fp, 'read'):
648 648 samplingWindow = numpy.fromfile(
649 649 fp, SAMPLING_STRUCTURE, self.nWindows)
650 650 else:
651 651 samplingWindow = numpy.fromstring(
652 652 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
653 653 self.length += samplingWindow.nbytes
654 654 except Exception as e:
655 655 print("ProcessingHeader: " + str(e))
656 656 return 0
657 657
658 658 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
659 659 self.firstHeight = float(samplingWindow['h0'][0])
660 660 self.deltaHeight = float(samplingWindow['dh'][0])
661 661 self.samplesWin = samplingWindow['nsa'][0]
662 662
663 663 try:
664 664 if hasattr(fp, 'read'):
665 665 self.spectraComb = numpy.fromfile(
666 666 fp, 'u1', 2 * self.totalSpectra)
667 667 else:
668 668 self.spectraComb = numpy.fromstring(
669 669 fp[self.length:], 'u1', 2 * self.totalSpectra)
670 670 self.length += self.spectraComb.nbytes
671 671 except Exception as e:
672 672 print("ProcessingHeader: " + str(e))
673 673 return 0
674 674
675 675 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
676 676 self.nCode = int(numpy.fromfile(fp, '<u4', 1))
677 677 self.nBaud = int(numpy.fromfile(fp, '<u4', 1))
678 678 self.code = numpy.fromfile(
679 679 fp, '<f4', self.nCode * self.nBaud).reshape(self.nCode, self.nBaud)
680 680
681 681 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
682 682 exp_name_len = int(numpy.fromfile(fp, '<u4', 1))
683 683 exp_name = numpy.fromfile(fp, 'u1', exp_name_len + 1)
684 684
685 685 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
686 686 self.shif_fft = True
687 687 else:
688 688 self.shif_fft = False
689 689
690 690 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
691 691 self.flag_dc = True
692 692 else:
693 693 self.flag_dc = False
694 694
695 695 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
696 696 self.flag_decode = True
697 697 else:
698 698 self.flag_decode = False
699 699
700 700 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
701 701 self.flag_deflip = True
702 702 else:
703 703 self.flag_deflip = False
704 704
705 705 nChannels = 0
706 706 nPairs = 0
707 707 pairList = []
708 708
709 709 for i in range(0, self.totalSpectra * 2, 2):
710 710 if self.spectraComb[i] == self.spectraComb[i + 1]:
711 711 nChannels = nChannels + 1 # pair of identical channels
712 712 else:
713 713 nPairs = nPairs + 1 # pair of different channels
714 714 pairList.append((self.spectraComb[i], self.spectraComb[i + 1]))
715 715
716 716 self.flag_cspc = False
717 717 if nPairs > 0:
718 718 self.flag_cspc = True
719 719
720 720 if startFp is not None:
721 721 endFp = size + startFp
722 722 if fp.tell() > endFp:
723 723 sys.stderr.write(
724 724 "Warning: Processing header size is lower than it has to be")
725 725 return 0
726 726
727 727 if fp.tell() < endFp:
728 728 sys.stderr.write(
729 729 "Warning: Processing header size is greater than it is considered")
730 730
731 731 return 1
732 732
733 733 def write(self, fp):
734 734 # Clear DEFINE_PROCESS_CODE
735 735 self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
736 736
737 737 headerTuple = (self.size,
738 738 self.dtype,
739 739 self.blockSize,
740 740 self.profilesPerBlock,
741 741 self.dataBlocksPerFile,
742 742 self.nWindows,
743 743 self.processFlags,
744 744 self.nCohInt,
745 745 self.nIncohInt,
746 746 self.totalSpectra)
747 747
748 748 header = numpy.array(headerTuple, PROCESSING_STRUCTURE)
749 749 header.tofile(fp)
750 750
751 751 if self.nWindows != 0:
752 752 sampleWindowTuple = (
753 753 self.firstHeight, self.deltaHeight, self.samplesWin)
754 754 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
755 755 samplingWindow.tofile(fp)
756 756
757 757 if self.totalSpectra != 0:
758 758 # spectraComb = numpy.array([],numpy.dtype('u1'))
759 759 spectraComb = self.spectraComb
760 760 spectraComb.tofile(fp)
761 761
762 762 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
763 763 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) # test with data that actually stores a code; this has not been verified yet
764 764 # nCode.tofile(fp)
765 765 #
766 766 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
767 767 # nBaud.tofile(fp)
768 768 #
769 769 # code = self.code.reshape(self.nCode*self.nBaud)
770 770 # code = code.astype(numpy.dtype('<f4'))
771 771 # code.tofile(fp)
772 772
773 773 return 1
774 774
775 775 def get_size(self):
776 776
777 777 self.__size = 40 + 12 * self.nWindows + 2 * self.totalSpectra
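        # 40 bytes is the fixed PROCESSING_STRUCTURE part (the ten 4-byte fields
        # written in write()); each sampling window presumably adds a 12-byte
        # SAMPLING_STRUCTURE record (h0, dh, nsa), and spectraComb adds two 'u1'
        # bytes per spectrum combination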
778 778
779 779 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
780 780 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
781 781 # self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
782 782
783 783 return self.__size
784 784
785 785 def set_size(self, value):
786 786
787 787 raise IOError("size is a read-only property, it cannot be set")
788 788
789 789 return
790 790
791 791 size = property(get_size, set_size)
792 792
793 793
794 794 class RCfunction:
795 795 NONE = 0
796 796 FLIP = 1
797 797 CODE = 2
798 798 SAMPLING = 3
799 799 LIN6DIV256 = 4
800 800 SYNCHRO = 5
801 801
802 802
803 803 class nCodeType:
804 804 NONE = 0
805 805 USERDEFINE = 1
806 806 BARKER2 = 2
807 807 BARKER3 = 3
808 808 BARKER4 = 4
809 809 BARKER5 = 5
810 810 BARKER7 = 6
811 811 BARKER11 = 7
812 812 BARKER13 = 8
813 813 AC128 = 9
814 814 COMPLEMENTARYCODE2 = 10
815 815 COMPLEMENTARYCODE4 = 11
816 816 COMPLEMENTARYCODE8 = 12
817 817 COMPLEMENTARYCODE16 = 13
818 818 COMPLEMENTARYCODE32 = 14
819 819 COMPLEMENTARYCODE64 = 15
820 820 COMPLEMENTARYCODE128 = 16
821 821 CODE_BINARY28 = 17
822 822
823 823
824 824 class PROCFLAG:
825 825
826 826 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
827 827 DECODE_DATA = numpy.uint32(0x00000002)
828 828 SPECTRA_CALC = numpy.uint32(0x00000004)
829 829 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
830 830 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
831 831 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
832 832
833 833 DATATYPE_CHAR = numpy.uint32(0x00000040)
834 834 DATATYPE_SHORT = numpy.uint32(0x00000080)
835 835 DATATYPE_LONG = numpy.uint32(0x00000100)
836 836 DATATYPE_INT64 = numpy.uint32(0x00000200)
837 837 DATATYPE_FLOAT = numpy.uint32(0x00000400)
838 838 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
839 839
840 840 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
841 841 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
842 842 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
843 843
844 844 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
845 845 DEFLIP_DATA = numpy.uint32(0x00010000)
846 846 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
847 847
848 848 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
849 849 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
850 850 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
851 851 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
852 852 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
853 853
854 854 EXP_NAME_ESP = numpy.uint32(0x00200000)
855 855 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
856 856
857 857 OPERATION_MASK = numpy.uint32(0x0000003F)
858 858 DATATYPE_MASK = numpy.uint32(0x00000FC0)
859 859 DATAARRANGE_MASK = numpy.uint32(0x00007000)
860 860 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
861 861
862 862
863 863 dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
864 864 dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
865 865 dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
866 866 dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
867 867 dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
868 868 dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
869 869
870 870 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
871 871
872 872 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
873 873 PROCFLAG.DATATYPE_SHORT,
874 874 PROCFLAG.DATATYPE_LONG,
875 875 PROCFLAG.DATATYPE_INT64,
876 876 PROCFLAG.DATATYPE_FLOAT,
877 877 PROCFLAG.DATATYPE_DOUBLE]
878 878
879 879 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
880 880
881 881
882 882 def get_dtype_index(numpy_dtype):
883 883
884 884 index = None
885 885
886 886 for i in range(len(NUMPY_DTYPE_LIST)):
887 887 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
888 888 index = i
889 889 break
890 890
891 891 return index
892 892
893 893
894 894 def get_numpy_dtype(index):
895 895
896 896 return NUMPY_DTYPE_LIST[index]
897 897
898 898
899 899 def get_procflag_dtype(index):
900 900
901 901 return PROCFLAG_DTYPE_LIST[index]
902 902
903 903
904 904 def get_dtype_width(index):
905 905
906 906 return DTYPE_WIDTH[index] No newline at end of file
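# A minimal usage sketch of the lookup helpers above, using only the tables
# defined in this module (index 1 corresponds to complex int16 samples):
#
#   idx = get_dtype_index(numpy.dtype([('real', '<i2'), ('imag', '<i2')]))  # -> 1
#   get_numpy_dtype(idx)      # -> dtype([('real', '<i2'), ('imag', '<i2')])
#   get_procflag_dtype(idx)   # -> PROCFLAG.DATATYPE_SHORT
#   get_dtype_width(idx)      # -> 2 (bytes per real/imag component)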
@@ -1,665 +1,688
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 from multiprocessing import Queue
15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 matplotlib.use('WxAgg')
25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86
87 86 def popup(message):
88 87 '''
89 88 '''
90 89
91 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
92 91 text = '\n'.join([s.strip() for s in message.split(':')])
93 92 fig.text(0.01, 0.5, text, ha='left', va='center',
94 93 size='20', weight='heavy', color='w')
95 94 fig.show()
96 95 figpause(1000)
97 96
98 97
99 98 class Throttle(object):
100 99 '''
101 100 Decorator that prevents a function from being called more than once every
102 101 time period.
103 102 To create a function that cannot be called more than once a minute, but
104 103 will sleep until it can be called:
105 104 @Throttle(minutes=1)
106 105 def foo():
107 106 pass
108 107
109 108 for i in range(10):
110 109 foo()
111 110 print "This function has run %s times." % i
112 111 '''
113 112
114 113 def __init__(self, seconds=0, minutes=0, hours=0):
115 114 self.throttle_period = datetime.timedelta(
116 115 seconds=seconds, minutes=minutes, hours=hours
117 116 )
118 117
119 118 self.time_of_last_call = datetime.datetime.min
120 119
121 120 def __call__(self, fn):
122 121 @wraps(fn)
123 122 def wrapper(*args, **kwargs):
124 123 coerce = kwargs.pop('coerce', None)
125 124 if coerce:
126 125 self.time_of_last_call = datetime.datetime.now()
127 126 return fn(*args, **kwargs)
128 127 else:
129 128 now = datetime.datetime.now()
130 129 time_since_last_call = now - self.time_of_last_call
131 130 time_left = self.throttle_period - time_since_last_call
132 131
133 132 if time_left > datetime.timedelta(seconds=0):
134 133 return
135 134
136 135 self.time_of_last_call = datetime.datetime.now()
137 136 return fn(*args, **kwargs)
138 137
139 138 return wrapper
140 139
141 140 def apply_throttle(value):
142 141
143 142 @Throttle(seconds=value)
144 143 def fnThrottled(fn):
145 144 fn()
146 145
147 146 return fnThrottled
148 147
149 148
150 149 @MPDecorator
151 150 class Plot(Operation):
152 151 """Base class for Schain plotting operations
153 152
154 153 This class should never be used directly; you must subclass it to create a new operation.
155 154 Child classes must be defined as follows:
156 155
157 156 ExamplePlot(Plot):
158 157
159 158 CODE = 'code'
160 159 colormap = 'jet'
161 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
162 161
163 162 def setup(self):
164 163 pass
165 164
166 165 def plot(self):
167 166 pass
168 167
169 168 """
170 169
171 170 CODE = 'Figure'
172 171 colormap = 'jet'
173 172 bgcolor = 'white'
174 173 buffering = True
175 174 __missing = 1E30
176 175
177 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
178 177 'showprofile']
179 178
180 179 def __init__(self):
181 180
182 181 Operation.__init__(self)
183 182 self.isConfig = False
184 183 self.isPlotConfig = False
185 184 self.save_time = 0
186 185 self.sender_time = 0
187 186 self.data = None
188 187 self.firsttime = True
189 self.sender_queue = Queue(maxsize=60)
188 self.sender_queue = deque(maxlen=10)
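        # bounded buffer of timestamps pending delivery to the server; with
        # maxlen=10 the oldest entries are silently dropped instead of blocking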
190 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
191 190
192 191 def __fmtTime(self, x, pos):
193 192 '''
194 193 '''
195 194
196 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
197 196
198 197 def __setup(self, **kwargs):
199 198 '''
200 199 Initialize variables
201 200 '''
202 201
203 202 self.figures = []
204 203 self.axes = []
205 204 self.cb_axes = []
206 205 self.localtime = kwargs.pop('localtime', True)
207 206 self.show = kwargs.get('show', True)
208 207 self.save = kwargs.get('save', False)
209 208 self.save_period = kwargs.get('save_period', 0)
210 209 self.colormap = kwargs.get('colormap', self.colormap)
211 210 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
212 211 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
213 212 self.colormaps = kwargs.get('colormaps', None)
214 213 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
215 214 self.showprofile = kwargs.get('showprofile', False)
216 215 self.title = kwargs.get('wintitle', self.CODE.upper())
217 216 self.cb_label = kwargs.get('cb_label', None)
218 217 self.cb_labels = kwargs.get('cb_labels', None)
219 218 self.labels = kwargs.get('labels', None)
220 219 self.xaxis = kwargs.get('xaxis', 'frequency')
221 220 self.zmin = kwargs.get('zmin', None)
222 221 self.zmax = kwargs.get('zmax', None)
223 222 self.zlimits = kwargs.get('zlimits', None)
224 223 self.xmin = kwargs.get('xmin', None)
225 224 self.xmax = kwargs.get('xmax', None)
226 225 self.xrange = kwargs.get('xrange', 12)
227 226 self.xscale = kwargs.get('xscale', None)
228 227 self.ymin = kwargs.get('ymin', None)
229 228 self.ymax = kwargs.get('ymax', None)
230 229 self.yscale = kwargs.get('yscale', None)
231 230 self.xlabel = kwargs.get('xlabel', None)
232 231 self.attr_time = kwargs.get('attr_time', 'utctime')
232 self.attr_data = kwargs.get('attr_data', 'data_param')
233 233 self.decimation = kwargs.get('decimation', None)
234 234 self.showSNR = kwargs.get('showSNR', False)
235 235 self.oneFigure = kwargs.get('oneFigure', True)
236 236 self.width = kwargs.get('width', None)
237 237 self.height = kwargs.get('height', None)
238 238 self.colorbar = kwargs.get('colorbar', True)
239 239 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
240 240 self.channels = kwargs.get('channels', None)
241 241 self.titles = kwargs.get('titles', [])
242 242 self.polar = False
243 243 self.type = kwargs.get('type', 'iq')
244 244 self.grid = kwargs.get('grid', False)
245 245 self.pause = kwargs.get('pause', False)
246 246 self.save_code = kwargs.get('save_code', self.CODE)
247 247 self.throttle = kwargs.get('throttle', 0)
248 248 self.exp_code = kwargs.get('exp_code', None)
249 249 self.server = kwargs.get('server', False)
250 250 self.sender_period = kwargs.get('sender_period', 60)
251 251 self.tag = kwargs.get('tag', '')
252 252 self.height_index = kwargs.get('height_index', None)
253 253 self.__throttle_plot = apply_throttle(self.throttle)
254 self.data = PlotterData(
255 self.CODE, self.throttle, self.exp_code, self.localtime, self.buffering, snr=self.showSNR)
254 code = self.attr_data if self.attr_data else self.CODE
255 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
256 256
257 257 if self.server:
258 258 if not self.server.startswith('tcp://'):
259 259 self.server = 'tcp://{}'.format(self.server)
260 260 log.success(
261 261 'Sending to server: {}'.format(self.server),
262 262 self.name
263 263 )
264 264
265 265 def __setup_plot(self):
266 266 '''
267 267 Common setup for all figures, here figures and axes are created
268 268 '''
269 269
270 270 self.setup()
271 271
272 272 self.time_label = 'LT' if self.localtime else 'UTC'
273 273
274 274 if self.width is None:
275 275 self.width = 8
276 276
277 277 self.figures = []
278 278 self.axes = []
279 279 self.cb_axes = []
280 280 self.pf_axes = []
281 281 self.cmaps = []
282 282
283 283 size = '15%' if self.ncols == 1 else '30%'
284 284 pad = '4%' if self.ncols == 1 else '8%'
285 285
286 286 if self.oneFigure:
287 287 if self.height is None:
288 288 self.height = 1.4 * self.nrows + 1
289 289 fig = plt.figure(figsize=(self.width, self.height),
290 290 edgecolor='k',
291 291 facecolor='w')
292 292 self.figures.append(fig)
293 293 for n in range(self.nplots):
294 294 ax = fig.add_subplot(self.nrows, self.ncols,
295 295 n + 1, polar=self.polar)
296 296 ax.tick_params(labelsize=8)
297 297 ax.firsttime = True
298 298 ax.index = 0
299 299 ax.press = None
300 300 self.axes.append(ax)
301 301 if self.showprofile:
302 302 cax = self.__add_axes(ax, size=size, pad=pad)
303 303 cax.tick_params(labelsize=8)
304 304 self.pf_axes.append(cax)
305 305 else:
306 306 if self.height is None:
307 307 self.height = 3
308 308 for n in range(self.nplots):
309 309 fig = plt.figure(figsize=(self.width, self.height),
310 310 edgecolor='k',
311 311 facecolor='w')
312 312 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
313 313 ax.tick_params(labelsize=8)
314 314 ax.firsttime = True
315 315 ax.index = 0
316 316 ax.press = None
317 317 self.figures.append(fig)
318 318 self.axes.append(ax)
319 319 if self.showprofile:
320 320 cax = self.__add_axes(ax, size=size, pad=pad)
321 321 cax.tick_params(labelsize=8)
322 322 self.pf_axes.append(cax)
323 323
324 324 for n in range(self.nrows):
325 325 if self.colormaps is not None:
326 326 cmap = plt.get_cmap(self.colormaps[n])
327 327 else:
328 328 cmap = plt.get_cmap(self.colormap)
329 329 cmap.set_bad(self.bgcolor, 1.)
330 330 self.cmaps.append(cmap)
331 331
332 332 def __add_axes(self, ax, size='30%', pad='8%'):
333 333 '''
334 334 Add new axes to the given figure
335 335 '''
336 336 divider = make_axes_locatable(ax)
337 337 nax = divider.new_horizontal(size=size, pad=pad)
338 338 ax.figure.add_axes(nax)
339 339 return nax
340 340
341 341 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
342 342 '''
343 343 Create a masked array for missing data
344 344 '''
345 345 if x_buffer.shape[0] < 2:
346 346 return x_buffer, y_buffer, z_buffer
347 347
348 348 deltas = x_buffer[1:] - x_buffer[0:-1]
349 349 x_median = numpy.median(deltas)
350 350
351 351 index = numpy.where(deltas > 5 * x_median)
352 352
353 353 if len(index[0]) != 0:
354 354 z_buffer[::, index[0], ::] = self.__missing
355 355 z_buffer = numpy.ma.masked_inside(z_buffer,
356 356 0.99 * self.__missing,
357 357 1.01 * self.__missing)
358 358
359 359 return x_buffer, y_buffer, z_buffer
360 360
361 361 def decimate(self):
362 362
363 363 # dx = int(len(self.x)/self.__MAXNUMX) + 1
364 364 dy = int(len(self.y) / self.decimation) + 1
365 365
366 366 # x = self.x[::dx]
367 367 x = self.x
368 368 y = self.y[::dy]
369 369 z = self.z[::, ::, ::dy]
370 370
371 371 return x, y, z
372 372
373 373 def format(self):
374 374 '''
375 375 Set min and max values, labels, ticks and titles
376 376 '''
377 377
378 378 for n, ax in enumerate(self.axes):
379 379 if ax.firsttime:
380 380 if self.xaxis != 'time':
381 381 xmin = self.xmin
382 382 xmax = self.xmax
383 383 else:
384 384 xmin = self.tmin
385 385 xmax = self.tmin + self.xrange*60*60
386 386 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
387 387 ax.xaxis.set_major_locator(LinearLocator(9))
388 ymin = self.ymin if self.ymin else numpy.nanmin(self.y)
389 ymax = self.ymax if self.ymax else numpy.nanmax(self.y)
388 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
389 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
390 390 ax.set_facecolor(self.bgcolor)
391 391 if self.xscale:
392 392 ax.xaxis.set_major_formatter(FuncFormatter(
393 393 lambda x, pos: '{0:g}'.format(x*self.xscale)))
394 394 if self.yscale:
395 395 ax.yaxis.set_major_formatter(FuncFormatter(
396 396 lambda x, pos: '{0:g}'.format(x*self.yscale)))
397 397 if self.xlabel is not None:
398 398 ax.set_xlabel(self.xlabel)
399 399 if self.ylabel is not None:
400 400 ax.set_ylabel(self.ylabel)
401 401 if self.showprofile:
402 402 self.pf_axes[n].set_ylim(ymin, ymax)
403 403 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
404 404 self.pf_axes[n].set_xlabel('dB')
405 405 self.pf_axes[n].grid(b=True, axis='x')
406 406 [tick.set_visible(False)
407 407 for tick in self.pf_axes[n].get_yticklabels()]
408 408 if self.colorbar:
409 409 ax.cbar = plt.colorbar(
410 410 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
411 411 ax.cbar.ax.tick_params(labelsize=8)
412 412 ax.cbar.ax.press = None
413 413 if self.cb_label:
414 414 ax.cbar.set_label(self.cb_label, size=8)
415 415 elif self.cb_labels:
416 416 ax.cbar.set_label(self.cb_labels[n], size=8)
417 417 else:
418 418 ax.cbar = None
419 419 ax.set_xlim(xmin, xmax)
420 420 ax.set_ylim(ymin, ymax)
421 421 ax.firsttime = False
422 422 if self.grid:
423 423 ax.grid(True)
424 424 if not self.polar:
425 425 ax.set_title('{} {} {}'.format(
426 426 self.titles[n],
427 427 self.getDateTime(self.data.max_time).strftime(
428 428 '%Y-%m-%d %H:%M:%S'),
429 429 self.time_label),
430 430 size=8)
431 431 else:
432 432 ax.set_title('{}'.format(self.titles[n]), size=8)
433 433 ax.set_ylim(0, 90)
434 434 ax.set_yticks(numpy.arange(0, 90, 20))
435 435 ax.yaxis.labelpad = 40
436 436
437 437 if self.firsttime:
438 438 for n, fig in enumerate(self.figures):
439 439 fig.subplots_adjust(**self.plots_adjust)
440 440 self.firsttime = False
441 441
442 442 def clear_figures(self):
443 443 '''
444 444 Reset axes for redraw plots
445 445 '''
446 446
447 447 for ax in self.axes+self.pf_axes+self.cb_axes:
448 448 ax.clear()
449 449 ax.firsttime = True
450 450 if hasattr(ax, 'cbar') and ax.cbar:
451 451 ax.cbar.remove()
452 452
453 453 def __plot(self):
454 454 '''
455 455 Main function to plot, format and save figures
456 456 '''
457 457
458 458 self.plot()
459 459 self.format()
460 460
461 461 for n, fig in enumerate(self.figures):
462 462 if self.nrows == 0 or self.nplots == 0:
463 463 log.warning('No data', self.name)
464 464 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
465 465 fig.canvas.manager.set_window_title(self.CODE)
466 466 continue
467 467
468 468 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
469 469 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
470 470 fig.canvas.draw()
471 471 if self.show:
472 472 fig.show()
473 473 figpause(0.01)
474 474
475 475 if self.save:
476 476 self.save_figure(n)
477 477
478 478 if self.server:
479 479 self.send_to_server()
480 480
481 def __update(self, dataOut, timestamp):
482 '''
483 '''
484
485 metadata = {
486 'yrange': dataOut.heightList,
487 'interval': dataOut.timeInterval,
488 'channels': dataOut.channelList
489 }
490
491 data, meta = self.update(dataOut)
492 metadata.update(meta)
493 self.data.update(data, timestamp, metadata)
494
481 495 def save_figure(self, n):
482 496 '''
483 497 '''
484 498
485 if (self.data.tm - self.save_time) <= self.save_period:
499 if (self.data.max_time - self.save_time) <= self.save_period:
486 500 return
487 501
488 self.save_time = self.data.tm
502 self.save_time = self.data.max_time
489 503
490 504 fig = self.figures[n]
491 505
492 506 figname = os.path.join(
493 507 self.save,
494 508 self.save_code,
495 509 '{}_{}.png'.format(
496 510 self.save_code,
497 511 self.getDateTime(self.data.max_time).strftime(
498 512 '%Y%m%d_%H%M%S'
499 513 ),
500 514 )
501 515 )
502 516 log.log('Saving figure: {}'.format(figname), self.name)
503 517 if not os.path.isdir(os.path.dirname(figname)):
504 518 os.makedirs(os.path.dirname(figname))
505 519 fig.savefig(figname)
506 520
507 521 if self.throttle == 0:
508 522 figname = os.path.join(
509 523 self.save,
510 524 '{}_{}.png'.format(
511 525 self.save_code,
512 526 self.getDateTime(self.data.min_time).strftime(
513 527 '%Y%m%d'
514 528 ),
515 529 )
516 530 )
517 531 fig.savefig(figname)
518 532
519 533 def send_to_server(self):
520 534 '''
521 535 '''
522 536
523 interval = self.data.tm - self.sender_time
537 if self.exp_code == None:
538 log.warning('Missing `exp_code` skipping sending to server...')
539
540 last_time = self.data.max_time
541 interval = last_time - self.sender_time
524 542 if interval < self.sender_period:
525 543 return
526 544
527 self.sender_time = self.data.tm
545 self.sender_time = last_time
528 546
529 547 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
530 548 for attr in attrs:
531 549 value = getattr(self, attr)
532 550 if value:
533 551 if isinstance(value, (numpy.float32, numpy.float64)):
534 552 value = round(float(value), 2)
535 553 self.data.meta[attr] = value
536 554 if self.colormap == 'jet':
537 555 self.data.meta['colormap'] = 'Jet'
538 556 elif 'RdBu' in self.colormap:
539 557 self.data.meta['colormap'] = 'RdBu'
540 558 else:
541 559 self.data.meta['colormap'] = 'Viridis'
542 560 self.data.meta['interval'] = int(interval)
543 561
544 try:
545 self.sender_queue.put(self.data.tm, block=False)
546 except:
547 tm = self.sender_queue.get()
548 self.sender_queue.put(self.data.tm)
562 self.sender_queue.append(last_time)
549 563
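        # drain the pending-timestamp buffer: each entry is jsonified and sent
        # over the REQ socket; if the server does not answer within the poll
        # timeout, the timestamp is pushed back to the front of the deque and
        # the socket is re-created before giving up for this round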
550 564 while True:
551 if self.sender_queue.empty():
552 break
553 tm = self.sender_queue.get()
554 565 try:
555 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
556 except:
557 continue
566 tm = self.sender_queue.popleft()
567 except IndexError:
568 break
569 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
558 570 self.socket.send_string(msg)
559 socks = dict(self.poll.poll(5000))
571 socks = dict(self.poll.poll(2000))
560 572 if socks.get(self.socket) == zmq.POLLIN:
561 573 reply = self.socket.recv_string()
562 574 if reply == 'ok':
563 575 log.log("Response from server ok", self.name)
564 time.sleep(0.2)
576 time.sleep(0.1)
565 577 continue
566 578 else:
567 579 log.warning(
568 580 "Malformed reply from server: {}".format(reply), self.name)
569 581 else:
570 582 log.warning(
571 583 "No response from server, retrying...", self.name)
572 self.sender_queue.put(self.data.tm)
584 self.sender_queue.appendleft(tm)
573 585 self.socket.setsockopt(zmq.LINGER, 0)
574 586 self.socket.close()
575 587 self.poll.unregister(self.socket)
576 time.sleep(0.1)
577 588 self.socket = self.context.socket(zmq.REQ)
578 589 self.socket.connect(self.server)
579 590 self.poll.register(self.socket, zmq.POLLIN)
580 591 break
581 592
582 593 def setup(self):
583 594 '''
584 595 This method should be implemented in the child class, the following
585 596 attributes should be set:
586 597
587 598 self.nrows: number of rows
588 599 self.ncols: number of cols
589 600 self.nplots: number of plots (channels or pairs)
590 601 self.ylabel: label for Y axes
591 602 self.titles: list of axes title
592 603
593 604 '''
594 605 raise NotImplementedError
595 606
596 607 def plot(self):
597 608 '''
598 Must be defined in the child class
609 Must be defined in the child class; this is the actual plotting method
599 610 '''
600 611 raise NotImplementedError
612
613 def update(self, dataOut):
614 '''
615 Should be overridden in the child class to extract the data to plot from `dataOut`; it must return a (data, meta) tuple (see the sketch after this class)
616 '''
617
618 data = {
619 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
620 }
621 meta = {}
622
623 return data, meta
601 624
602 625 def run(self, dataOut, **kwargs):
603 626 '''
604 627 Main plotting routine
605 628 '''
606 629
607 630 if self.isConfig is False:
608 631 self.__setup(**kwargs)
609 632
610 633 if self.localtime:
611 634 self.getDateTime = datetime.datetime.fromtimestamp
612 635 else:
613 636 self.getDateTime = datetime.datetime.utcfromtimestamp
614 637
615 638 self.data.setup()
616 639 self.isConfig = True
617 640 if self.server:
618 641 self.context = zmq.Context()
619 642 self.socket = self.context.socket(zmq.REQ)
620 643 self.socket.connect(self.server)
621 644 self.poll = zmq.Poller()
622 645 self.poll.register(self.socket, zmq.POLLIN)
623 646
624 647 tm = getattr(dataOut, self.attr_time)
625 648
626 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
649 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
627 650 self.save_time = tm
628 651 self.__plot()
629 652 self.tmin += self.xrange*60*60
630 653 self.data.setup()
631 654 self.clear_figures()
632 655
633 self.data.update(dataOut, tm)
656 self.__update(dataOut, tm)
634 657
635 658 if self.isPlotConfig is False:
636 659 self.__setup_plot()
637 660 self.isPlotConfig = True
638 661 if self.xaxis == 'time':
639 662 dt = self.getDateTime(tm)
640 663 if self.xmin is None:
641 664 self.tmin = tm
642 665 self.xmin = dt.hour
643 666 minutes = (self.xmin-int(self.xmin)) * 60
644 667 seconds = (minutes - int(minutes)) * 60
645 668 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
646 669 datetime.datetime(1970, 1, 1)).total_seconds()
647 670 if self.localtime:
648 671 self.tmin += time.timezone
649 672
650 673 if self.xmin is not None and self.xmax is not None:
651 674 self.xrange = self.xmax - self.xmin
652 675
653 676 if self.throttle == 0:
654 677 self.__plot()
655 678 else:
656 679 self.__throttle_plot(self.__plot)#, coerce=coerce)
657 680
658 681 def close(self):
659 682
660 683 if self.data and not self.data.flagNoData:
661 self.save_time = self.data.tm
684 self.save_time = self.data.max_time
662 685 self.__plot()
663 686 if self.data and not self.data.flagNoData and self.pause:
664 687 figpause(10)
665 688
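# A minimal sketch of a child class written against the refactored
# setup/update/plot API defined above. `ExampleRTIPlot` and the
# `data_example` attribute are hypothetical names used only for
# illustration; real children (e.g. GenericRTIPlot, SpectraHeisPlot)
# follow the same pattern.

class ExampleRTIPlot(Plot):

    CODE = 'example'
    plot_type = 'pcolorbuffer'

    def setup(self):
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.ylabel = 'Range [km]'
        self.titles = ['Example']

    def update(self, dataOut):
        # extract only what this plot needs; Plot.__update merges the common
        # metadata (yrange, interval, channels) before storing it in self.data
        data = {self.CODE: getattr(dataOut, 'data_example')}
        return data, {}

    def plot(self):
        self.x = self.data.times
        self.y = self.data.yrange
        z = self.data[self.CODE]
        for n, ax in enumerate(self.axes):
            if ax.firsttime:
                ax.plt = ax.pcolormesh(self.x, self.y, z[n].T,
                                       cmap=self.cmaps[n])
            else:
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(self.x, self.y, z[n].T,
                                       cmap=self.cmaps[n])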
@@ -1,342 +1,101
1 '''
2 Created on Jul 9, 2014
1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 # All rights reserved.
3 #
4 # Distributed under the terms of the BSD 3-clause license.
5 """Classes to plo Specra Heis data
3 6
4 @author: roj-idl71
5 '''
6 import os
7 import datetime
8 import numpy
9
10 from schainpy.model.graphics.jroplot_base import Plot
11
12
13 class SpectraHeisScope(Plot):
14
15
16 isConfig = None
17 __nsubplots = None
18
19 WIDTHPROF = None
20 HEIGHTPROF = None
21 PREFIX = 'spc'
22
23 def __init__(self):#, **kwargs):
24
25 Plot.__init__(self)#, **kwargs)
26 self.isConfig = False
27 self.__nsubplots = 1
28
29 self.WIDTH = 230
30 self.HEIGHT = 250
31 self.WIDTHPROF = 120
32 self.HEIGHTPROF = 0
33 self.counter_imagwr = 0
34
35 self.PLOT_CODE = SPEC_CODE
36
37 def getSubplots(self):
38
39 ncol = int(numpy.sqrt(self.nplots)+0.9)
40 nrow = int(self.nplots*1./ncol + 0.9)
41
42 return nrow, ncol
43
44 def setup(self, id, nplots, wintitle, show):
45
46 showprofile = False
47 self.__showprofile = showprofile
48 self.nplots = nplots
49
50 ncolspan = 1
51 colspan = 1
52 if showprofile:
53 ncolspan = 3
54 colspan = 2
55 self.__nsubplots = 2
56
57 self.createFigure(id = id,
58 wintitle = wintitle,
59 widthplot = self.WIDTH + self.WIDTHPROF,
60 heightplot = self.HEIGHT + self.HEIGHTPROF,
61 show = show)
62
63 nrow, ncol = self.getSubplots()
64
65 counter = 0
66 for y in range(nrow):
67 for x in range(ncol):
68
69 if counter >= self.nplots:
70 break
71
72 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
73
74 if showprofile:
75 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
76
77 counter += 1
78
79
80 def run(self, dataOut, id, wintitle="", channelList=None,
81 xmin=None, xmax=None, ymin=None, ymax=None, save=False,
82 figpath='./', figfile=None, ftp=False, wr_period=1, show=True,
83 server=None, folder=None, username=None, password=None,
84 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
85
86 """
87
88 Input:
89 dataOut :
90 id :
91 wintitle :
92 channelList :
93 xmin : None,
94 xmax : None,
95 ymin : None,
96 ymax : None,
97 """
98
99 if dataOut.flagNoData:
100 return dataOut
101
102 if dataOut.realtime:
103 if not(isRealtime(utcdatatime = dataOut.utctime)):
104 print('Skipping this plot function')
105 return
106
107 if channelList == None:
108 channelIndexList = dataOut.channelIndexList
109 else:
110 channelIndexList = []
111 for channel in channelList:
112 if channel not in dataOut.channelList:
113 raise ValueError("Channel %d is not in dataOut.channelList")
114 channelIndexList.append(dataOut.channelList.index(channel))
115
116 # x = dataOut.heightList
117 c = 3E8
118 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
119 # should be changed for the 1 MHz and 100 kHz cases
120 x = numpy.arange(-1*dataOut.nHeights/2.,dataOut.nHeights/2.)*(c/(2*deltaHeight*dataOut.nHeights*1000))
121 # for 1 MHz, uncomment the following line
122 #x= x/(10000.0)
123 # y = dataOut.data[channelIndexList,:] * numpy.conjugate(dataOut.data[channelIndexList,:])
124 # y = y.real
125 factor = dataOut.normFactor
126 data = dataOut.data_spc / factor
127 datadB = 10.*numpy.log10(data)
128 y = datadB
129
130 #thisDatetime = dataOut.datatime
131 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
132 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
133 xlabel = ""
134 # for 1 MHz, uncomment the following line
135 #xlabel = "Frequency x 10000"
136 ylabel = "Intensity (dB)"
137
138 if not self.isConfig:
139 nplots = len(channelIndexList)
140
141 self.setup(id=id,
142 nplots=nplots,
143 wintitle=wintitle,
144 show=show)
145
146 if xmin == None: xmin = numpy.nanmin(x)
147 if xmax == None: xmax = numpy.nanmax(x)
148 if ymin == None: ymin = numpy.nanmin(y)
149 if ymax == None: ymax = numpy.nanmax(y)
150
151 self.FTP_WEI = ftp_wei
152 self.EXP_CODE = exp_code
153 self.SUB_EXP_CODE = sub_exp_code
154 self.PLOT_POS = plot_pos
155
156 self.isConfig = True
157
158 self.setWinTitle(title)
159
160 for i in range(len(self.axesList)):
161 ychannel = y[i,:]
162 str_datetime = '%s %s'%(thisDatetime.strftime("%Y/%m/%d"),thisDatetime.strftime("%H:%M:%S"))
163 title = "Channel %d: %4.2fdB: %s" %(dataOut.channelList[channelIndexList[i]], numpy.max(ychannel), str_datetime)
164 axes = self.axesList[i]
165 axes.pline(x, ychannel,
166 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax,
167 xlabel=xlabel, ylabel=ylabel, title=title, grid='both')
168
169
170 self.draw()
171
172 self.save(figpath=figpath,
173 figfile=figfile,
174 save=save,
175 ftp=ftp,
176 wr_period=wr_period,
177 thisDatetime=thisDatetime)
178
179 return dataOut
180
181
182 class RTIfromSpectraHeis(Plot):
183
184 isConfig = None
185 __nsubplots = None
186
187 PREFIX = 'rtinoise'
188
189 def __init__(self):#, **kwargs):
190 Plot.__init__(self)#, **kwargs)
191 self.timerange = 24*60*60
192 self.isConfig = False
193 self.__nsubplots = 1
194
195 self.WIDTH = 820
196 self.HEIGHT = 200
197 self.WIDTHPROF = 120
198 self.HEIGHTPROF = 0
199 self.counter_imagwr = 0
200 self.xdata = None
201 self.ydata = None
202 self.figfile = None
203
204 self.PLOT_CODE = RTI_CODE
205
206 def getSubplots(self):
207
208 ncol = 1
209 nrow = 1
210
211 return nrow, ncol
212
213 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
214
215 self.__showprofile = showprofile
216 self.nplots = nplots
217
218 ncolspan = 7
219 colspan = 6
220 self.__nsubplots = 2
221
222 self.createFigure(id = id,
223 wintitle = wintitle,
224 widthplot = self.WIDTH+self.WIDTHPROF,
225 heightplot = self.HEIGHT+self.HEIGHTPROF,
226 show = show)
227
228 nrow, ncol = self.getSubplots()
229
230 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
7 """
231 8
9 import numpy
232 10
233 def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True',
234 xmin=None, xmax=None, ymin=None, ymax=None,
235 timerange=None,
236 save=False, figpath='./', figfile=None, ftp=False, wr_period=1, show=True,
237 server=None, folder=None, username=None, password=None,
238 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
11 from schainpy.model.graphics.jroplot_base import Plot, plt
239 12
240 if dataOut.flagNoData:
241 return dataOut
242 13
14 class SpectraHeisPlot(Plot):
243 15
244 if channelList == None:
245 channelIndexList = dataOut.channelIndexList
246 channelList = dataOut.channelList
247 else:
248 channelIndexList = []
249 for channel in channelList:
250 if channel not in dataOut.channelList:
251 raise ValueError("Channel %d is not in dataOut.channelList")
252 channelIndexList.append(dataOut.channelList.index(channel))
16 CODE = 'spc_heis'
253 17
254 if timerange != None:
255 self.timerange = timerange
18 def setup(self):
256 19
257 x = dataOut.getTimeRange()
258 y = dataOut.heightList
20 self.nplots = len(self.data.channels)
21 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
22 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
23 self.height = 2.6 * self.nrows
24 self.width = 3.5 * self.ncols
25 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.95, 'bottom': 0.08})
26 self.ylabel = 'Intensity [dB]'
27 self.xlabel = 'Frequency [KHz]'
28 self.colorbar = False
259 29
260 factor = dataOut.normFactor
261 data = dataOut.data_spc / factor
262 data = numpy.average(data,axis=1)
263 datadB = 10*numpy.log10(data)
30 def update(self, dataOut):
264 31
265 # factor = dataOut.normFactor
266 # noise = dataOut.getNoise()/factor
267 # noisedB = 10*numpy.log10(noise)
32 data = {}
33 meta = {}
34 spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
35 data['spc_heis'] = spc
36
37 return data, meta
268 38
269 #thisDatetime = dataOut.datatime
270 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
271 title = wintitle + " RTI: %s" %(thisDatetime.strftime("%d-%b-%Y"))
272 xlabel = "Local Time"
273 ylabel = "Intensity (dB)"
39 def plot(self):
274 40
275 if not self.isConfig:
41 c = 3E8
42 deltaHeight = self.data.yrange[1] - self.data.yrange[0]
43 x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
44 self.y = self.data[-1]['spc_heis']
45 self.titles = []
276 46
277 nplots = 1
47 for n, ax in enumerate(self.axes):
48 ychannel = self.y[n,:]
49 if ax.firsttime:
50 self.xmin = min(x) if self.xmin is None else self.xmin
51 self.xmax = max(x) if self.xmax is None else self.xmax
52 ax.plt = ax.plot(x, ychannel, lw=1, color='b')[0]
53 else:
54 ax.plt.set_data(x, ychannel)
278 55
279 self.setup(id=id,
280 nplots=nplots,
281 wintitle=wintitle,
282 showprofile=showprofile,
283 show=show)
56 self.titles.append("Channel {}: {:4.2f}dB".format(n, numpy.max(ychannel)))
284 57
285 self.tmin, self.tmax = self.getTimeLim(x, xmin, xmax)
286 58
287 if ymin == None: ymin = numpy.nanmin(datadB)
288 if ymax == None: ymax = numpy.nanmax(datadB)
59 class RTIHeisPlot(Plot):
289 60
290 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
291 self.isConfig = True
292 self.figfile = figfile
293 self.xdata = numpy.array([])
294 self.ydata = numpy.array([])
61 CODE = 'rti_heis'
295 62
296 self.FTP_WEI = ftp_wei
297 self.EXP_CODE = exp_code
298 self.SUB_EXP_CODE = sub_exp_code
299 self.PLOT_POS = plot_pos
63 def setup(self):
300 64
301 self.setWinTitle(title)
65 self.xaxis = 'time'
66 self.ncols = 1
67 self.nrows = 1
68 self.nplots = 1
69 self.ylabel = 'Intensity [dB]'
70 self.xlabel = 'Time'
71 self.titles = ['RTI']
72 self.colorbar = False
73 self.height = 4
74 self.plots_adjust.update({'right': 0.85 })
302 75
76 def update(self, dataOut):
303 77
304 # title = "RTI %s" %(thisDatetime.strftime("%d-%b-%Y"))
305 title = "RTI - %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
78 data = {}
79 meta = {}
80 spc = dataOut.data_spc / dataOut.normFactor
81 spc = 10*numpy.log10(numpy.average(spc, axis=1))
82 data['rti_heis'] = spc
83
84 return data, meta
306 85
307 legendlabels = ["channel %d"%idchannel for idchannel in channelList]
308 axes = self.axesList[0]
86 def plot(self):
309 87
310 self.xdata = numpy.hstack((self.xdata, x[0:1]))
88 x = self.data.times
89 Y = self.data['rti_heis']
311 90
312 if len(self.ydata)==0:
313 self.ydata = datadB[channelIndexList].reshape(-1,1)
91 if self.axes[0].firsttime:
92 self.ymin = numpy.nanmin(Y) - 5 if self.ymin == None else self.ymin
93 self.ymax = numpy.nanmax(Y) + 5 if self.ymax == None else self.ymax
94 for ch in self.data.channels:
95 y = Y[ch]
96 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
97 plt.legend(bbox_to_anchor=(1.18, 1.0))
314 98 else:
315 self.ydata = numpy.hstack((self.ydata, datadB[channelIndexList].reshape(-1,1)))
316
317
318 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
319 xmin=self.tmin, xmax=self.tmax, ymin=ymin, ymax=ymax,
320 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='.', markersize=8, linestyle="solid", grid='both',
321 XAxisAsTime=True
322 )
323
324 self.draw()
325
326 update_figfile = False
327
328 if dataOut.ltctime >= self.tmax:
329 self.counter_imagwr = wr_period
330 self.isConfig = False
331 update_figfile = True
332
333 self.save(figpath=figpath,
334 figfile=figfile,
335 save=save,
336 ftp=ftp,
337 wr_period=wr_period,
338 thisDatetime=thisDatetime,
339 update_figfile=update_figfile)
340
341
342 return dataOut No newline at end of file
99 for ch in self.data.channels:
100 y = Y[ch]
101 self.axes[0].lines[ch].set_data(x, y)
@@ -1,339 +1,358
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 def update(self, dataOut):
51
52 data = {
53 'snr': 10*numpy.log10(dataOut.data_snr)
54 }
55
56 return data, {}
50 57
51 58 class DopplerPlot(RTIPlot):
52 59 '''
53 60 Plot for DOPPLER Data (1st moment)
54 61 '''
55 62
56 63 CODE = 'dop'
57 64 colormap = 'jet'
58 65
66 def update(self, dataOut):
67
68 data = {
69 'dop': 10*numpy.log10(dataOut.data_dop)
70 }
71
72 return data, {}
59 73
60 74 class PowerPlot(RTIPlot):
61 75 '''
62 76 Plot for Power Data (0 moment)
63 77 '''
64 78
65 79 CODE = 'pow'
66 80 colormap = 'jet'
67 81
82 def update(self, dataOut):
83
84 data = {
85 'pow': 10*numpy.log10(dataOut.data_pow)
86 }
87
88 return data, {}
68 89
69 90 class SpectralWidthPlot(RTIPlot):
70 91 '''
71 92 Plot for Spectral Width Data (2nd moment)
72 93 '''
73 94
74 95 CODE = 'width'
75 96 colormap = 'jet'
76 97
98 def update(self, dataOut):
99
100 data = {
101 'width': dataOut.data_width
102 }
103
104 return data, {}
77 105
78 106 class SkyMapPlot(Plot):
79 107 '''
80 108 Plot for meteors detection data
81 109 '''
82 110
83 111 CODE = 'param'
84 112
85 113 def setup(self):
86 114
87 115 self.ncols = 1
88 116 self.nrows = 1
89 117 self.width = 7.2
90 118 self.height = 7.2
91 119 self.nplots = 1
92 120 self.xlabel = 'Zonal Zenith Angle (deg)'
93 121 self.ylabel = 'Meridional Zenith Angle (deg)'
94 122 self.polar = True
95 123 self.ymin = -180
96 124 self.ymax = 180
97 125 self.colorbar = False
98 126
99 127 def plot(self):
100 128
101 129 arrayParameters = numpy.concatenate(self.data['param'])
102 130 error = arrayParameters[:, -1]
103 131 indValid = numpy.where(error == 0)[0]
104 132 finalMeteor = arrayParameters[indValid, :]
105 133 finalAzimuth = finalMeteor[:, 3]
106 134 finalZenith = finalMeteor[:, 4]
107 135
108 136 x = finalAzimuth * numpy.pi / 180
109 137 y = finalZenith
110 138
111 139 ax = self.axes[0]
112 140
113 141 if ax.firsttime:
114 142 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
115 143 else:
116 144 ax.plot.set_data(x, y)
117 145
118 146 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
119 147 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
120 148 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
121 149 dt2,
122 150 len(x))
123 151 self.titles[0] = title
124 152
125 153
126 class ParametersPlot(RTIPlot):
154 class GenericRTIPlot(Plot):
127 155 '''
128 Plot for data_param object
156 Plot for a generic data_xxxx attribute (time vs height); the attribute is selected with the `attr_data` parameter
129 157 '''
130 158
131 159 CODE = 'param'
132 colormap = 'seismic'
160 colormap = 'viridis'
161 plot_type = 'pcolorbuffer'
133 162
134 163 def setup(self):
135 164 self.xaxis = 'time'
136 165 self.ncols = 1
137 self.nrows = self.data.shape(self.CODE)[0]
166 self.nrows = self.data.shape(self.attr_data)[0]
138 167 self.nplots = self.nrows
139 168 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
140 169
141 170 if not self.xlabel:
142 171 self.xlabel = 'Time'
143
144 if self.showSNR:
145 self.nrows += 1
146 self.nplots += 1
147 172
148 173 self.ylabel = 'Height [km]'
149 174 if not self.titles:
150 175 self.titles = self.data.parameters \
151 176 if self.data.parameters else ['Param {}'.format(x) for x in range(self.nrows)]
152 if self.showSNR:
153 self.titles.append('SNR')
154 177
178 def update(self, dataOut):
179
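        # `attr_data` names the dataOut attribute to render (default
        # 'data_param', taken from the operation kwargs in Plot.__setup),
        # so this same class can display any time-height parameter array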
180 data = {
181 self.attr_data : getattr(dataOut, self.attr_data)
182 }
183
184 meta = {}
185
186 return data, meta
187
155 188 def plot(self):
156 self.data.normalize_heights()
189 # self.data.normalize_heights()
157 190 self.x = self.data.times
158 self.y = self.data.heights
159 if self.showSNR:
160 self.z = numpy.concatenate(
161 (self.data[self.CODE], self.data['snr'])
162 )
163 else:
164 self.z = self.data[self.CODE]
191 self.y = self.data.yrange
192 self.z = self.data[self.attr_data]
165 193
166 194 self.z = numpy.ma.masked_invalid(self.z)
167 195
168 196 if self.decimation is None:
169 197 x, y, z = self.fill_gaps(self.x, self.y, self.z)
170 198 else:
171 199 x, y, z = self.fill_gaps(*self.decimate())
172 200
173 201 for n, ax in enumerate(self.axes):
174 202
175 203 self.zmax = self.zmax if self.zmax is not None else numpy.max(
176 204 self.z[n])
177 205 self.zmin = self.zmin if self.zmin is not None else numpy.min(
178 206 self.z[n])
179 207
180 208 if ax.firsttime:
181 209 if self.zlimits is not None:
182 210 self.zmin, self.zmax = self.zlimits[n]
183 211
184 212 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
185 213 vmin=self.zmin,
186 214 vmax=self.zmax,
187 215 cmap=self.cmaps[n]
188 216 )
189 217 else:
190 218 if self.zlimits is not None:
191 219 self.zmin, self.zmax = self.zlimits[n]
192 220 ax.collections.remove(ax.collections[0])
193 221 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
194 222 vmin=self.zmin,
195 223 vmax=self.zmax,
196 224 cmap=self.cmaps[n]
197 225 )
198 226
199 227
200 class OutputPlot(ParametersPlot):
201 '''
202 Plot data_output object
203 '''
204
205 CODE = 'output'
206 colormap = 'seismic'
207
208
209 228 class PolarMapPlot(Plot):
210 229 '''
211 230 Plot for weather radar
212 231 '''
213 232
214 233 CODE = 'param'
215 234 colormap = 'seismic'
216 235
217 236 def setup(self):
218 237 self.ncols = 1
219 238 self.nrows = 1
220 239 self.width = 9
221 240 self.height = 8
222 241 self.mode = self.data.meta['mode']
223 242 if self.channels is not None:
224 243 self.nplots = len(self.channels)
225 244 self.nrows = len(self.channels)
226 245 else:
227 246 self.nplots = self.data.shape(self.CODE)[0]
228 247 self.nrows = self.nplots
229 248 self.channels = list(range(self.nplots))
230 249 if self.mode == 'E':
231 250 self.xlabel = 'Longitude'
232 251 self.ylabel = 'Latitude'
233 252 else:
234 253 self.xlabel = 'Range (km)'
235 254 self.ylabel = 'Height (km)'
236 255 self.bgcolor = 'white'
237 256 self.cb_labels = self.data.meta['units']
238 257 self.lat = self.data.meta['latitude']
239 258 self.lon = self.data.meta['longitude']
240 259 self.xmin, self.xmax = float(
241 260 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
242 261 self.ymin, self.ymax = float(
243 262 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
244 263 # self.polar = True
245 264
246 265 def plot(self):
247 266
248 267 for n, ax in enumerate(self.axes):
249 268 data = self.data['param'][self.channels[n]]
250 269
251 270 zeniths = numpy.linspace(
252 271 0, self.data.meta['max_range'], data.shape[1])
253 272 if self.mode == 'E':
254 azimuths = -numpy.radians(self.data.heights)+numpy.pi/2
273 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
255 274 r, theta = numpy.meshgrid(zeniths, azimuths)
256 275 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
257 276 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
258 277 x = km2deg(x) + self.lon
259 278 y = km2deg(y) + self.lat
260 279 else:
261 azimuths = numpy.radians(self.data.heights)
280 azimuths = numpy.radians(self.data.yrange)
262 281 r, theta = numpy.meshgrid(zeniths, azimuths)
263 282 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
264 283 self.y = zeniths
265 284
266 285 if ax.firsttime:
267 286 if self.zlimits is not None:
268 287 self.zmin, self.zmax = self.zlimits[n]
269 288 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
270 289 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
271 290 vmin=self.zmin,
272 291 vmax=self.zmax,
273 292 cmap=self.cmaps[n])
274 293 else:
275 294 if self.zlimits is not None:
276 295 self.zmin, self.zmax = self.zlimits[n]
277 296 ax.collections.remove(ax.collections[0])
278 297 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
279 298 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
280 299 vmin=self.zmin,
281 300 vmax=self.zmax,
282 301 cmap=self.cmaps[n])
283 302
284 303 if self.mode == 'A':
285 304 continue
286 305
287 306 # plot district names
288 307 f = open('/data/workspace/schain_scripts/distrito.csv')
289 308 for line in f:
290 309 label, lon, lat = [s.strip() for s in line.split(',') if s]
291 310 lat = float(lat)
292 311 lon = float(lon)
293 312 # ax.plot(lon, lat, '.b', ms=2)
294 313 ax.text(lon, lat, label.decode('utf8'), ha='center',
295 314 va='bottom', size='8', color='black')
296 315
298 317 # plot district boundaries
298 317 limites = []
299 318 tmp = []
300 319 for line in open('/data/workspace/schain_scripts/lima.csv'):
301 320 if '#' in line:
302 321 if tmp:
303 322 limites.append(tmp)
304 323 tmp = []
305 324 continue
306 325 values = line.strip().split(',')
307 326 tmp.append((float(values[0]), float(values[1])))
308 327 for points in limites:
309 328 ax.add_patch(
310 329 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
311 330
312 331 # plot river basins (cuencas)
313 332 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
314 333 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
315 334 values = [line.strip().split(',') for line in f]
316 335 points = [(float(s[0]), float(s[1])) for s in values]
317 336 ax.add_patch(Polygon(points, ec='b', fc='none'))
318 337
319 338 # plot grid
320 339 for r in (15, 30, 45, 60):
321 340 ax.add_artist(plt.Circle((self.lon, self.lat),
322 341 km2deg(r), color='0.6', fill=False, lw=0.2))
323 342 ax.text(
324 343 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
325 344 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
326 345 '{}km'.format(r),
327 346 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
328 347
329 348 if self.mode == 'E':
330 349 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
331 350 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
332 351 else:
333 352 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
334 353 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
335 354
336 355 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
337 356 self.titles = ['{} {}'.format(
338 357 self.data.parameters[x], title) for x in self.channels]
339 358
@@ -1,641 +1,702
1 '''
2 Created on Jul 9, 2014
3 Modified on May 10, 2020
1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 # All rights reserved.
3 #
4 # Distributed under the terms of the BSD 3-clause license.
5 """Classes to plot Spectra data
4 6
5 @author: Juan C. Espinoza
6 '''
7 """
7 8
8 9 import os
9 import datetime
10 10 import numpy
11 11
12 from schainpy.model.graphics.jroplot_base import Plot, plt
12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13 13
14 14
15 15 class SpectraPlot(Plot):
16 16 '''
17 17 Plot for Spectra data
18 18 '''
19 19
20 20 CODE = 'spc'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 buffering = False
23 24
24 25 def setup(self):
25 26 self.nplots = len(self.data.channels)
26 27 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
27 28 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
28 29 self.height = 2.6 * self.nrows
29 30 self.cb_label = 'dB'
30 31 if self.showprofile:
31 32 self.width = 4 * self.ncols
32 33 else:
33 34 self.width = 3.5 * self.ncols
34 35 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
35 36 self.ylabel = 'Range [km]'
36 37
38 def update(self, dataOut):
39
40 data = {}
41 meta = {}
42 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
43 data['spc'] = spc
44 data['rti'] = dataOut.getPower()
45 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
46 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
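        # xrange packs the three candidate x axes (frequency in kHz, time/ACF
        # lag in ms, velocity in m/s); plot() picks one of them according to
        # self.xaxis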
47 if self.CODE == 'spc_moments':
48 data['moments'] = dataOut.moments
49
50 return data, meta
51
37 52 def plot(self):
38 53 if self.xaxis == "frequency":
39 54 x = self.data.xrange[0]
40 55 self.xlabel = "Frequency (kHz)"
41 56 elif self.xaxis == "time":
42 57 x = self.data.xrange[1]
43 58 self.xlabel = "Time (ms)"
44 59 else:
45 60 x = self.data.xrange[2]
46 61 self.xlabel = "Velocity (m/s)"
47 62
48 63 if self.CODE == 'spc_moments':
49 64 x = self.data.xrange[2]
50 65 self.xlabel = "Velocity (m/s)"
51 66
52 67 self.titles = []
53 68
54 y = self.data.heights
69 y = self.data.yrange
55 70 self.y = y
56 z = self.data['spc']
71
72 data = self.data[-1]
73 z = data['spc']
57 74
58 75 for n, ax in enumerate(self.axes):
59 noise = self.data['noise'][n][-1]
76 noise = data['noise'][n]
60 77 if self.CODE == 'spc_moments':
61 mean = self.data['moments'][n, :, 1, :][-1]
78 mean = data['moments'][n, 2]
62 79 if ax.firsttime:
63 80 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
64 81 self.xmin = self.xmin if self.xmin else -self.xmax
65 82 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
66 83 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
67 84 ax.plt = ax.pcolormesh(x, y, z[n].T,
68 85 vmin=self.zmin,
69 86 vmax=self.zmax,
70 87 cmap=plt.get_cmap(self.colormap)
71 88 )
72 89
73 90 if self.showprofile:
74 91 ax.plt_profile = self.pf_axes[n].plot(
75 self.data['rti'][n][-1], y)[0]
92 data['rti'][n], y)[0]
76 93 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
77 94 color="k", linestyle="dashed", lw=1)[0]
78 95 if self.CODE == 'spc_moments':
79 96 ax.plt_mean = ax.plot(mean, y, color='k')[0]
80 97 else:
81 98 ax.plt.set_array(z[n].T.ravel())
82 99 if self.showprofile:
83 ax.plt_profile.set_data(self.data['rti'][n][-1], y)
100 ax.plt_profile.set_data(data['rti'][n], y)
84 101 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
85 102 if self.CODE == 'spc_moments':
86 103 ax.plt_mean.set_data(mean, y)
87 104 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
88 105
89 106
90 107 class CrossSpectraPlot(Plot):
91 108
92 109 CODE = 'cspc'
93 110 colormap = 'jet'
94 111 plot_type = 'pcolor'
95 112 zmin_coh = None
96 113 zmax_coh = None
97 114 zmin_phase = None
98 115 zmax_phase = None
99 116
100 117 def setup(self):
101 118
102 119 self.ncols = 4
103 self.nrows = len(self.data.pairs)
104 self.nplots = self.nrows * 4
120 self.nplots = len(self.data.pairs) * 2
121 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
105 122 self.width = 3.1 * self.ncols
106 123 self.height = 2.6 * self.nrows
107 124 self.ylabel = 'Range [km]'
108 125 self.showprofile = False
109 126 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
110 127
128 def update(self, dataOut):
129
130 data = {}
131 meta = {}
132
133 spc = dataOut.data_spc
134 cspc = dataOut.data_cspc
135 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
136 meta['pairs'] = dataOut.pairsList
137
138 tmp = []
139
140 for n, pair in enumerate(meta['pairs']):
141 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
142 coh = numpy.abs(out)
143 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
144 tmp.append(coh)
145 tmp.append(phase)
146
147 data['cspc'] = numpy.array(tmp)
148
149 return data, meta
150
111 151 def plot(self):
112 152
113 153 if self.xaxis == "frequency":
114 154 x = self.data.xrange[0]
115 155 self.xlabel = "Frequency (kHz)"
116 156 elif self.xaxis == "time":
117 157 x = self.data.xrange[1]
118 158 self.xlabel = "Time (ms)"
119 159 else:
120 160 x = self.data.xrange[2]
121 161 self.xlabel = "Velocity (m/s)"
122 162
123 163 self.titles = []
124 164
125 y = self.data.heights
165 y = self.data.yrange
126 166 self.y = y
127 nspc = self.data['spc']
128 spc = self.data['cspc'][0]
129 cspc = self.data['cspc'][1]
130 167
131 for n in range(self.nrows):
132 noise = self.data['noise'][:,-1]
133 pair = self.data.pairs[n]
134 ax = self.axes[4 * n]
135 if ax.firsttime:
136 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
137 self.xmin = self.xmin if self.xmin else -self.xmax
138 self.zmin = self.zmin if self.zmin else numpy.nanmin(nspc)
139 self.zmax = self.zmax if self.zmax else numpy.nanmax(nspc)
140 ax.plt = ax.pcolormesh(x , y , nspc[pair[0]].T,
141 vmin=self.zmin,
142 vmax=self.zmax,
143 cmap=plt.get_cmap(self.colormap)
144 )
145 else:
146 ax.plt.set_array(nspc[pair[0]].T.ravel())
147 self.titles.append('CH {}: {:3.2f}dB'.format(pair[0], noise[pair[0]]))
148
149 ax = self.axes[4 * n + 1]
150 if ax.firsttime:
151 ax.plt = ax.pcolormesh(x , y, nspc[pair[1]].T,
152 vmin=self.zmin,
153 vmax=self.zmax,
154 cmap=plt.get_cmap(self.colormap)
155 )
156 else:
157 ax.plt.set_array(nspc[pair[1]].T.ravel())
158 self.titles.append('CH {}: {:3.2f}dB'.format(pair[1], noise[pair[1]]))
159
160 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
161 coh = numpy.abs(out)
162 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
168 data = self.data[-1]
169 cspc = data['cspc']
163 170
164 ax = self.axes[4 * n + 2]
171 for n in range(len(self.data.pairs)):
172 pair = self.data.pairs[n]
173 coh = cspc[n*2]
174 phase = cspc[n*2+1]
175 ax = self.axes[2 * n]
165 176 if ax.firsttime:
166 177 ax.plt = ax.pcolormesh(x, y, coh.T,
167 178 vmin=0,
168 179 vmax=1,
169 180 cmap=plt.get_cmap(self.colormap_coh)
170 181 )
171 182 else:
172 183 ax.plt.set_array(coh.T.ravel())
173 184 self.titles.append(
174 185 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
175 186
176 ax = self.axes[4 * n + 3]
187 ax = self.axes[2 * n + 1]
177 188 if ax.firsttime:
178 189 ax.plt = ax.pcolormesh(x, y, phase.T,
179 190 vmin=-180,
180 191 vmax=180,
181 192 cmap=plt.get_cmap(self.colormap_phase)
182 193 )
183 194 else:
184 195 ax.plt.set_array(phase.T.ravel())
185 196 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
186 197
187 198
188 199 class RTIPlot(Plot):
189 200 '''
190 201 Plot for RTI data
191 202 '''
192 203
193 204 CODE = 'rti'
194 205 colormap = 'jet'
195 206 plot_type = 'pcolorbuffer'
196 207
197 208 def setup(self):
198 209 self.xaxis = 'time'
199 210 self.ncols = 1
200 211 self.nrows = len(self.data.channels)
201 212 self.nplots = len(self.data.channels)
202 213 self.ylabel = 'Range [km]'
203 214 self.xlabel = 'Time'
204 215 self.cb_label = 'dB'
205 216 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
206 217 self.titles = ['{} Channel {}'.format(
207 218 self.CODE.upper(), x) for x in range(self.nrows)]
208 219
220 def update(self, dataOut):
221
222 data = {}
223 meta = {}
224 data['rti'] = dataOut.getPower()
225 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
226
227 return data, meta
228
209 229 def plot(self):
210 230 self.x = self.data.times
211 self.y = self.data.heights
231 self.y = self.data.yrange
212 232 self.z = self.data[self.CODE]
213 233 self.z = numpy.ma.masked_invalid(self.z)
214 234
215 235 if self.decimation is None:
216 236 x, y, z = self.fill_gaps(self.x, self.y, self.z)
217 237 else:
218 238 x, y, z = self.fill_gaps(*self.decimate())
219 239
220 240 for n, ax in enumerate(self.axes):
221 241 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
222 242 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
243 data = self.data[-1]
223 244 if ax.firsttime:
224 245 ax.plt = ax.pcolormesh(x, y, z[n].T,
225 246 vmin=self.zmin,
226 247 vmax=self.zmax,
227 248 cmap=plt.get_cmap(self.colormap)
228 249 )
229 250 if self.showprofile:
230 251 ax.plot_profile = self.pf_axes[n].plot(
231 self.data['rti'][n][-1], self.y)[0]
232 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y,
252 data['rti'][n], self.y)[0]
253 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
233 254 color="k", linestyle="dashed", lw=1)[0]
234 255 else:
235 256 ax.collections.remove(ax.collections[0])
236 257 ax.plt = ax.pcolormesh(x, y, z[n].T,
237 258 vmin=self.zmin,
238 259 vmax=self.zmax,
239 260 cmap=plt.get_cmap(self.colormap)
240 261 )
241 262 if self.showprofile:
242 ax.plot_profile.set_data(self.data['rti'][n][-1], self.y)
263 ax.plot_profile.set_data(data['rti'][n], self.y)
243 264 ax.plot_noise.set_data(numpy.repeat(
244 self.data['noise'][n][-1], len(self.y)), self.y)
265 data['noise'][n], len(self.y)), self.y)
245 266
246 267
247 268 class CoherencePlot(RTIPlot):
248 269 '''
249 270 Plot for Coherence data
250 271 '''
251 272
252 273 CODE = 'coh'
253 274
254 275 def setup(self):
255 276 self.xaxis = 'time'
256 277 self.ncols = 1
257 278 self.nrows = len(self.data.pairs)
258 279 self.nplots = len(self.data.pairs)
259 280 self.ylabel = 'Range [km]'
260 281 self.xlabel = 'Time'
261 282 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
262 283 if self.CODE == 'coh':
263 284 self.cb_label = ''
264 285 self.titles = [
265 286 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
266 287 else:
267 288 self.cb_label = 'Degrees'
268 289 self.titles = [
269 290 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
270 291
292 def update(self, dataOut):
293
294 data = {}
295 meta = {}
296 data['coh'] = dataOut.getCoherence()
297 meta['pairs'] = dataOut.pairsList
298
299 return data, meta
271 300
272 301 class PhasePlot(CoherencePlot):
273 302 '''
274 303 Plot for Phase map data
275 304 '''
276 305
277 306 CODE = 'phase'
278 307 colormap = 'seismic'
279 308
309 def update(self, dataOut):
310
311 data = {}
312 meta = {}
313 data['phase'] = dataOut.getCoherence(phase=True)
314 meta['pairs'] = dataOut.pairsList
315
316 return data, meta
280 317
281 318 class NoisePlot(Plot):
282 319 '''
283 320 Plot for noise
284 321 '''
285 322
286 323 CODE = 'noise'
287 324 plot_type = 'scatterbuffer'
288 325
289
290 326 def setup(self):
291 327 self.xaxis = 'time'
292 328 self.ncols = 1
293 329 self.nrows = 1
294 330 self.nplots = 1
295 331 self.ylabel = 'Intensity [dB]'
296 332 self.xlabel = 'Time'
297 333 self.titles = ['Noise']
298 334 self.colorbar = False
335 self.plots_adjust.update({'right': 0.85 })
336
337 def update(self, dataOut):
338
339 data = {}
340 meta = {}
341 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
342 meta['yrange'] = numpy.array([])
343
344 return data, meta
299 345
300 346 def plot(self):
301 347
302 348 x = self.data.times
303 349 xmin = self.data.min_time
304 350 xmax = xmin + self.xrange * 60 * 60
305 Y = self.data[self.CODE]
351 Y = self.data['noise']
306 352
307 353 if self.axes[0].firsttime:
354 self.ymin = numpy.nanmin(Y) - 5
355 self.ymax = numpy.nanmax(Y) + 5
308 356 for ch in self.data.channels:
309 357 y = Y[ch]
310 358 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
311 plt.legend()
359 plt.legend(bbox_to_anchor=(1.18, 1.0))
312 360 else:
313 361 for ch in self.data.channels:
314 362 y = Y[ch]
315 363 self.axes[0].lines[ch].set_data(x, y)
316 364
317 self.ymin = numpy.nanmin(Y) - 5
318 self.ymax = numpy.nanmax(Y) + 5
319
320
365
321 366 class PowerProfilePlot(Plot):
322 367
323 CODE = 'spcprofile'
368 CODE = 'pow_profile'
324 369 plot_type = 'scatter'
325 buffering = False
326 370
327 371 def setup(self):
328 372
329 373 self.ncols = 1
330 374 self.nrows = 1
331 375 self.nplots = 1
332 376 self.height = 4
333 377 self.width = 3
334 378 self.ylabel = 'Range [km]'
335 379 self.xlabel = 'Intensity [dB]'
336 380 self.titles = ['Power Profile']
337 381 self.colorbar = False
338 382
383 def update(self, dataOut):
384
385 data = {}
386 meta = {}
387 data[self.CODE] = dataOut.getPower()
388
389 return data, meta
390
339 391 def plot(self):
340 392
341 y = self.data.heights
393 y = self.data.yrange
342 394 self.y = y
343 395
344 x = self.data['spcprofile']
396 x = self.data[-1][self.CODE]
345 397
346 398 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
347 399 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
348 400
349 401 if self.axes[0].firsttime:
350 402 for ch in self.data.channels:
351 403 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
352 404 plt.legend()
353 405 else:
354 406 for ch in self.data.channels:
355 407 self.axes[0].lines[ch].set_data(x[ch], y)
356 408
357 409
358 410 class SpectraCutPlot(Plot):
359 411
360 412 CODE = 'spc_cut'
361 413 plot_type = 'scatter'
362 414 buffering = False
363 415
364 416 def setup(self):
365 417
366 418 self.nplots = len(self.data.channels)
367 419 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
368 420 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
369 421 self.width = 3.4 * self.ncols + 1.5
370 422 self.height = 3 * self.nrows
371 423 self.ylabel = 'Power [dB]'
372 424 self.colorbar = False
373 425 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
374 426
427 def update(self, dataOut):
428
429 data = {}
430 meta = {}
431 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
432 data['spc'] = spc
433 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
434
435 return data, meta
436
375 437 def plot(self):
376 438 if self.xaxis == "frequency":
377 439 x = self.data.xrange[0][1:]
378 440 self.xlabel = "Frequency (kHz)"
379 441 elif self.xaxis == "time":
380 442 x = self.data.xrange[1]
381 443 self.xlabel = "Time (ms)"
382 444 else:
383 445 x = self.data.xrange[2]
384 446 self.xlabel = "Velocity (m/s)"
385 447
386 448 self.titles = []
387 449
388 y = self.data.heights
389 #self.y = y
390 z = self.data['spc_cut']
450 y = self.data.yrange
451 z = self.data[-1]['spc']
391 452
392 453 if self.height_index:
393 454 index = numpy.array(self.height_index)
394 455 else:
395 456 index = numpy.arange(0, len(y), int((len(y))/9))
396 457
397 458 for n, ax in enumerate(self.axes):
398 459 if ax.firsttime:
399 460 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
400 461 self.xmin = self.xmin if self.xmin else -self.xmax
401 462 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
402 463 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
403 464 ax.plt = ax.plot(x, z[n, :, index].T)
404 465 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
405 466 self.figures[0].legend(ax.plt, labels, loc='center right')
406 467 else:
407 468 for i, line in enumerate(ax.plt):
408 469 line.set_data(x, z[n, :, i])
409 470 self.titles.append('CH {}'.format(n))
410 471
411 472
412 473 class BeaconPhase(Plot):
413 474
414 475 __isConfig = None
415 476 __nsubplots = None
416 477
417 478 PREFIX = 'beacon_phase'
418 479
419 480 def __init__(self):
420 481 Plot.__init__(self)
421 482 self.timerange = 24*60*60
422 483 self.isConfig = False
423 484 self.__nsubplots = 1
424 485 self.counter_imagwr = 0
425 486 self.WIDTH = 800
426 487 self.HEIGHT = 400
427 488 self.WIDTHPROF = 120
428 489 self.HEIGHTPROF = 0
429 490 self.xdata = None
430 491 self.ydata = None
431 492
432 493 self.PLOT_CODE = BEACON_CODE
433 494
434 495 self.FTP_WEI = None
435 496 self.EXP_CODE = None
436 497 self.SUB_EXP_CODE = None
437 498 self.PLOT_POS = None
438 499
439 500 self.filename_phase = None
440 501
441 502 self.figfile = None
442 503
443 504 self.xmin = None
444 505 self.xmax = None
445 506
446 507 def getSubplots(self):
447 508
448 509 ncol = 1
449 510 nrow = 1
450 511
451 512 return nrow, ncol
452 513
453 514 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
454 515
455 516 self.__showprofile = showprofile
456 517 self.nplots = nplots
457 518
458 519 ncolspan = 7
459 520 colspan = 6
460 521 self.__nsubplots = 2
461 522
462 523 self.createFigure(id = id,
463 524 wintitle = wintitle,
464 525 widthplot = self.WIDTH+self.WIDTHPROF,
465 526 heightplot = self.HEIGHT+self.HEIGHTPROF,
466 527 show=show)
467 528
468 529 nrow, ncol = self.getSubplots()
469 530
470 531 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
471 532
472 533 def save_phase(self, filename_phase):
473 534 f = open(filename_phase,'w+')
474 535 f.write('\n\n')
475 536 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
476 537 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
477 538 f.close()
478 539
479 540 def save_data(self, filename_phase, data, data_datetime):
480 541 f=open(filename_phase,'a')
481 542 timetuple_data = data_datetime.timetuple()
482 543 day = str(timetuple_data.tm_mday)
483 544 month = str(timetuple_data.tm_mon)
484 545 year = str(timetuple_data.tm_year)
485 546 hour = str(timetuple_data.tm_hour)
486 547 minute = str(timetuple_data.tm_min)
487 548 second = str(timetuple_data.tm_sec)
488 549 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
489 550 f.close()
490 551
491 552 def plot(self):
492 553 log.warning('TODO: Not yet implemented...')
493 554
494 555 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
495 556 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
496 557 timerange=None,
497 558 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
498 559 server=None, folder=None, username=None, password=None,
499 560 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
500 561
501 562 if dataOut.flagNoData:
502 563 return dataOut
503 564
504 565 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
505 566 return
506 567
507 568 if pairsList == None:
508 569 pairsIndexList = dataOut.pairsIndexList[:10]
509 570 else:
510 571 pairsIndexList = []
511 572 for pair in pairsList:
512 573 if pair not in dataOut.pairsList:
513 574 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
514 575 pairsIndexList.append(dataOut.pairsList.index(pair))
515 576
516 577 if pairsIndexList == []:
517 578 return
518 579
519 580 # if len(pairsIndexList) > 4:
520 581 # pairsIndexList = pairsIndexList[0:4]
521 582
522 583 hmin_index = None
523 584 hmax_index = None
524 585
525 586 if hmin != None and hmax != None:
526 587 indexes = numpy.arange(dataOut.nHeights)
527 588 hmin_list = indexes[dataOut.heightList >= hmin]
528 589 hmax_list = indexes[dataOut.heightList <= hmax]
529 590
530 591 if hmin_list.any():
531 592 hmin_index = hmin_list[0]
532 593
533 594 if hmax_list.any():
534 595 hmax_index = hmax_list[-1]+1
535 596
536 597 x = dataOut.getTimeRange()
537 598
538 599 thisDatetime = dataOut.datatime
539 600
540 601 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
541 602 xlabel = "Local Time"
542 603 ylabel = "Phase (degrees)"
543 604
544 605 update_figfile = False
545 606
546 607 nplots = len(pairsIndexList)
547 608 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
548 609 phase_beacon = numpy.zeros(len(pairsIndexList))
549 610 for i in range(nplots):
550 611 pair = dataOut.pairsList[pairsIndexList[i]]
551 612 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
552 613 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
553 614 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
554 615 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
555 616 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
556 617
557 618 if dataOut.beacon_heiIndexList:
558 619 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
559 620 else:
560 621 phase_beacon[i] = numpy.average(phase)
561 622
562 623 if not self.isConfig:
563 624
564 625 nplots = len(pairsIndexList)
565 626
566 627 self.setup(id=id,
567 628 nplots=nplots,
568 629 wintitle=wintitle,
569 630 showprofile=showprofile,
570 631 show=show)
571 632
572 633 if timerange != None:
573 634 self.timerange = timerange
574 635
575 636 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
576 637
577 638 if ymin == None: ymin = 0
578 639 if ymax == None: ymax = 360
579 640
580 641 self.FTP_WEI = ftp_wei
581 642 self.EXP_CODE = exp_code
582 643 self.SUB_EXP_CODE = sub_exp_code
583 644 self.PLOT_POS = plot_pos
584 645
585 646 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
586 647 self.isConfig = True
587 648 self.figfile = figfile
588 649 self.xdata = numpy.array([])
589 650 self.ydata = numpy.array([])
590 651
591 652 update_figfile = True
592 653
593 654 #open beacon phase file
594 655 path = '%s%03d' %(self.PREFIX, self.id)
595 656 beacon_file = os.path.join(path,'%s.txt'%self.name)
596 657 self.filename_phase = os.path.join(figpath,beacon_file)
597 658 #self.save_phase(self.filename_phase)
598 659
599 660
600 661 #store beacon phase data
601 662 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
602 663
603 664 self.setWinTitle(title)
604 665
605 666
606 667 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
607 668
608 669 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
609 670
610 671 axes = self.axesList[0]
611 672
612 673 self.xdata = numpy.hstack((self.xdata, x[0:1]))
613 674
614 675 if len(self.ydata)==0:
615 676 self.ydata = phase_beacon.reshape(-1,1)
616 677 else:
617 678 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
618 679
619 680
620 681 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
621 682 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
622 683 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
623 684 XAxisAsTime=True, grid='both'
624 685 )
625 686
626 687 self.draw()
627 688
628 689 if dataOut.ltctime >= self.xmax:
629 690 self.counter_imagwr = wr_period
630 691 self.isConfig = False
631 692 update_figfile = True
632 693
633 694 self.save(figpath=figpath,
634 695 figfile=figfile,
635 696 save=save,
636 697 ftp=ftp,
637 698 wr_period=wr_period,
638 699 thisDatetime=thisDatetime,
639 700 update_figfile=update_figfile)
640 701
641 702 return dataOut
\ No newline at end of file
@@ -1,297 +1,302
1 1 '''
2 2 Created on Jul 9, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import datetime
8 8 import numpy
9 9
10 10 from schainpy.model.graphics.jroplot_base import Plot, plt
11 11
12 12
13 13 class ScopePlot(Plot):
14 14
15 15 '''
16 16 Plot for Scope
17 17 '''
18 18
19 19 CODE = 'scope'
20 20 plot_type = 'scatter'
21 21
22 22 def setup(self):
23 23
24 24 self.xaxis = 'Range (Km)'
25 25 self.ncols = 1
26 26 self.nrows = 1
27 27 self.nplots = 1
28 28 self.ylabel = 'Intensity [dB]'
29 29 self.titles = ['Scope']
30 30 self.colorbar = False
31 31 self.width = 6
32 32 self.height = 4
33 33
34 def update(self, dataOut):
35
36 data = {}
37 meta = {
38 'nProfiles': dataOut.nProfiles,
39 'flagDataAsBlock': dataOut.flagDataAsBlock,
40 'profileIndex': dataOut.profileIndex,
41 }
42 if self.CODE == 'scope':
43 data[self.CODE] = dataOut.data
44 elif self.CODE == 'pp_power':
45 data[self.CODE] = dataOut.dataPP_POWER
46 elif self.CODE == 'pp_signal':
47 data[self.CODE] = dataOut.dataPP_POW
48 elif self.CODE == 'pp_velocity':
49 data[self.CODE] = dataOut.dataPP_DOP
50 elif self.CODE == 'pp_specwidth':
51 data[self.CODE] = dataOut.dataPP_WIDTH
52
53 return data, meta
54
34 55 def plot_iq(self, x, y, channelIndexList, thisDatetime, wintitle):
35 56
36 57 yreal = y[channelIndexList,:].real
37 58 yimag = y[channelIndexList,:].imag
38 59 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y"))
39 60 self.xlabel = "Range (Km)"
40 61 self.ylabel = "Intensity - IQ"
41 62
42 63 self.y = yreal
43 64 self.x = x
44 self.xmin = min(x)
45 self.xmax = max(x)
46
47 65
48 66 self.titles[0] = title
49 67
50 68 for i,ax in enumerate(self.axes):
51 69 title = "Channel %d" %(i)
52 70 if ax.firsttime:
71 self.xmin = min(x)
72 self.xmax = max(x)
53 73 ax.plt_r = ax.plot(x, yreal[i,:], color='b')[0]
54 74 ax.plt_i = ax.plot(x, yimag[i,:], color='r')[0]
55 75 else:
56 76 ax.plt_r.set_data(x, yreal[i,:])
57 77 ax.plt_i.set_data(x, yimag[i,:])
58 78
59 79 def plot_power(self, x, y, channelIndexList, thisDatetime, wintitle):
60 80 y = y[channelIndexList,:] * numpy.conjugate(y[channelIndexList,:])
61 81 yreal = y.real
62 82 yreal = 10*numpy.log10(yreal)
63 83 self.y = yreal
64 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y"))
84 title = wintitle + " Power: %s" %(thisDatetime.strftime("%d-%b-%Y"))
65 85 self.xlabel = "Range (Km)"
66 self.ylabel = "Intensity"
67 self.xmin = min(x)
68 self.xmax = max(x)
86 self.ylabel = "Intensity [dB]"
69 87
70 88
71 89 self.titles[0] = title
72 90
73 91 for i,ax in enumerate(self.axes):
74 92 title = "Channel %d" %(i)
75
76 93 ychannel = yreal[i,:]
77 94
78 95 if ax.firsttime:
96 self.xmin = min(x)
97 self.xmax = max(x)
79 98 ax.plt_r = ax.plot(x, ychannel)[0]
80 99 else:
81 #pass
82 100 ax.plt_r.set_data(x, ychannel)
83 101
84 102 def plot_weatherpower(self, x, y, channelIndexList, thisDatetime, wintitle):
85 103
86 104
87 105 y = y[channelIndexList,:]
88 106 yreal = y.real
89 107 yreal = 10*numpy.log10(yreal)
90 108 self.y = yreal
91 109 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
92 110 self.xlabel = "Range (Km)"
93 111 self.ylabel = "Intensity"
94 112 self.xmin = min(x)
95 113 self.xmax = max(x)
96 114
97 115 self.titles[0] =title
98 116 for i,ax in enumerate(self.axes):
99 117 title = "Channel %d" %(i)
100 118
101 119 ychannel = yreal[i,:]
102 120
103 121 if ax.firsttime:
104 122 ax.plt_r = ax.plot(x, ychannel)[0]
105 123 else:
106 124 #pass
107 125 ax.plt_r.set_data(x, ychannel)
108 126
109 127 def plot_weathervelocity(self, x, y, channelIndexList, thisDatetime, wintitle):
110 128
111 129 x = x[channelIndexList,:]
112 130 yreal = y
113 131 self.y = yreal
114 132 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
115 133 self.xlabel = "Velocity (m/s)"
116 134 self.ylabel = "Range (Km)"
117 135 self.xmin = numpy.min(x)
118 136 self.xmax = numpy.max(x)
119 137 self.titles[0] =title
120 138 for i,ax in enumerate(self.axes):
121 139 title = "Channel %d" %(i)
122 140 xchannel = x[i,:]
123 141 if ax.firsttime:
124 142 ax.plt_r = ax.plot(xchannel, yreal)[0]
125 143 else:
126 144 #pass
127 145 ax.plt_r.set_data(xchannel, yreal)
128 146
129 147 def plot_weatherspecwidth(self, x, y, channelIndexList, thisDatetime, wintitle):
130 148
131 149 x = x[channelIndexList,:]
132 150 yreal = y
133 151 self.y = yreal
134 152 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
135 153 self.xlabel = "width "
136 154 self.ylabel = "Range (Km)"
137 155 self.xmin = numpy.min(x)
138 156 self.xmax = numpy.max(x)
139 157 self.titles[0] =title
140 158 for i,ax in enumerate(self.axes):
141 159 title = "Channel %d" %(i)
142 160 xchannel = x[i,:]
143 161 if ax.firsttime:
144 162 ax.plt_r = ax.plot(xchannel, yreal)[0]
145 163 else:
146 164 #pass
147 165 ax.plt_r.set_data(xchannel, yreal)
148 166
149 167 def plot(self):
150 168 if self.channels:
151 169 channels = self.channels
152 170 else:
153 171 channels = self.data.channels
154 172
155 173 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1])
156 if self.CODE == "pp_power":
157 scope = self.data['pp_power']
158 elif self.CODE == "pp_signal":
159 scope = self.data["pp_signal"]
160 elif self.CODE == "pp_velocity":
161 scope = self.data["pp_velocity"]
162 elif self.CODE == "pp_specwidth":
163 scope = self.data["pp_specwidth"]
164 else:
165 scope =self.data["scope"]
174
175 scope = self.data[-1][self.CODE]
166 176
167 177 if self.data.flagDataAsBlock:
168 178
169 179 for i in range(self.data.nProfiles):
170 180
171 181 wintitle1 = " [Profile = %d] " %i
172 182 if self.CODE =="scope":
173 183 if self.type == "power":
174 self.plot_power(self.data.heights,
184 self.plot_power(self.data.yrange,
175 185 scope[:,i,:],
176 186 channels,
177 187 thisDatetime,
178 188 wintitle1
179 189 )
180 190
181 191 if self.type == "iq":
182 self.plot_iq(self.data.heights,
192 self.plot_iq(self.data.yrange,
183 193 scope[:,i,:],
184 194 channels,
185 195 thisDatetime,
186 196 wintitle1
187 197 )
188 198 if self.CODE=="pp_power":
189 self.plot_weatherpower(self.data.heights,
199 self.plot_weatherpower(self.data.yrange,
190 200 scope[:,i,:],
191 201 channels,
192 202 thisDatetime,
193 203 wintitle
194 204 )
195 205 if self.CODE=="pp_signal":
196 self.plot_weatherpower(self.data.heights,
206 self.plot_weatherpower(self.data.yrange,
197 207 scope[:,i,:],
198 208 channels,
199 209 thisDatetime,
200 210 wintitle
201 211 )
202 212 if self.CODE=="pp_velocity":
203 213 self.plot_weathervelocity(scope[:,i,:],
204 self.data.heights,
214 self.data.yrange,
205 215 channels,
206 216 thisDatetime,
207 217 wintitle
208 218 )
209 219 if self.CODE=="pp_spcwidth":
210 220 self.plot_weatherspecwidth(scope[:,i,:],
211 self.data.heights,
221 self.data.yrange,
212 222 channels,
213 223 thisDatetime,
214 224 wintitle
215 225 )
216 226 else:
217 227 wintitle = " [Profile = %d] " %self.data.profileIndex
218 228 if self.CODE== "scope":
219 229 if self.type == "power":
220 self.plot_power(self.data.heights,
230 self.plot_power(self.data.yrange,
221 231 scope,
222 232 channels,
223 233 thisDatetime,
224 234 wintitle
225 235 )
226 236
227 237 if self.type == "iq":
228 self.plot_iq(self.data.heights,
238 self.plot_iq(self.data.yrange,
229 239 scope,
230 240 channels,
231 241 thisDatetime,
232 242 wintitle
233 243 )
234 244 if self.CODE=="pp_power":
235 self.plot_weatherpower(self.data.heights,
245 self.plot_weatherpower(self.data.yrange,
236 246 scope,
237 247 channels,
238 248 thisDatetime,
239 249 wintitle
240 250 )
241 251 if self.CODE=="pp_signal":
242 self.plot_weatherpower(self.data.heights,
252 self.plot_weatherpower(self.data.yrange,
243 253 scope,
244 254 channels,
245 255 thisDatetime,
246 256 wintitle
247 257 )
248 258 if self.CODE=="pp_velocity":
249 259 self.plot_weathervelocity(scope,
250 self.data.heights,
260 self.data.yrange,
251 261 channels,
252 262 thisDatetime,
253 263 wintitle
254 264 )
255 265 if self.CODE=="pp_specwidth":
256 266 self.plot_weatherspecwidth(scope,
257 self.data.heights,
267 self.data.yrange,
258 268 channels,
259 269 thisDatetime,
260 270 wintitle
261 271 )
262 272
263 273
264
265 274 class PulsepairPowerPlot(ScopePlot):
266 275 '''
267 276 Plot for P= S+N
268 277 '''
269 278
270 279 CODE = 'pp_power'
271 280 plot_type = 'scatter'
272 buffering = False
273 281
274 282 class PulsepairVelocityPlot(ScopePlot):
275 283 '''
276 284 Plot for VELOCITY
277 285 '''
278 286 CODE = 'pp_velocity'
279 287 plot_type = 'scatter'
280 buffering = False
281 288
282 289 class PulsepairSpecwidthPlot(ScopePlot):
283 290 '''
284 291 Plot for WIDTH
285 292 '''
286 293 CODE = 'pp_specwidth'
287 294 plot_type = 'scatter'
288 buffering = False
289 295
290 296 class PulsepairSignalPlot(ScopePlot):
291 297 '''
292 298 Plot for S
293 299 '''
294 300
295 301 CODE = 'pp_signal'
296 302 plot_type = 'scatter'
297 buffering = False
@@ -1,355 +1,355
1 1 '''
2 2 Created on Nov 9, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 import schainpy.admin
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator
18 18 from schainpy.model.data.jrodata import Parameters
19 19 from schainpy.model.io.jroIO_base import Reader
20 20 from schainpy.utils import log
21 21
22 22 FILE_HEADER_STRUCTURE = numpy.dtype([
23 23 ('FMN', '<u4'),
24 24 ('nrec', '<u4'),
25 25 ('fr_offset', '<u4'),
26 26 ('id', '<u4'),
27 27 ('site', 'u1', (32,))
28 28 ])
29 29
30 30 REC_HEADER_STRUCTURE = numpy.dtype([
31 31 ('rmn', '<u4'),
32 32 ('rcounter', '<u4'),
33 33 ('nr_offset', '<u4'),
34 34 ('tr_offset', '<u4'),
35 35 ('time', '<u4'),
36 36 ('time_msec', '<u4'),
37 37 ('tag', 'u1', (32,)),
38 38 ('comments', 'u1', (32,)),
39 39 ('lat', '<f4'),
40 40 ('lon', '<f4'),
41 41 ('gps_status', '<u4'),
42 42 ('freq', '<u4'),
43 43 ('freq0', '<u4'),
44 44 ('nchan', '<u4'),
45 45 ('delta_r', '<u4'),
46 46 ('nranges', '<u4'),
47 47 ('r0', '<u4'),
48 48 ('prf', '<u4'),
49 49 ('ncoh', '<u4'),
50 50 ('npoints', '<u4'),
51 51 ('polarization', '<i4'),
52 52 ('rx_filter', '<u4'),
53 53 ('nmodes', '<u4'),
54 54 ('dmode_index', '<u4'),
55 55 ('dmode_rngcorr', '<u4'),
56 56 ('nrxs', '<u4'),
57 57 ('acf_length', '<u4'),
58 58 ('acf_lags', '<u4'),
59 59 ('sea_to_atmos', '<f4'),
60 60 ('sea_notch', '<u4'),
61 61 ('lh_sea', '<u4'),
62 62 ('hh_sea', '<u4'),
63 63 ('nbins_sea', '<u4'),
64 64 ('min_snr', '<f4'),
65 65 ('min_cc', '<f4'),
66 66 ('max_time_diff', '<f4')
67 67 ])
68 68
69 69 DATA_STRUCTURE = numpy.dtype([
70 70 ('range', '<u4'),
71 71 ('status', '<u4'),
72 72 ('zonal', '<f4'),
73 73 ('meridional', '<f4'),
74 74 ('vertical', '<f4'),
75 75 ('zonal_a', '<f4'),
76 76 ('meridional_a', '<f4'),
77 77 ('corrected_fading', '<f4'), # seconds
78 78 ('uncorrected_fading', '<f4'), # seconds
79 79 ('time_diff', '<f4'),
80 80 ('major_axis', '<f4'),
81 81 ('axial_ratio', '<f4'),
82 82 ('orientation', '<f4'),
83 83 ('sea_power', '<u4'),
84 84 ('sea_algorithm', '<u4')
85 85 ])
86 86
87 87
88 88 class BLTRParamReader(Reader, ProcessingUnit):
89 89 '''
90 90 Boundary Layer and Tropospheric Radar (BLTR) reader: wind velocities and SNR
91 91 from *.sswma files
92 92 '''
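
A usage sketch (illustrative only, not part of this changeset): the reader is meant to be attached to a project with the same `addReadUnit` pattern shown in the HDFReader docstring later in this merge; `project`, the path and the dates below are placeholders.

    # Hypothetical wiring of the BLTR reader into an existing schainpy
    # Project instance named `project`; path and dates are placeholder values.
    reader = project.addReadUnit(
        name='BLTRParamReader',
        path='/path/to/sswma/files',
        startDate='2019/01/01',
        endDate='2019/01/31',
        startTime='00:00:00',
        endTime='23:59:59',
        )
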
93 93
94 94 ext = '.sswma'
95 95
96 96 def __init__(self):
97 97
98 98 ProcessingUnit.__init__(self)
99 99
100 100 self.dataOut = Parameters()
101 101 self.dataOut.timezone = 300
102 102 self.counter_records = 0
103 103 self.flagNoMoreFiles = 0
104 104 self.isConfig = False
105 105 self.filename = None
106 106 self.status_value = 0
107 107 self.datatime = datetime.datetime(1900,1,1)
108 108 self.filefmt = "*********%Y%m%d******"
109 109
110 110 def setup(self, **kwargs):
111 111
112 112 self.set_kwargs(**kwargs)
113 113
114 114 if self.path is None:
115 115 raise ValueError("The path is not valid")
116 116
117 117 if self.online:
118 118 log.log("Searching files in online mode...", self.name)
119 119
120 120 for nTries in range(self.nTries):
121 121 fullpath = self.searchFilesOnLine(self.path, self.startDate,
122 122 self.endDate, self.expLabel, self.ext, self.walk,
123 123 self.filefmt, self.folderfmt)
124 124 try:
125 125 fullpath = next(fullpath)
126 126 except:
127 127 fullpath = None
128 128
129 129 if fullpath:
130 130 self.fileSize = os.path.getsize(fullpath)
131 131 self.filename = fullpath
132 132 self.flagIsNewFile = 1
133 133 if self.fp != None:
134 134 self.fp.close()
135 135 self.fp = self.open_file(fullpath, self.open_mode)
136 136 self.flagNoMoreFiles = 0
137 137 break
138 138
139 139 log.warning(
140 140 'Waiting {} sec for a valid file in {}: try {} ...'.format(
141 141 self.delay, self.path, nTries + 1),
142 142 self.name)
143 143 time.sleep(self.delay)
144 144
145 145 if not(fullpath):
146 146 raise schainpy.admin.SchainError(
147 147 'There isn\'t any valid file in {}'.format(self.path))
148 148 self.readFirstHeader()
149 149 else:
150 150 log.log("Searching files in {}".format(self.path), self.name)
151 151 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
152 152 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
153 153 self.setNextFile()
154 154
155 155 def checkForRealPath(self, nextFile, nextDay):
156 156 '''
157 157 '''
158 158
159 159 dt = self.datatime + datetime.timedelta(1)
160 160 filename = '{}.{}{}'.format(self.siteFile, dt.strftime('%Y%m%d'), self.ext)
161 161 fullfilename = os.path.join(self.path, filename)
162 162 if os.path.exists(fullfilename):
163 163 return fullfilename, filename
164 164 return None, filename
165 165
166 166
167 167 def readFirstHeader(self):
168 168 '''
169 169 '''
170 170
171 171 # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
172 172 self.siteFile = self.filename.split('/')[-1].split('.')[0]
173 173 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
174 174 self.nrecords = self.header_file['nrec'][0]
175 175 self.counter_records = 0
176 176 self.flagIsNewFile = 0
177 177 self.fileIndex += 1
178 178
179 179 def readNextBlock(self):
180 180
181 181 while True:
182 182 if not self.online and self.counter_records == self.nrecords:
183 183 self.flagIsNewFile = 1
184 184 if not self.setNextFile():
185 185 return 0
186 186 try:
187 187 pointer = self.fp.tell()
188 188 self.readBlock()
189 189 except:
190 190 if self.online and self.waitDataBlock(pointer, 38512) == 1:
191 191 continue
192 192 else:
193 193 if not self.setNextFile():
194 194 return 0
195 195
196 196 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
197 197 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
198 198 log.warning(
199 199 'Reading Record No. {}/{} -> {} [Skipping]'.format(
200 200 self.counter_records,
201 201 self.nrecords,
202 202 self.datatime.ctime()),
203 203 'BLTRParamReader')
204 204 continue
205 205 break
206 206
207 207 log.log('Reading Record No. {} -> {}'.format(
208 208 self.counter_records,
209 209 self.datatime.ctime()), 'BLTRParamReader')
210 210
211 211 return 1
212 212
213 213 def readBlock(self):
214 214
215 215 pointer = self.fp.tell()
216 216 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
217 217 self.nchannels = int(header_rec['nchan'][0] / 2)
218 218 self.kchan = header_rec['nrxs'][0]
219 219 self.nmodes = header_rec['nmodes'][0]
220 220 self.nranges = header_rec['nranges'][0]
221 221 self.fp.seek(pointer)
222 222 self.height = numpy.empty((self.nmodes, self.nranges))
223 223 self.snr = numpy.empty((self.nmodes, int(self.nchannels), self.nranges))
224 224 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
225 225 self.flagDiscontinuousBlock = 0
226 226
227 227 for mode in range(self.nmodes):
228 228 self.readHeader()
229 229 data = self.readData()
230 230 self.height[mode] = (data[0] - self.correction) / 1000.
231 231 self.buffer[mode] = data[1]
232 232 self.snr[mode] = data[2]
233 233
234 234 self.counter_records = self.counter_records + self.nmodes
235 235
236 236 return
237 237
238 238 def readHeader(self):
239 239 '''
240 240 RecordHeader of BLTR rawdata file
241 241 '''
242 242
243 243 header_structure = numpy.dtype(
244 244 REC_HEADER_STRUCTURE.descr + [
245 245 ('antenna_coord', 'f4', (2, int(self.nchannels))),
246 246 ('rx_gains', 'u4', (int(self.nchannels),)),
247 247 ('rx_analysis', 'u4', (int(self.nchannels),))
248 248 ]
249 249 )
250 250
251 251 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
252 252 self.lat = self.header_rec['lat'][0]
253 253 self.lon = self.header_rec['lon'][0]
254 254 self.delta = self.header_rec['delta_r'][0]
255 255 self.correction = self.header_rec['dmode_rngcorr'][0]
256 256 self.imode = self.header_rec['dmode_index'][0]
257 257 self.antenna = self.header_rec['antenna_coord']
258 258 self.rx_gains = self.header_rec['rx_gains']
259 259 self.time = self.header_rec['time'][0]
260 260 dt = datetime.datetime.utcfromtimestamp(self.time)
261 261 if dt.date()>self.datatime.date():
262 262 self.flagDiscontinuousBlock = 1
263 263 self.datatime = dt
264 264
265 265 def readData(self):
266 266 '''
267 267 Read and filter a data block record of a BLTR rawdata file;
268 268 filtering is done according to status_value.
269 269
270 270 Input:
271 271 status_value - entries whose status field is not equal to
272 272 status_value are set to NaN
273 273
274 274 '''
275 275 self.nchannels = int(self.nchannels)
276 276
277 277 data_structure = numpy.dtype(
278 278 DATA_STRUCTURE.descr + [
279 279 ('rx_saturation', 'u4', (self.nchannels,)),
280 280 ('chan_offset', 'u4', (2 * self.nchannels,)),
281 281 ('rx_amp', 'u4', (self.nchannels,)),
282 282 ('rx_snr', 'f4', (self.nchannels,)),
283 283 ('cross_snr', 'f4', (self.kchan,)),
284 284 ('sea_power_relative', 'f4', (self.kchan,))]
285 285 )
286 286
287 287 data = numpy.fromfile(self.fp, data_structure, self.nranges)
288 288
289 289 height = data['range']
290 290 winds = numpy.array(
291 291 (data['zonal'], data['meridional'], data['vertical']))
292 292 snr = data['rx_snr'].T
293 293
294 294 winds[numpy.where(winds == -9999.)] = numpy.nan
295 295 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
296 296 snr[numpy.where(snr == -9999.)] = numpy.nan
297 297 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
298 298 snr = numpy.power(10, snr / 10)
299 299
300 300 return height, winds, snr
301 301
302 302 def set_output(self):
303 303 '''
304 304 Storing data from databuffer to dataOut object
305 305 '''
306 306
307 self.dataOut.data_SNR = self.snr
307 self.dataOut.data_snr = self.snr
308 308 self.dataOut.height = self.height
309 309 self.dataOut.data = self.buffer
310 310 self.dataOut.utctimeInit = self.time
311 311 self.dataOut.utctime = self.dataOut.utctimeInit
312 312 self.dataOut.useLocalTime = False
313 313 self.dataOut.paramInterval = 157
314 314 self.dataOut.site = self.siteFile
315 315 self.dataOut.nrecords = self.nrecords / self.nmodes
316 316 self.dataOut.lat = self.lat
317 317 self.dataOut.lon = self.lon
318 318 self.dataOut.channelList = list(range(self.nchannels))
319 319 self.dataOut.kchan = self.kchan
320 320 self.dataOut.delta = self.delta
321 321 self.dataOut.correction = self.correction
322 322 self.dataOut.nmodes = self.nmodes
323 323 self.dataOut.imode = self.imode
324 324 self.dataOut.antenna = self.antenna
325 325 self.dataOut.rx_gains = self.rx_gains
326 326 self.dataOut.flagNoData = False
327 327 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
328 328
329 329 def getData(self):
330 330 '''
331 331 Read the next data block and store it in the dataOut object
332 332 '''
333 333 if self.flagNoMoreFiles:
334 334 self.dataOut.flagNoData = True
335 335 return 0
336 336
337 337 if not self.readNextBlock():
338 338 self.dataOut.flagNoData = True
339 339 return 0
340 340
341 341 self.set_output()
342 342
343 343 return 1
344 344
345 345 def run(self, **kwargs):
346 346 '''
347 347 '''
348 348
349 349 if not(self.isConfig):
350 350 self.setup(**kwargs)
351 351 self.isConfig = True
352 352
353 353 self.getData()
354 354
353 355 return
\ No newline at end of file
@@ -1,626 +1,627
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation; by default
19 19 they contain two groups, Data and Metadata, and all variables are loaded as
20 20 `dataOut` attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96
97 97 def setup(self, **kwargs):
98 98
99 99 self.set_kwargs(**kwargs)
100 100 if not self.ext.startswith('.'):
101 101 self.ext = '.{}'.format(self.ext)
102 102
103 103 if self.online:
104 104 log.log("Searching files in online mode...", self.name)
105 105
106 106 for nTries in range(self.nTries):
107 107 fullpath = self.searchFilesOnLine(self.path, self.startDate,
108 108 self.endDate, self.expLabel, self.ext, self.walk,
109 109 self.filefmt, self.folderfmt)
110 110 try:
111 111 fullpath = next(fullpath)
112 112 except:
113 113 fullpath = None
114 114
115 115 if fullpath:
116 116 break
117 117
118 118 log.warning(
119 119 'Waiting {} sec for a valid file in {}: try {} ...'.format(
120 120 self.delay, self.path, nTries + 1),
121 121 self.name)
122 122 time.sleep(self.delay)
123 123
124 124 if not(fullpath):
125 125 raise schainpy.admin.SchainError(
126 126 'There isn\'t any valid file in {}'.format(self.path))
127 127
128 128 pathname, filename = os.path.split(fullpath)
129 129 self.year = int(filename[1:5])
130 130 self.doy = int(filename[5:8])
131 131 self.set = int(filename[8:11]) - 1
132 132 else:
133 133 log.log("Searching files in {}".format(self.path), self.name)
134 134 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 135 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
136 136
137 137 self.setNextFile()
138 138
139 139 return
140 140
141 141 def readFirstHeader(self):
142 142 '''Read metadata and data'''
143 143
144 144 self.__readMetadata()
145 145 self.__readData()
146 146 self.__setBlockList()
147 147
148 148 if 'type' in self.meta:
149 149 self.dataOut = eval(self.meta['type'])()
150 150
151 151 for attr in self.meta:
152 152 setattr(self.dataOut, attr, self.meta[attr])
153 153
154 154 self.blockIndex = 0
155 155
156 156 return
157 157
158 158 def __setBlockList(self):
159 159 '''
160 160 Selects the data within the times defined
161 161
162 162 self.fp
163 163 self.startTime
164 164 self.endTime
165 165 self.blockList
166 166 self.blocksPerFile
167 167
168 168 '''
169 169
170 170 startTime = self.startTime
171 171 endTime = self.endTime
172 172
173 173 thisUtcTime = self.data['utctime']
174 174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 175
176 176 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
177 177
178 178 thisDate = thisDatetime.date()
179 179 thisTime = thisDatetime.time()
180 180
181 181 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 182 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
183 183
184 184 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
185 185
186 186 self.blockList = ind
187 187 self.blocksPerFile = len(ind)
188 188 return
189 189
190 190 def __readMetadata(self):
191 191 '''
192 192 Reads Metadata
193 193 '''
194 194
195 195 meta = {}
196 196
197 197 if self.description:
198 198 for key, value in self.description['Metadata'].items():
199 199 meta[key] = self.fp[value].value
200 200 else:
201 201 grp = self.fp['Metadata']
202 202 for name in grp:
203 203 meta[name] = grp[name].value
204 204
205 205 if self.extras:
206 206 for key, value in self.extras.items():
207 207 meta[key] = value
208 208 self.meta = meta
209 209
210 210 return
211 211
212 212 def __readData(self):
213 213
214 214 data = {}
215 215
216 216 if self.description:
217 217 for key, value in self.description['Data'].items():
218 218 if isinstance(value, str):
219 219 if isinstance(self.fp[value], h5py.Dataset):
220 220 data[key] = self.fp[value].value
221 221 elif isinstance(self.fp[value], h5py.Group):
222 222 array = []
223 223 for ch in self.fp[value]:
224 224 array.append(self.fp[value][ch].value)
225 225 data[key] = numpy.array(array)
226 226 elif isinstance(value, list):
227 227 array = []
228 228 for ch in value:
229 229 array.append(self.fp[ch].value)
230 230 data[key] = numpy.array(array)
231 231 else:
232 232 grp = self.fp['Data']
233 233 for name in grp:
234 234 if isinstance(grp[name], h5py.Dataset):
235 235 array = grp[name].value
236 236 elif isinstance(grp[name], h5py.Group):
237 237 array = []
238 238 for ch in grp[name]:
239 239 array.append(grp[name][ch].value)
240 240 array = numpy.array(array)
241 241 else:
242 242 log.warning('Unknown type: {}'.format(name))
243 243
244 244 if name in self.description:
245 245 key = self.description[name]
246 246 else:
247 247 key = name
248 248 data[key] = array
249 249
250 250 self.data = data
251 251 return
252 252
253 253 def getData(self):
254 254
255 255 for attr in self.data:
256 256 if self.data[attr].ndim == 1:
257 257 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
258 258 else:
259 259 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
260 260
261 261 self.dataOut.flagNoData = False
262 262 self.blockIndex += 1
263 263
264 264 log.log("Block No. {}/{} -> {}".format(
265 265 self.blockIndex,
266 266 self.blocksPerFile,
267 267 self.dataOut.datatime.ctime()), self.name)
268 268
269 269 return
270 270
271 271 def run(self, **kwargs):
272 272
273 273 if not(self.isConfig):
274 274 self.setup(**kwargs)
275 275 self.isConfig = True
276 276
277 277 if self.blockIndex == self.blocksPerFile:
278 278 self.setNextFile()
279 279
280 280 self.getData()
281 281
282 282 return
283 283
284 284 @MPDecorator
285 285 class HDFWriter(Operation):
286 286 """Operation to write HDF5 files.
287 287
288 288 By default the HDF5 file contains two groups, Data and Metadata, where
289 289 any `dataOut` attribute specified by the `dataList` and `metadataList`
290 290 parameters can be saved; data attributes are normally time dependent,
291 291 whereas the metadata are not.
292 292 It is possible to customize the structure of the HDF5 file with the
293 293 optional `description` parameter; see the examples.
294 294
295 295 Parameters:
296 296 -----------
297 297 path : str
298 298 Path where files will be saved.
299 299 blocksPerFile : int
300 300 Number of blocks per file
301 301 metadataList : list
302 302 List of the dataOut attributes that will be saved as metadata
303 303 dataList : list
304 304 List of the dataOut attributes that will be saved as data
305 305 setType : bool
306 306 If True the name of the files corresponds to the timestamp of the data
307 307 description : dict, optional
308 308 Dictionary with the desired description of the HDF5 file
309 309
310 310 Examples
311 311 --------
312 312
313 313 desc = {
314 314 'data_output': {'winds': ['z', 'w', 'v']},
315 315 'utctime': 'timestamps',
316 316 'heightList': 'heights'
317 317 }
318 318 desc = {
319 319 'data_output': ['z', 'w', 'v'],
320 320 'utctime': 'timestamps',
321 321 'heightList': 'heights'
322 322 }
323 323 desc = {
324 324 'Data': {
325 325 'data_output': 'winds',
326 326 'utctime': 'timestamps'
327 327 },
328 328 'Metadata': {
329 329 'heightList': 'heights'
330 330 }
331 331 }
332 332
333 333 writer = proc_unit.addOperation(name='HDFWriter')
334 334 writer.addParameter(name='path', value='/path/to/file')
335 335 writer.addParameter(name='blocksPerFile', value='32')
336 336 writer.addParameter(name='metadataList', value='heightList,timeZone')
337 337 writer.addParameter(name='dataList',value='data_output,utctime')
338 338 # writer.addParameter(name='description',value=json.dumps(desc))
339 339
340 340 """
341 341
342 342 ext = ".hdf5"
343 343 optchar = "D"
344 344 filename = None
345 345 path = None
346 346 setFile = None
347 347 fp = None
348 348 firsttime = True
349 349 #Configurations
350 350 blocksPerFile = None
351 351 blockIndex = None
352 352 dataOut = None
353 353 #Data Arrays
354 354 dataList = None
355 355 metadataList = None
356 356 currentDay = None
357 357 lastTime = None
358 358
359 359 def __init__(self):
360 360
361 361 Operation.__init__(self)
362 362 return
363 363
364 364 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
365 365 self.path = path
366 366 self.blocksPerFile = blocksPerFile
367 367 self.metadataList = metadataList
368 368 self.dataList = [s.strip() for s in dataList]
369 369 self.setType = setType
370 370 self.description = description
371 371
372 372 if self.metadataList is None:
373 373 self.metadataList = self.dataOut.metadata_list
374 374
375 375 tableList = []
376 376 dsList = []
377 377
378 378 for i in range(len(self.dataList)):
379 379 dsDict = {}
380 380 if hasattr(self.dataOut, self.dataList[i]):
381 381 dataAux = getattr(self.dataOut, self.dataList[i])
382 382 dsDict['variable'] = self.dataList[i]
383 383 else:
384 384 log.warning('Attribute {} not found in dataOut', self.name)
385 385 continue
386 386
387 387 if dataAux is None:
388 388 continue
389 389 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
390 390 dsDict['nDim'] = 0
391 391 else:
392 392 dsDict['nDim'] = len(dataAux.shape)
393 393 dsDict['shape'] = dataAux.shape
394 394 dsDict['dsNumber'] = dataAux.shape[0]
395 395 dsDict['dtype'] = dataAux.dtype
396 396
397 397 dsList.append(dsDict)
398 398
399 399 self.dsList = dsList
400 400 self.currentDay = self.dataOut.datatime.date()
401 401
402 402 def timeFlag(self):
403 403 currentTime = self.dataOut.utctime
404 404 timeTuple = time.localtime(currentTime)
405 405 dataDay = timeTuple.tm_yday
406 406
407 407 if self.lastTime is None:
408 408 self.lastTime = currentTime
409 409 self.currentDay = dataDay
410 410 return False
411 411
412 412 timeDiff = currentTime - self.lastTime
413 413
414 414 #If the day changed or the gap between one sample and the next exceeds 3 hours
415 415 if dataDay != self.currentDay:
416 416 self.currentDay = dataDay
417 417 return True
418 418 elif timeDiff > 3*60*60:
419 419 self.lastTime = currentTime
420 420 return True
421 421 else:
422 422 self.lastTime = currentTime
423 423 return False
424 424
425 425 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
426 426 dataList=[], setType=None, description={}):
427 427
428 428 self.dataOut = dataOut
429 429 if not(self.isConfig):
430 430 self.setup(path=path, blocksPerFile=blocksPerFile,
431 431 metadataList=metadataList, dataList=dataList,
432 432 setType=setType, description=description)
433 433
434 434 self.isConfig = True
435 435 self.setNextFile()
436 436
437 437 self.putData()
438 438 return
439 439
440 440 def setNextFile(self):
441 441
442 442 ext = self.ext
443 443 path = self.path
444 444 setFile = self.setFile
445 445
446 446 timeTuple = time.localtime(self.dataOut.utctime)
447 447 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
448 448 fullpath = os.path.join(path, subfolder)
449 449
450 450 if os.path.exists(fullpath):
451 451 filesList = os.listdir(fullpath)
452 452 filesList = [k for k in filesList if k.startswith(self.optchar)]
453 453 if len( filesList ) > 0:
454 454 filesList = sorted(filesList, key=str.lower)
455 455 filen = filesList[-1]
456 456 # the filename must have the following format
457 457 # 0 1234 567 89A BCDE (hex)
458 458 # x YYYY DDD SSS .ext
459 459 if isNumber(filen[8:11]):
460 460 setFile = int(filen[8:11]) #initialize the set counter with the set number of the last file
461 461 else:
462 462 setFile = -1
463 463 else:
464 464 setFile = -1 #initialize the set counter
465 465 else:
466 466 os.makedirs(fullpath)
467 467 setFile = -1 #initialize the set counter
468 468
469 469 if self.setType is None:
470 470 setFile += 1
471 471 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
472 472 timeTuple.tm_year,
473 473 timeTuple.tm_yday,
474 474 setFile,
475 475 ext )
476 476 else:
477 477 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
478 478 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
479 479 timeTuple.tm_year,
480 480 timeTuple.tm_yday,
481 481 setFile,
482 482 ext )
483 483
484 484 self.filename = os.path.join( path, subfolder, file )
485 485
486 486 #Setting HDF5 File
487 487 self.fp = h5py.File(self.filename, 'w')
488 488 #write metadata
489 489 self.writeMetadata(self.fp)
490 490 #Write data
491 491 self.writeData(self.fp)
492 492
493 493 def getLabel(self, name, x=None):
494 494
495 495 if x is None:
496 496 if 'Data' in self.description:
497 497 data = self.description['Data']
498 498 if 'Metadata' in self.description:
499 499 data.update(self.description['Metadata'])
500 500 else:
501 501 data = self.description
502 502 if name in data:
503 503 if isinstance(data[name], str):
504 504 return data[name]
505 505 elif isinstance(data[name], list):
506 506 return None
507 507 elif isinstance(data[name], dict):
508 508 for key, value in data[name].items():
509 509 return key
510 510 return name
511 511 else:
512 512 if 'Metadata' in self.description:
513 513 meta = self.description['Metadata']
514 514 else:
515 515 meta = self.description
516 516 if name in meta:
517 517 if isinstance(meta[name], list):
518 518 return meta[name][x]
519 519 elif isinstance(meta[name], dict):
520 520 for key, value in meta[name].items():
521 521 return value[x]
522 522 if 'cspc' in name:
523 523 return 'pair{:02d}'.format(x)
524 524 else:
525 525 return 'channel{:02d}'.format(x)
526 526
527 527 def writeMetadata(self, fp):
528 528
529 529 if self.description:
530 530 if 'Metadata' in self.description:
531 531 grp = fp.create_group('Metadata')
532 532 else:
533 533 grp = fp
534 534 else:
535 535 grp = fp.create_group('Metadata')
536 536
537 537 for i in range(len(self.metadataList)):
538 538 if not hasattr(self.dataOut, self.metadataList[i]):
539 539 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
540 540 continue
541 541 value = getattr(self.dataOut, self.metadataList[i])
542 542 if isinstance(value, bool):
543 543 if value is True:
544 544 value = 1
545 545 else:
546 546 value = 0
547 547 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
548 548 return
549 549
550 550 def writeData(self, fp):
551 551
552 552 if self.description:
553 553 if 'Data' in self.description:
554 554 grp = fp.create_group('Data')
555 555 else:
556 556 grp = fp
557 557 else:
558 558 grp = fp.create_group('Data')
559 559
560 560 dtsets = []
561 561 data = []
562 562
563 563 for dsInfo in self.dsList:
564 564 if dsInfo['nDim'] == 0:
565 565 ds = grp.create_dataset(
566 566 self.getLabel(dsInfo['variable']),
567 567 (self.blocksPerFile, ),
568 568 chunks=True,
569 569 dtype=numpy.float64)
570 570 dtsets.append(ds)
571 571 data.append((dsInfo['variable'], -1))
572 572 else:
573 573 label = self.getLabel(dsInfo['variable'])
574 574 if label is not None:
575 575 sgrp = grp.create_group(label)
576 576 else:
577 577 sgrp = grp
578 578 for i in range(dsInfo['dsNumber']):
579 579 ds = sgrp.create_dataset(
580 580 self.getLabel(dsInfo['variable'], i),
581 581 (self.blocksPerFile, ) + dsInfo['shape'][1:],
582 582 chunks=True,
583 583 dtype=dsInfo['dtype'])
584 584 dtsets.append(ds)
585 585 data.append((dsInfo['variable'], i))
586 586 fp.flush()
587 587
588 588 log.log('Creating file: {}'.format(fp.filename), self.name)
589 589
590 590 self.ds = dtsets
591 591 self.data = data
592 592 self.firsttime = True
593 593 self.blockIndex = 0
594 594 return
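    # For reference, the file layout produced by writeMetadata()/writeData() is
    # roughly the following (group names depend on `description`; the variable
    # names are illustrative):
    #
    #   /Metadata/...                      one dataset per item in metadataList
    #   /Data/<variable>                   (blocksPerFile,) datasets for 0-D variables
    #   /Data/<variable>/channel00, ...    per-channel datasets for N-D variables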
595 595
596 596 def putData(self):
597 597
598 598 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
599 599 self.closeFile()
600 600 self.setNextFile()
601 601
602 602 for i, ds in enumerate(self.ds):
603 603 attr, ch = self.data[i]
604 604 if ch == -1:
605 605 ds[self.blockIndex] = getattr(self.dataOut, attr)
606 606 else:
607 607 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
608 608
609 609 self.fp.flush()
610 610 self.blockIndex += 1
611 611 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
612 612
613 613 return
614 614
615 615 def closeFile(self):
616 616
617 617 if self.blockIndex != self.blocksPerFile:
618 618 for ds in self.ds:
619 619 ds.resize(self.blockIndex, axis=0)
620 620
621 self.fp.flush()
622 self.fp.close()
621 if self.fp:
622 self.fp.flush()
623 self.fp.close()
623 624
624 625 def close(self):
625 626
626 627 self.closeFile()
@@ -1,343 +1,343
1 1 '''
2 2 Created on Set 10, 2017
3 3
4 4 @author: Juan C. Espinoza
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 17 from schainpy.model.data.jrodata import Parameters
18 18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 19 from schainpy.utils import log
20 20
21 21 FILE_HEADER_STRUCTURE = numpy.dtype([
22 22 ('year', 'f'),
23 23 ('doy', 'f'),
24 24 ('nint', 'f'),
25 25 ('navg', 'f'),
26 26 ('fh', 'f'),
27 27 ('dh', 'f'),
28 28 ('nheights', 'f'),
29 29 ('ipp', 'f')
30 30 ])
31 31
32 32 REC_HEADER_STRUCTURE = numpy.dtype([
33 33 ('magic', 'f'),
34 34 ('hours', 'f'),
35 35 ('interval', 'f'),
36 36 ('h0', 'f'),
37 37 ('nheights', 'f'),
38 38 ('snr1', 'f'),
39 39 ('snr2', 'f'),
40 40 ('snr', 'f'),
41 41 ])
42 42
43 43 DATA_STRUCTURE = numpy.dtype([
44 44 ('range', '<u4'),
45 45 ('status', '<u4'),
46 46 ('zonal', '<f4'),
47 47 ('meridional', '<f4'),
48 48 ('vertical', '<f4'),
49 49 ('zonal_a', '<f4'),
50 50 ('meridional_a', '<f4'),
51 51 ('corrected_fading', '<f4'), # seconds
52 52 ('uncorrected_fading', '<f4'), # seconds
53 53 ('time_diff', '<f4'),
54 54 ('major_axis', '<f4'),
55 55 ('axial_ratio', '<f4'),
56 56 ('orientation', '<f4'),
57 57 ('sea_power', '<u4'),
58 58 ('sea_algorithm', '<u4')
59 59 ])
60 60
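# A hedged usage sketch of the header dtypes above (file name and path are
# hypothetical):
#
#     with open('/data/julia/EEJ2019001.dat', 'rb') as fp:
#         fh = numpy.fromfile(fp, FILE_HEADER_STRUCTURE, 1)  # one 8-float file header
#         rh = numpy.fromfile(fp, REC_HEADER_STRUCTURE, 1)   # first record header
#
# fh['dh'] and fh['ipp'] give the height resolution and IPP used by setNextFile()
# below; rh['nheights'] and rh['h0'] are used by readHeader() to build the height axis.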
61 61
62 62 class JULIAParamReader(JRODataReader, ProcessingUnit):
63 63 '''
64 64 Julia data (eej, spf, 150km) *.dat files
65 65 '''
66 66
67 67 ext = '.dat'
68 68
69 69 def __init__(self, **kwargs):
70 70
71 71 ProcessingUnit.__init__(self, **kwargs)
72 72
73 73 self.dataOut = Parameters()
74 74 self.counter_records = 0
75 75 self.flagNoMoreFiles = 0
76 76 self.isConfig = False
77 77 self.filename = None
78 78 self.clockpulse = 0.15
79 79 self.kd = 213.6
80 80
81 81 def setup(self,
82 82 path=None,
83 83 startDate=None,
84 84 endDate=None,
85 85 ext=None,
86 86 startTime=datetime.time(0, 0, 0),
87 87 endTime=datetime.time(23, 59, 59),
88 88 timezone=0,
89 89 format=None,
90 90 **kwargs):
91 91
92 92 self.path = path
93 93 self.startDate = startDate
94 94 self.endDate = endDate
95 95 self.startTime = startTime
96 96 self.endTime = endTime
97 97 self.datatime = datetime.datetime(1900, 1, 1)
98 98 self.format = format
99 99
100 100 if self.path is None:
101 101 raise ValueError("The path is not valid")
102 102
103 103 if ext is None:
104 104 ext = self.ext
105 105
106 106 self.search_files(self.path, startDate, endDate, ext)
107 107 self.timezone = timezone
108 108 self.fileIndex = 0
109 109
110 110 if not self.fileList:
111 111             log.warning('There are no files matching these dates in the folder: {}'.format(
112 112 path), self.name)
113 113
114 114 self.setNextFile()
115 115
116 116 def search_files(self, path, startDate, endDate, ext):
117 117 '''
118 118         Searching for Julia rawdata files in path
119 119         Creating a list of files to process, included in [startDate, endDate]
120 120 
121 121         Input:
122 122             path - Path to find Julia rawdata files
123 123             startDate - Select files from this date
124 124             endDate - Select files until this date
125 125 ext - Extension of the file to read
126 126 '''
127 127
128 128 log.success('Searching files in {} '.format(path), self.name)
129 129 fileList0 = glob.glob1(path, '{}*{}'.format(self.format.upper(), ext))
130 130 fileList0.sort()
131 131
132 132 self.fileList = []
133 133 self.dateFileList = []
134 134
135 135 for thisFile in fileList0:
136 136 year = thisFile[2:4]
137 137 if not isNumber(year):
138 138 continue
139 139
140 140 month = thisFile[4:6]
141 141 if not isNumber(month):
142 142 continue
143 143
144 144 day = thisFile[6:8]
145 145 if not isNumber(day):
146 146 continue
147 147
148 148 year, month, day = int(year), int(month), int(day)
149 149 dateFile = datetime.date(year+2000, month, day)
150 150
151 151 if (startDate > dateFile) or (endDate < dateFile):
152 152 continue
153 153
154 154 self.fileList.append(thisFile)
155 155 self.dateFileList.append(dateFile)
156 156
157 157 return
158 158
159 159 def setNextFile(self):
160 160
161 161 file_id = self.fileIndex
162 162
163 163 if file_id == len(self.fileList):
164 164 log.success('No more files in the folder', self.name)
165 165 self.flagNoMoreFiles = 1
166 166 return 0
167 167
168 168 log.success('Opening {}'.format(self.fileList[file_id]), self.name)
169 169 filename = os.path.join(self.path, self.fileList[file_id])
170 170
171 171 dirname, name = os.path.split(filename)
172 172 self.siteFile = name.split('.')[0]
173 173 if self.filename is not None:
174 174 self.fp.close()
175 175 self.filename = filename
176 176 self.fp = open(self.filename, 'rb')
177 177
178 178 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
179 179 yy = self.header_file['year'] - 1900 * (self.header_file['year'] > 3000)
180 180 self.year = int(yy + 1900 * (yy < 1000))
181 181 self.doy = int(self.header_file['doy'])
182 182 self.dH = round(self.header_file['dh'], 2)
183 183 self.ipp = round(self.header_file['ipp'], 2)
184 184 self.sizeOfFile = os.path.getsize(self.filename)
185 185 self.counter_records = 0
186 186 self.flagIsNewFile = 0
187 187 self.fileIndex += 1
188 188
189 189 return 1
190 190
191 191 def readNextBlock(self):
192 192
193 193 while True:
194 194 if not self.readBlock():
195 195 self.flagIsNewFile = 1
196 196 if not self.setNextFile():
197 197 return 0
198 198
199 199 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
200 200 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
201 201 log.warning(
202 202 'Reading Record No. {} -> {} [Skipping]'.format(
203 203 self.counter_records,
204 204 self.datatime.ctime()),
205 205 self.name)
206 206 continue
207 207 break
208 208
209 209 log.log('Reading Record No. {} -> {}'.format(
210 210 self.counter_records,
211 211 self.datatime.ctime()), self.name)
212 212
213 213 return 1
214 214
215 215 def readBlock(self):
216 216
217 217 pointer = self.fp.tell()
218 218 heights, dt = self.readHeader()
219 219 self.fp.seek(pointer)
220 220 buffer_h = []
221 221 buffer_d = []
222 222 while True:
223 223 pointer = self.fp.tell()
224 224 if pointer == self.sizeOfFile:
225 225 return 0
226 226 heights, datatime = self.readHeader()
227 227 if dt == datatime:
228 228 buffer_h.append(heights)
229 229 buffer_d.append(self.readData(len(heights)))
230 230 continue
231 231 self.fp.seek(pointer)
232 232 break
233 233
234 234 if dt.date() > self.datatime.date():
235 235 self.flagDiscontinuousBlock = 1
236 236 self.datatime = dt
237 237 self.time = (dt - datetime.datetime(1970, 1, 1)).total_seconds() + time.timezone
238 238 self.heights = numpy.concatenate(buffer_h)
239 239 self.buffer = numpy.zeros((5, len(self.heights))) + numpy.nan
240 240 self.buffer[0, :] = numpy.concatenate([buf[0] for buf in buffer_d])
241 241 self.buffer[1, :] = numpy.concatenate([buf[1] for buf in buffer_d])
242 242 self.buffer[2, :] = numpy.concatenate([buf[2] for buf in buffer_d])
243 243 self.buffer[3, :] = numpy.concatenate([buf[3] for buf in buffer_d])
244 244 self.buffer[4, :] = numpy.concatenate([buf[4] for buf in buffer_d])
245 245
246 246 self.counter_records += 1
247 247
248 248 return 1
249 249
250 250 def readHeader(self):
251 251 '''
252 252 Parse recordHeader
253 253 '''
254 254
255 255 self.header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
256 256 self.interval = self.header_rec['interval']
257 257 if self.header_rec['magic'] == 888.:
258 258 self.header_rec['h0'] = round(self.header_rec['h0'], 2)
259 259 nheights = int(self.header_rec['nheights'])
260 260 hours = float(self.header_rec['hours'][0])
261 261 heights = numpy.arange(nheights) * self.dH + self.header_rec['h0']
262 262 datatime = datetime.datetime(self.year, 1, 1) + datetime.timedelta(days=self.doy-1, hours=hours)
263 263 return heights, datatime
264 264 else:
265 265 return False
266 266
267 267 def readData(self, N):
268 268 '''
269 269 Parse data
270 270 '''
271 271
272 272 buffer = numpy.fromfile(self.fp, 'f', 8*N).reshape(N, 8)
273 273
274 274 pow0 = buffer[:, 0]
275 275 pow1 = buffer[:, 1]
276 276 acf0 = (buffer[:,2] + buffer[:,3]*1j) / pow0
277 277 acf1 = (buffer[:,4] + buffer[:,5]*1j) / pow1
278 278 dccf = (buffer[:,6] + buffer[:,7]*1j) / (pow0*pow1)
279 279
280 280 ### SNR
281 281 sno = (pow0 + pow1 - self.header_rec['snr']) / self.header_rec['snr']
282 282 sno10 = numpy.log10(sno)
283 283 # dsno = 1.0 / numpy.sqrt(self.header_file['nint'] * self.header_file['navg']) * (1 + (1 / sno))
284 284
285 285 ### Vertical Drift
286 286 sp = numpy.sqrt(numpy.abs(acf0)*numpy.abs(acf1))
287 287 sp[numpy.where(numpy.abs(sp) >= 1.0)] = numpy.sqrt(0.9999)
288 288
289 289 vzo = -numpy.arctan2(acf0.imag + acf1.imag,acf0.real + acf1.real)*1.5E5*1.5/(self.ipp*numpy.pi)
290 290 dvzo = numpy.sqrt(1.0 - sp*sp)*0.338*1.5E5/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*sp*self.ipp)
291 291 err = numpy.where(dvzo <= 0.1)
292 292 dvzo[err] = 0.1
293 293
294 294 #Zonal Drifts
295 295 dt = self.header_file['nint']*self.ipp / 1.5E5
296 296 coh = numpy.sqrt(numpy.abs(dccf))
297 297 err = numpy.where(coh >= 1.0)
298 298 coh[err] = numpy.sqrt(0.99999)
299 299
300 300 err = numpy.where(coh <= 0.1)
301 301 coh[err] = numpy.sqrt(0.1)
302 302
303 303 vxo = numpy.arctan2(dccf.imag, dccf.real)*self.header_rec['h0']*1.0E3/(self.kd*dt)
304 304 dvxo = numpy.sqrt(1.0 - coh*coh)*self.header_rec['h0']*1.0E3/(numpy.sqrt(self.header_file['nint']*self.header_file['navg'])*coh*self.kd*dt)
305 305
306 306 err = numpy.where(dvxo <= 0.1)
307 307 dvxo[err] = 0.1
308 308
309 309 return vzo, dvzo, vxo, dvxo, sno10
310 310
311 311 def set_output(self):
312 312 '''
313 313 Storing data from databuffer to dataOut object
314 314 '''
315 315
316 self.dataOut.data_SNR = self.buffer[4].reshape(1, -1)
316 self.dataOut.data_snr = self.buffer[4].reshape(1, -1)
317 317 self.dataOut.heightList = self.heights
318 318 self.dataOut.data_param = self.buffer[0:4,]
319 319 self.dataOut.utctimeInit = self.time
320 320 self.dataOut.utctime = self.time
321 321 self.dataOut.useLocalTime = True
322 322 self.dataOut.paramInterval = self.interval
323 323 self.dataOut.timezone = self.timezone
324 324 self.dataOut.sizeOfFile = self.sizeOfFile
325 325 self.dataOut.flagNoData = False
326 326 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
327 327
328 328 def getData(self):
329 329 '''
330 330         Read the next block and store it in the dataOut object
331 331 '''
332 332 if self.flagNoMoreFiles:
333 333 self.dataOut.flagNoData = True
334 334 log.success('No file left to process', self.name)
335 335 return 0
336 336
337 337 if not self.readNextBlock():
338 338 self.dataOut.flagNoData = True
339 339 return 0
340 340
341 341 self.set_output()
342 342
343 343 return 1 No newline at end of file
@@ -1,402 +1,402
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import time
11 11 from time import gmtime
12 12
13 13 from numpy import transpose
14 14
15 15 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
16 16 from schainpy.model.data.jrodata import Parameters
17 17
18 18
19 19 class BLTRParametersProc(ProcessingUnit):
20 20 '''
21 21 Processing unit for BLTR parameters data (winds)
22 22
23 23 Inputs:
24 24 self.dataOut.nmodes - Number of operation modes
25 25 self.dataOut.nchannels - Number of channels
26 26 self.dataOut.nranges - Number of ranges
27 27
28 self.dataOut.data_SNR - SNR array
28 self.dataOut.data_snr - SNR array
29 29 self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
30 30 self.dataOut.height - Height array (km)
31 31 self.dataOut.time - Time array (seconds)
32 32
33 33 self.dataOut.fileIndex -Index of the file currently read
34 34 self.dataOut.lat - Latitude coordinate of BLTR location
35 35
36 36 self.dataOut.doy - Experiment doy (number of the day in the current year)
37 37 self.dataOut.month - Experiment month
38 38 self.dataOut.day - Experiment day
39 39 self.dataOut.year - Experiment year
40 40 '''
41 41
42 42 def __init__(self):
43 43 '''
44 44 Inputs: None
45 45 '''
46 46 ProcessingUnit.__init__(self)
47 47 self.dataOut = Parameters()
48 48
49 49 def setup(self, mode):
50 50 '''
51 51 '''
52 52 self.dataOut.mode = mode
53 53
54 54 def run(self, mode, snr_threshold=None):
55 55 '''
56 56 Inputs:
57 57 mode = High resolution (0) or Low resolution (1) data
58 58 snr_threshold = snr filter value
59 59 '''
60 60
61 61 if not self.isConfig:
62 62 self.setup(mode)
63 63 self.isConfig = True
64 64
65 65 if self.dataIn.type == 'Parameters':
66 66 self.dataOut.copy(self.dataIn)
67 67
68 68 self.dataOut.data_param = self.dataOut.data[mode]
69 69 self.dataOut.heightList = self.dataOut.height[0]
70 self.dataOut.data_SNR = self.dataOut.data_SNR[mode]
70 self.dataOut.data_snr = self.dataOut.data_snr[mode]
71 71
72 72 if snr_threshold is not None:
73 SNRavg = numpy.average(self.dataOut.data_SNR, axis=0)
73 SNRavg = numpy.average(self.dataOut.data_snr, axis=0)
74 74 SNRavgdB = 10*numpy.log10(SNRavg)
75 75 for i in range(3):
76 76 self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan
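                # e.g. with snr_threshold=-10 (a hypothetical value), every height whose
                # channel-averaged SNR is at or below -10 dB is set to NaN in the first
                # three rows of data_param.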
77 77
78 78 # TODO
79 79
80 80 class OutliersFilter(Operation):
81 81
82 82 def __init__(self):
83 83 '''
84 84 '''
85 85 Operation.__init__(self)
86 86
87 87     def run(self, svalue, svalue2, method, factor, filter, npoints=9):
88 88 '''
89 89 Inputs:
90 90 svalue - string to select array velocity
91 91 svalue2 - string to choose axis filtering
92 92 method - 0 for SMOOTH or 1 for MEDIAN
93 93 factor - number used to set threshold
94 94 filter - 1 for data filtering using the standard deviation criteria else 0
95 95 npoints - number of points for mask filter
96 96 '''
97 97
98 98         print(' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor))
99 99
100 100
101 101 yaxis = self.dataOut.heightList
102 102 xaxis = numpy.array([[self.dataOut.utctime]])
103 103
104 104 # Zonal
105 105 value_temp = self.dataOut.data_output[0]
106 106
107 107         # Meridional
108 108 value_temp = self.dataOut.data_output[1]
109 109
110 110 # Vertical
111 111 value_temp = numpy.transpose(self.dataOut.data_output[2])
112 112
113 113 htemp = yaxis
114 114 std = value_temp
115 115 for h in range(len(htemp)):
116 116 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
117 117 minvalid = npoints
118 118
119 119 #only if valid values greater than the minimum required (10%)
120 120 if nvalues_valid > minvalid:
121 121
122 122 if method == 0:
123 123 #SMOOTH
124 124 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
125 125
126 126
127 127 if method == 1:
128 128 #MEDIAN
129 129 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
130 130
131 131 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
132 132
133 133 threshold = dw*factor
134 134 value_temp[numpy.where(w > threshold),h] = numpy.nan
135 135 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
136 136
137 137
138 138 #At the end
139 139 if svalue2 == 'inHeight':
140 140 value_temp = numpy.transpose(value_temp)
141 141 output_array[:,m] = value_temp
142 142
143 143 if svalue == 'zonal':
144 144 self.dataOut.data_output[0] = output_array
145 145
146 146 elif svalue == 'meridional':
147 147 self.dataOut.data_output[1] = output_array
148 148
149 149 elif svalue == 'vertical':
150 150 self.dataOut.data_output[2] = output_array
151 151
152 152 return self.dataOut.data_output
153 153
154 154
155 155 def Median(self,input,width):
156 156 '''
157 157 Inputs:
158 158 input - Velocity array
159 159 width - Number of points for mask filter
160 160
161 161 '''
162 162
163 163 if numpy.mod(width,2) == 1:
164 164 pc = int((width - 1) / 2)
165 165 cont = 0
166 166 output = []
167 167
168 168 for i in range(len(input)):
169 169 if i >= pc and i < len(input) - pc:
170 170 new2 = input[i-pc:i+pc+1]
171 171 temp = numpy.where(numpy.isfinite(new2))
172 172 new = new2[temp]
173 173 value = numpy.median(new)
174 174 output.append(value)
175 175
176 176 output = numpy.array(output)
177 177 output = numpy.hstack((input[0:pc],output))
178 178 output = numpy.hstack((output,input[-pc:len(input)]))
179 179
180 180 return output
181 181
182 182 def Smooth(self,input,width,edge_truncate = None):
183 183 '''
184 184 Inputs:
185 185 input - Velocity array
186 186 width - Number of points for mask filter
187 187             edge_truncate - 1 to truncate the convolution product at the edges; otherwise the original edge values of the input are kept
188 188
189 189 '''
190 190
191 191 if numpy.mod(width,2) == 0:
192 192 real_width = width + 1
193 193 nzeros = width / 2
194 194 else:
195 195 real_width = width
196 196 nzeros = (width - 1) / 2
197 197
198 198         half_width = int(real_width/2)
199 199 length = len(input)
200 200
201 201 gate = numpy.ones(real_width,dtype='float')
202 202 norm_of_gate = numpy.sum(gate)
203 203
204 204 nan_process = 0
205 205 nan_id = numpy.where(numpy.isnan(input))
206 206 if len(nan_id[0]) > 0:
207 207 nan_process = 1
208 208 pb = numpy.zeros(len(input))
209 209 pb[nan_id] = 1.
210 210 input[nan_id] = 0.
211 211
212 212 if edge_truncate == True:
213 213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
214 214 elif edge_truncate == False or edge_truncate == None:
215 215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
216 216 output = numpy.hstack((input[0:half_width],output))
217 217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
218 218
219 219 if nan_process:
220 220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
221 221 pb = numpy.hstack((numpy.zeros(half_width),pb))
222 222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
223 223 output[numpy.where(pb > 0.9999)] = numpy.nan
224 224 input[nan_id] = numpy.nan
225 225 return output
226 226
227 227 def Average(self,aver=0,nhaver=1):
228 228 '''
229 229 Inputs:
230 230 aver - Indicates the time period over which is averaged or consensus data
231 231 nhaver - Indicates the decimation factor in heights
232 232
233 233 '''
234 234 nhpoints = 48
235 235
236 236 lat_piura = -5.17
237 237 lat_huancayo = -12.04
238 238 lat_porcuya = -5.8
239 239
240 240 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
241 241 hcm = 3.
242 242 if self.dataOut.year == 2003 :
243 243 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
244 244 nhpoints = 12
245 245
246 246 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
247 247 hcm = 3.
248 248 if self.dataOut.year == 2003 :
249 249 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
250 250 nhpoints = 12
251 251
252 252
253 253 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
254 254 hcm = 5.#2
255 255
256 256 pdata = 0.2
257 257 taver = [1,2,3,4,6,8,12,24]
258 258 t0 = 0
259 259 tf = 24
260 260 ntime =(tf-t0)/taver[aver]
261 261 ti = numpy.arange(ntime)
262 262 tf = numpy.arange(ntime) + taver[aver]
263 263
264 264
265 265 old_height = self.dataOut.heightList
266 266
267 267 if nhaver > 1:
268 268             num_hei = int(len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes)
269 269 deltha = 0.05*nhaver
270 270 minhvalid = pdata*nhaver
271 271 for im in range(self.dataOut.nmodes):
272 272 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
273 273
274 274
275 275 data_fHeigths_List = []
276 276 data_fZonal_List = []
277 277 data_fMeridional_List = []
278 278 data_fVertical_List = []
279 279 startDTList = []
280 280
281 281
282 282 for i in range(ntime):
283 283 height = old_height
284 284
285 285 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
286 286 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
287 287
288 288
289 289 limit_sec1 = time.mktime(start.timetuple())
290 290 limit_sec2 = time.mktime(stop.timetuple())
291 291
292 292 t1 = numpy.where(self.f_timesec >= limit_sec1)
293 293 t2 = numpy.where(self.f_timesec < limit_sec2)
294 294 time_select = []
295 295 for val_sec in t1[0]:
296 296 if val_sec in t2[0]:
297 297 time_select.append(val_sec)
298 298
299 299
300 300 time_select = numpy.array(time_select,dtype = 'int')
301 301 minvalid = numpy.ceil(pdata*nhpoints)
302 302
303 303 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
304 304 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
305 305 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
306 306
307 307 if nhaver > 1:
308 308 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
309 309 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
310 310 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
311 311
312 312 if len(time_select) > minvalid:
313 313 time_average = self.f_timesec[time_select]
314 314
315 315 for im in range(self.dataOut.nmodes):
316 316
317 317 for ih in range(self.dataOut.nranges):
318 318 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
319 319 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
320 320
321 321 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
322 322 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
323 323
324 324 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
325 325 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
326 326
327 327 if nhaver > 1:
328 328 for ih in range(num_hei):
329 329 hvalid = numpy.arange(nhaver) + nhaver*ih
330 330
331 331 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
332 332 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
333 333
334 334 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
335 335 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
336 336
337 337 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
338 338 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
339 339 if nhaver > 1:
340 340 zon_aver = new_zon_aver
341 341 mer_aver = new_mer_aver
342 342 ver_aver = new_ver_aver
343 343 height = new_height
344 344
345 345
346 346 tstart = time_average[0]
347 347 tend = time_average[-1]
348 348 startTime = time.gmtime(tstart)
349 349
350 350 year = startTime.tm_year
351 351 month = startTime.tm_mon
352 352 day = startTime.tm_mday
353 353 hour = startTime.tm_hour
354 354 minute = startTime.tm_min
355 355 second = startTime.tm_sec
356 356
357 357 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
358 358
359 359
360 360 o_height = numpy.array([])
361 361 o_zon_aver = numpy.array([])
362 362 o_mer_aver = numpy.array([])
363 363 o_ver_aver = numpy.array([])
364 364 if self.dataOut.nmodes > 1:
365 365 for im in range(self.dataOut.nmodes):
366 366
367 367 if im == 0:
368 368 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
369 369 else:
370 370 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
371 371
372 372
373 373 ht = h_select[0]
374 374
375 375 o_height = numpy.hstack((o_height,height[im,ht]))
376 376 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
377 377 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
378 378 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
379 379
380 380 data_fHeigths_List.append(o_height)
381 381 data_fZonal_List.append(o_zon_aver)
382 382 data_fMeridional_List.append(o_mer_aver)
383 383 data_fVertical_List.append(o_ver_aver)
384 384
385 385
386 386 else:
387 387 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
388 388 ht = h_select[0]
389 389 o_height = numpy.hstack((o_height,height[im,ht]))
390 390 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
391 391 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
392 392 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
393 393
394 394 data_fHeigths_List.append(o_height)
395 395 data_fZonal_List.append(o_zon_aver)
396 396 data_fMeridional_List.append(o_mer_aver)
397 397 data_fVertical_List.append(o_ver_aver)
398 398
399 399
400 400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401 401
402 402
@@ -1,3956 +1,3956
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
16 16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 26 import matplotlib.pyplot as plt
27 27
28 28 SPEED_OF_LIGHT = 299792458
29 29
30 30
31 31 '''solving pickling issue'''
32 32
33 33 def _pickle_method(method):
34 34 func_name = method.__func__.__name__
35 35 obj = method.__self__
36 36 cls = method.__self__.__class__
37 37 return _unpickle_method, (func_name, obj, cls)
38 38
39 39 def _unpickle_method(func_name, obj, cls):
40 40 for cls in cls.mro():
41 41 try:
42 42 func = cls.__dict__[func_name]
43 43 except KeyError:
44 44 pass
45 45 else:
46 46 break
47 47 return func.__get__(obj, cls)
48 48
49 49
50 50 class ParametersProc(ProcessingUnit):
51 51
52 52 METHODS = {}
53 53 nSeconds = None
54 54
55 55 def __init__(self):
56 56 ProcessingUnit.__init__(self)
57 57
58 58 # self.objectDict = {}
59 59 self.buffer = None
60 60 self.firstdatatime = None
61 61 self.profIndex = 0
62 62 self.dataOut = Parameters()
63 63         self.setupReq = False # Add to all processing units
64 64
65 65 def __updateObjFromInput(self):
66 66
67 67 self.dataOut.inputUnit = self.dataIn.type
68 68
69 69 self.dataOut.timeZone = self.dataIn.timeZone
70 70 self.dataOut.dstFlag = self.dataIn.dstFlag
71 71 self.dataOut.errorCount = self.dataIn.errorCount
72 72 self.dataOut.useLocalTime = self.dataIn.useLocalTime
73 73
74 74 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
75 75 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
76 76 self.dataOut.channelList = self.dataIn.channelList
77 77 self.dataOut.heightList = self.dataIn.heightList
78 78 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
79 79 # self.dataOut.nHeights = self.dataIn.nHeights
80 80 # self.dataOut.nChannels = self.dataIn.nChannels
81 81 # self.dataOut.nBaud = self.dataIn.nBaud
82 82 # self.dataOut.nCode = self.dataIn.nCode
83 83 # self.dataOut.code = self.dataIn.code
84 84 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
85 85 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
86 86 # self.dataOut.utctime = self.firstdatatime
87 87 self.dataOut.utctime = self.dataIn.utctime
88 88         self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
89 89         self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has no flip applied
90 90 self.dataOut.nCohInt = self.dataIn.nCohInt
91 91 # self.dataOut.nIncohInt = 1
92 92 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
93 93 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
94 94 self.dataOut.timeInterval1 = self.dataIn.timeInterval
95 95 self.dataOut.heightList = self.dataIn.heightList
96 96 self.dataOut.frequency = self.dataIn.frequency
97 97 # self.dataOut.noise = self.dataIn.noise
98 98
99 99 def run(self):
100 100
101 101
102 102
103 103 #---------------------- Voltage Data ---------------------------
104 104
105 105 if self.dataIn.type == "Voltage":
106 106
107 107 self.__updateObjFromInput()
108 108 self.dataOut.data_pre = self.dataIn.data.copy()
109 109 self.dataOut.flagNoData = False
110 110 self.dataOut.utctimeInit = self.dataIn.utctime
111 111 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
112 112 if hasattr(self.dataIn, 'dataPP_POW'):
113 113 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
114 114
115 115 if hasattr(self.dataIn, 'dataPP_POWER'):
116 116 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
117 117
118 118 if hasattr(self.dataIn, 'dataPP_DOP'):
119 119 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
120 120
121 121 if hasattr(self.dataIn, 'dataPP_SNR'):
122 122 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
123 123
124 124 if hasattr(self.dataIn, 'dataPP_WIDTH'):
125 125 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
126 126 return
127 127
128 128 #---------------------- Spectra Data ---------------------------
129 129
130 130 if self.dataIn.type == "Spectra":
131 131
132 132 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
133 133 self.dataOut.data_spc = self.dataIn.data_spc
134 134 self.dataOut.data_cspc = self.dataIn.data_cspc
135 135 self.dataOut.nProfiles = self.dataIn.nProfiles
136 136 self.dataOut.nIncohInt = self.dataIn.nIncohInt
137 137 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
138 138 self.dataOut.ippFactor = self.dataIn.ippFactor
139 139 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
140 140 self.dataOut.spc_noise = self.dataIn.getNoise()
141 141 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
142 142 # self.dataOut.normFactor = self.dataIn.normFactor
143 143 self.dataOut.pairsList = self.dataIn.pairsList
144 144 self.dataOut.groupList = self.dataIn.pairsList
145 145 self.dataOut.flagNoData = False
146 146
147 147 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
148 148 self.dataOut.ChanDist = self.dataIn.ChanDist
149 149 else: self.dataOut.ChanDist = None
150 150
151 151 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
152 152 # self.dataOut.VelRange = self.dataIn.VelRange
153 153 #else: self.dataOut.VelRange = None
154 154
155 155 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
156 156 self.dataOut.RadarConst = self.dataIn.RadarConst
157 157
158 158 if hasattr(self.dataIn, 'NPW'): #NPW
159 159 self.dataOut.NPW = self.dataIn.NPW
160 160
161 161 if hasattr(self.dataIn, 'COFA'): #COFA
162 162 self.dataOut.COFA = self.dataIn.COFA
163 163
164 164
165 165
166 166 #---------------------- Correlation Data ---------------------------
167 167
168 168 if self.dataIn.type == "Correlation":
169 169 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
170 170
171 171 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
172 172 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
173 173 self.dataOut.groupList = (acf_pairs, ccf_pairs)
174 174
175 175 self.dataOut.abscissaList = self.dataIn.lagRange
176 176 self.dataOut.noise = self.dataIn.noise
177 self.dataOut.data_SNR = self.dataIn.SNR
177 self.dataOut.data_snr = self.dataIn.SNR
178 178 self.dataOut.flagNoData = False
179 179 self.dataOut.nAvg = self.dataIn.nAvg
180 180
181 181 #---------------------- Parameters Data ---------------------------
182 182
183 183 if self.dataIn.type == "Parameters":
184 184 self.dataOut.copy(self.dataIn)
185 185 self.dataOut.flagNoData = False
186 186
187 187 return True
188 188
189 189 self.__updateObjFromInput()
190 190 self.dataOut.utctimeInit = self.dataIn.utctime
191 191 self.dataOut.paramInterval = self.dataIn.timeInterval
192 192
193 193 return
194 194
195 195
196 196 def target(tups):
197 197
198 198 obj, args = tups
199 199
200 200 return obj.FitGau(args)
201 201
202 202 class RemoveWideGC(Operation):
203 203     ''' This class removes the wide clutter and replaces it with simple interpolation points
204 204 This mainly applies to CLAIRE radar
205 205
206 206 ClutterWidth : Width to look for the clutter peak
207 207
208 208 Input:
209 209
210 210 self.dataOut.data_pre : SPC and CSPC
211 211 self.dataOut.spc_range : To select wind and rainfall velocities
212 212
213 213 Affected:
214 214
215 215 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
216 216
217 217 Written by D. Scipión 25.02.2021
218 218 '''
219 219 def __init__(self):
220 220 Operation.__init__(self)
221 221 self.i = 0
222 222 self.ich = 0
223 223 self.ir = 0
224 224
225 225 def run(self, dataOut, ClutterWidth=2.5):
226 226
227 227 self.spc = dataOut.data_pre[0].copy()
228 228 self.spc_out = dataOut.data_pre[0].copy()
229 229 self.Num_Chn = self.spc.shape[0]
230 230 self.Num_Hei = self.spc.shape[1]
231 231 VelRange = dataOut.spc_range[2][:-1]
232 232 dv = VelRange[1]-VelRange[0]
233 233
234 234 # Find the velocities that corresponds to zero
235 235 gc_values = numpy.where(numpy.abs(VelRange) <= ClutterWidth)
236 236
237 237 # Removing novalid data from the spectra
238 238 for ich in range(self.Num_Chn):
239 239             # Estimate the noise at each range
240 240
241 241 for ir in range(self.Num_Hei):
242 242                 # Estimate the noise at each range
243 243 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
244 244 # Removing the noise floor at each range
245 245 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
246 246 self.spc[novalid,ir] = HSn
247 247
248 248 junk = [HSn, numpy.transpose(self.spc[ich,gc_values,ir]), HSn]
249 249                 j1index = numpy.where(numpy.diff(junk)>0)
250 250                 j2index = numpy.where(numpy.diff(junk)<0)
251 251 junk3 = numpy.diff(j1index)
252 252 junk4 = numpy.diff(j2index)
253 253
254 254 valleyindex = j2index[junk4>1]
255 255 peakindex = j1index[junk3>1]
256 256
257 257 # Identify the clutter (close to zero)
258 258                 isvalid = numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv)
259 259 # if isempty(isvalid)
260 260 # continue
261 261 # if numel(isvalid)>1
262 262 # [~,vindex]= numpy.max(spc[gc_values[peakindex[isvalid]],ir])
263 263 # isvalid = isvalid[vindex]
264 264 # clutter peak
265 265 gcpeak = peakindex[isvalid]
266 266 # left and right index of the clutter
267 267 gcvl = valleyindex[numpy.where(valleyindex < gcpeak, 1, 'last' )]
268 268 gcvr = valleyindex[numpy.where(valleyindex > gcpeak, 1)]
269 269
270 270 # Removing the clutter
271 271 interpindex = [gc_values(gcvl), gc_values(gcvr)]
272 272 gcindex = gc_values[gcvl+1:gcvr-1]
273 273 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
274 274
275 275 dataOut.data_pre[0] = self.spc_out
276 276 return dataOut
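    # A hedged usage sketch (not taken from this file): in a schainpy Project this
    # operation would typically be attached to a spectra processing unit, e.g.
    #
    #     op = procUnit.addOperation(name='RemoveWideGC', optype='other')
    #     op.addParameter(name='ClutterWidth', value='3.0', format='float')
    #
    # where `procUnit` and the parameter value are assumptions for illustration.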
277 277
278 278 class SpectralFilters(Operation):
279 279     ''' This class replaces the non-valid values with noise for each channel
280 280 This applies to CLAIRE RADAR
281 281
282 282 PositiveLimit : RightLimit of novalid data
283 283 NegativeLimit : LeftLimit of novalid data
284 284
285 285 Input:
286 286
287 287 self.dataOut.data_pre : SPC and CSPC
288 288 self.dataOut.spc_range : To select wind and rainfall velocities
289 289
290 290 Affected:
291 291
292 292 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
293 293
294 294 Written by D. Scipión 29.01.2021
295 295 '''
296 296 def __init__(self):
297 297 Operation.__init__(self)
298 298 self.i = 0
299 299
300 300 def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=-1.5):
301 301
302 302 self.spc = dataOut.data_pre[0].copy()
303 303 self.Num_Chn = self.spc.shape[0]
304 304 VelRange = dataOut.spc_range[2]
305 305
306 306 # novalid corresponds to data within the Negative and PositiveLimit
307 307 novalid = numpy.where((VelRange[:-1] >= NegativeLimit) & (VelRange[:-1] <= PositiveLimit))
308 308
309 309 # Removing novalid data from the spectra
310 310 for i in range(self.Num_Chn):
311 311 self.spc[i,novalid,:] = dataOut.noise[i]
312 312 dataOut.data_pre[0] = self.spc
313 313 return dataOut
314 314
315 315 class GaussianFit(Operation):
316 316
317 317 '''
318 318     Function that fits one or two generalized Gaussians (gg) based
319 319     on the PSD shape across a "power band" identified from a cumsum of
320 320     the measured spectrum minus the noise.
321 321
322 322 Input:
323 323 self.dataOut.data_pre : SelfSpectra
324 324
325 325 Output:
326 326 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
327 327
328 328 '''
329 329 def __init__(self):
330 330 Operation.__init__(self)
331 331 self.i=0
332 332
333 333
334 334 def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
335 335 """This routine will find a couple of generalized Gaussians to a power spectrum
336 336 input: spc
337 337 output:
338 338 Amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1,noise
339 339 """
340 340
341 341 self.spc = dataOut.data_pre[0].copy()
342 342 self.Num_Hei = self.spc.shape[2]
343 343 self.Num_Bin = self.spc.shape[1]
344 344 self.Num_Chn = self.spc.shape[0]
345 345 Vrange = dataOut.abscissaList
346 346
347 347 GauSPC = numpy.empty([self.Num_Chn,self.Num_Bin,self.Num_Hei])
348 348 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
349 349 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
350 350 SPC_ch1[:] = numpy.NaN
351 351 SPC_ch2[:] = numpy.NaN
352 352
353 353
354 354 start_time = time.time()
355 355
356 356 noise_ = dataOut.spc_noise[0].copy()
357 357
358 358
359 359 pool = Pool(processes=self.Num_Chn)
360 360 args = [(Vrange, Ch, pnoise, noise_, num_intg, SNRlimit) for Ch in range(self.Num_Chn)]
361 361 objs = [self for __ in range(self.Num_Chn)]
362 362 attrs = list(zip(objs, args))
363 363 gauSPC = pool.map(target, attrs)
364 364         dataOut.SPCparam = numpy.asarray(gauSPC)
365 365
366 366 ''' Parameters:
367 367 1. Amplitude
368 368 2. Shift
369 369 3. Width
370 370 4. Power
371 371 '''
372 372
373 373 def FitGau(self, X):
374 374
375 375 Vrange, ch, pnoise, noise_, num_intg, SNRlimit = X
376 376
377 377 SPCparam = []
378 378 SPC_ch1 = numpy.empty([self.Num_Bin,self.Num_Hei])
379 379 SPC_ch2 = numpy.empty([self.Num_Bin,self.Num_Hei])
380 380 SPC_ch1[:] = 0 #numpy.NaN
381 381 SPC_ch2[:] = 0 #numpy.NaN
382 382
383 383
384 384
385 385 for ht in range(self.Num_Hei):
386 386
387 387
388 388 spc = numpy.asarray(self.spc)[ch,:,ht]
389 389
390 390 #############################################
391 391 # normalizing spc and noise
392 392 # This part differs from gg1
393 393 spc_norm_max = max(spc)
394 394 #spc = spc / spc_norm_max
395 395 pnoise = pnoise #/ spc_norm_max
396 396 #############################################
397 397
398 398 fatspectra=1.0
399 399
400 400 wnoise = noise_ #/ spc_norm_max
401 401 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
402 402 #if wnoise>1.1*pnoise: # to be tested later
403 403 # wnoise=pnoise
404 404 noisebl=wnoise*0.9;
405 405 noisebh=wnoise*1.1
406 406 spc=spc-wnoise
407 407
408 408 minx=numpy.argmin(spc)
409 409 #spcs=spc.copy()
410 410 spcs=numpy.roll(spc,-minx)
411 411 cum=numpy.cumsum(spcs)
412 412 tot_noise=wnoise * self.Num_Bin #64;
413 413
414 414 snr = sum(spcs)/tot_noise
415 415 snrdB=10.*numpy.log10(snr)
416 416
417 417 if snrdB < SNRlimit :
418 418 snr = numpy.NaN
419 419 SPC_ch1[:,ht] = 0#numpy.NaN
420 420                 SPC_ch2[:,ht] = 0#numpy.NaN
421 421 SPCparam = (SPC_ch1,SPC_ch2)
422 422 continue
423 423
424 424
425 425 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
426 426 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
427 427
428 428 cummax=max(cum);
429 429 epsi=0.08*fatspectra # cumsum to narrow down the energy region
430 430 cumlo=cummax*epsi;
431 431 cumhi=cummax*(1-epsi)
432 432 powerindex=numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
433 433
434 434
435 435 if len(powerindex) < 1:# case for powerindex 0
436 436 continue
437 437 powerlo=powerindex[0]
438 438 powerhi=powerindex[-1]
439 439 powerwidth=powerhi-powerlo
440 440
441 441 firstpeak=powerlo+powerwidth/10.# first gaussian energy location
442 442 secondpeak=powerhi-powerwidth/10.#second gaussian energy location
443 443 midpeak=(firstpeak+secondpeak)/2.
444 444 firstamp=spcs[int(firstpeak)]
445 445 secondamp=spcs[int(secondpeak)]
446 446 midamp=spcs[int(midpeak)]
447 447
448 448 x=numpy.arange( self.Num_Bin )
449 449 y_data=spc+wnoise
450 450
451 451 ''' single Gaussian '''
452 452 shift0=numpy.mod(midpeak+minx, self.Num_Bin )
453 453 width0=powerwidth/4.#Initialization entire power of spectrum divided by 4
454 454 power0=2.
455 455 amplitude0=midamp
456 456 state0=[shift0,width0,amplitude0,power0,wnoise]
457 457 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
458 458 lsq1=fmin_l_bfgs_b(self.misfit1,state0,args=(y_data,x,num_intg),bounds=bnds,approx_grad=True)
459 459
460 460 chiSq1=lsq1[1];
461 461
462 462
463 463 if fatspectra<1.0 and powerwidth<4:
464 464 choice=0
465 465 Amplitude0=lsq1[0][2]
466 466 shift0=lsq1[0][0]
467 467 width0=lsq1[0][1]
468 468 p0=lsq1[0][3]
469 469 Amplitude1=0.
470 470 shift1=0.
471 471 width1=0.
472 472 p1=0.
473 473 noise=lsq1[0][4]
474 474 #return (numpy.array([shift0,width0,Amplitude0,p0]),
475 475 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
476 476
477 477 ''' two gaussians '''
478 478 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
479 479 shift0=numpy.mod(firstpeak+minx, self.Num_Bin );
480 480 shift1=numpy.mod(secondpeak+minx, self.Num_Bin )
481 481 width0=powerwidth/6.;
482 482 width1=width0
483 483 power0=2.;
484 484 power1=power0
485 485 amplitude0=firstamp;
486 486 amplitude1=secondamp
487 487 state0=[shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
488 488 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
489 489 bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
490 490 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
491 491
492 492 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
493 493
494 494
495 495 chiSq2=lsq2[1];
496 496
497 497
498 498
499 499 oneG=(chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
500 500
501 501 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
502 502 if oneG:
503 503 choice=0
504 504 else:
505 505 w1=lsq2[0][1]; w2=lsq2[0][5]
506 506 a1=lsq2[0][2]; a2=lsq2[0][6]
507 507 p1=lsq2[0][3]; p2=lsq2[0][7]
508 508 s1=(2**(1+1./p1))*scipy.special.gamma(1./p1)/p1;
509 509 s2=(2**(1+1./p2))*scipy.special.gamma(1./p2)/p2;
510 510 gp1=a1*w1*s1; gp2=a2*w2*s2 # power content of each ggaussian with proper p scaling
511 511
512 512 if gp1>gp2:
513 513 if a1>0.7*a2:
514 514 choice=1
515 515 else:
516 516 choice=2
517 517 elif gp2>gp1:
518 518 if a2>0.7*a1:
519 519 choice=2
520 520 else:
521 521 choice=1
522 522 else:
523 523 choice=numpy.argmax([a1,a2])+1
524 524 #else:
525 525 #choice=argmin([std2a,std2b])+1
526 526
527 527 else: # with low SNR go to the most energetic peak
528 528 choice=numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
529 529
530 530
531 531 shift0=lsq2[0][0];
532 532 vel0=Vrange[0] + shift0*(Vrange[1]-Vrange[0])
533 533 shift1=lsq2[0][4];
534 534 vel1=Vrange[0] + shift1*(Vrange[1]-Vrange[0])
535 535
536 536 max_vel = 1.0
537 537
538 538 #first peak will be 0, second peak will be 1
539 539 if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range
540 540 shift0=lsq2[0][0]
541 541 width0=lsq2[0][1]
542 542 Amplitude0=lsq2[0][2]
543 543 p0=lsq2[0][3]
544 544
545 545 shift1=lsq2[0][4]
546 546 width1=lsq2[0][5]
547 547 Amplitude1=lsq2[0][6]
548 548 p1=lsq2[0][7]
549 549 noise=lsq2[0][8]
550 550 else:
551 551 shift1=lsq2[0][0]
552 552 width1=lsq2[0][1]
553 553 Amplitude1=lsq2[0][2]
554 554 p1=lsq2[0][3]
555 555
556 556 shift0=lsq2[0][4]
557 557 width0=lsq2[0][5]
558 558 Amplitude0=lsq2[0][6]
559 559 p0=lsq2[0][7]
560 560 noise=lsq2[0][8]
561 561
562 562 if Amplitude0<0.05: # in case the peak is noise
563 563 shift0,width0,Amplitude0,p0 = [0,0,0,0]#4*[numpy.NaN]
564 564 if Amplitude1<0.05:
565 565 shift1,width1,Amplitude1,p1 = [0,0,0,0]#4*[numpy.NaN]
566 566
567 567
568 568 SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0))/width0)**p0
569 569 SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1))/width1)**p1
570 570 SPCparam = (SPC_ch1,SPC_ch2)
571 571
572 572
573 573         return SPCparam
574 574
575 575 def y_model1(self,x,state):
576 576 shift0,width0,amplitude0,power0,noise=state
577 577 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
578 578
579 579 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
580 580
581 581 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
582 582 return model0+model0u+model0d+noise
583 583
584 584 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
585 585 shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,noise=state
586 586 model0=amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
587 587
588 588 model0u=amplitude0*numpy.exp(-0.5*abs((x-shift0- self.Num_Bin )/width0)**power0)
589 589
590 590 model0d=amplitude0*numpy.exp(-0.5*abs((x-shift0+ self.Num_Bin )/width0)**power0)
591 591 model1=amplitude1*numpy.exp(-0.5*abs((x-shift1)/width1)**power1)
592 592
593 593 model1u=amplitude1*numpy.exp(-0.5*abs((x-shift1- self.Num_Bin )/width1)**power1)
594 594
595 595 model1d=amplitude1*numpy.exp(-0.5*abs((x-shift1+ self.Num_Bin )/width1)**power1)
596 596 return model0+model0u+model0d+model1+model1u+model1d+noise
597 597
598 598 def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is.
599 599
600 600 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
601 601
602 602 def misfit2(self,state,y_data,x,num_intg):
603 603 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
604 604
605 605
606 606
607 607 class PrecipitationProc(Operation):
608 608
609 609 '''
610 610 Operator that estimates Reflectivity factor (Z), and estimates rainfall Rate (R)
611 611
612 612 Input:
613 613 self.dataOut.data_pre : SelfSpectra
614 614
615 615 Output:
616 616
617 617 self.dataOut.data_output : Reflectivity factor, rainfall Rate
618 618
619 619
620 620 Parameters affected:
621 621 '''
622 622
623 623 def __init__(self):
624 624 Operation.__init__(self)
625 625 self.i=0
626 626
627 627 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
628 628 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350):
629 629
630 630 if radar == "MIRA35C" :
631 631
632 632 self.spc = dataOut.data_pre[0].copy()
633 633 self.Num_Hei = self.spc.shape[2]
634 634 self.Num_Bin = self.spc.shape[1]
635 635 self.Num_Chn = self.spc.shape[0]
636 636 Ze = self.dBZeMODE2(dataOut)
637 637
638 638 else:
639 639
640 640 self.spc = dataOut.data_pre[0].copy()
641 641
642 642             # NOTE: THE TX PULSE RANGE MUST BE REMOVED
643 643 self.spc[:,:,0:7]= numpy.NaN
644 644
645 645 self.Num_Hei = self.spc.shape[2]
646 646 self.Num_Bin = self.spc.shape[1]
647 647 self.Num_Chn = self.spc.shape[0]
648 648
649 649 VelRange = dataOut.spc_range[2]
650 650
651 651             ''' Obtain the radar constant '''
652 652
653 653 self.Pt = Pt
654 654 self.Gt = Gt
655 655 self.Gr = Gr
656 656 self.Lambda = Lambda
657 657 self.aL = aL
658 658 self.tauW = tauW
659 659 self.ThetaT = ThetaT
660 660 self.ThetaR = ThetaR
661 661             self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
662 662             self.lt = 10**(1.67/10) # Tx cable losses 1.67 dB
663 663             self.lr = 10**(5.73/10) # Rx cable losses 5.73 dB
664 664
665 665 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
666 666 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
667 667 RadarConstant = 10e-26 * Numerator / Denominator #
668 668             ExpConstant = 10**(40/10) # Experimental constant
669 669
670 670 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
671 671 for i in range(self.Num_Chn):
672 672 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
673 673 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
674 674
675 675 SPCmean = numpy.mean(SignalPower, 0)
676 676
677 677 Pr = SPCmean[:,:]/dataOut.normFactor
678 678
679 679 # Declaring auxiliary variables
680 680 Range = dataOut.heightList*1000. #Range in m
681 681 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
682 682 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
683 683 zMtrx = rMtrx+Altitude
684 684 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
685 685 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
686 686
687 687 # height dependence to air density Foote and Du Toit (1969)
688 688 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
689 689 VMtrx = VelMtrx / delv_z #Normalized velocity
690 690 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
691 691 # Diameter is related to the fall speed of falling drops
692 692 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
693 693 # Only valid for D>= 0.16 mm
694 694 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
695 695
696 696 #Calculate Radar Reflectivity ETAn
697 697 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
698 698 ETAd = ETAn * 6.18 * exp( -0.6 * D_Vz ) * delv_z
699 699 # Radar Cross Section
700 700 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
701 701 # Drop Size Distribution
702 702 DSD = ETAn / sigmaD
703 703             # Equivalent Reflectivity
704 704 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
705 705 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
706 706 # RainFall Rate
707 707 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
708 708
709 709 # Censoring the data
710 710             # Removing data with SNR below SNRth; the SNR should be considered per channel
711 711             SNRth = 10**(-30/10) #-30dB
712 712 novalid = numpy.where((dataOut.data_SNR[0,:] <SNRth) | (dataOut.data_SNR[1,:] <SNRth) | (dataOut.data_SNR[2,:] <SNRth)) # AND condition. Maybe OR condition better
713 713 W = numpy.nanmean(dataOut.data_DOP,0)
714 714 W[novalid] = numpy.NaN
715 715 Ze_org[novalid] = numpy.NaN
716 716 RR[novalid] = numpy.NaN
717 717
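            # Summary of the outputs filled below: data_param[0] is the equivalent
            # reflectivity in dBZ, data_param[1] the mean Doppler velocity W, and
            # data_param[2] the rainfall rate RR in mm/hr.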
718 718 dataOut.data_output = RR[8]
719 719 dataOut.data_param = numpy.ones([3,self.Num_Hei])
720 720 dataOut.channelList = [0,1,2]
721 721
722 722 dataOut.data_param[0]=10*numpy.log10(Ze_org)
723 723 dataOut.data_param[1]=W
724 724 dataOut.data_param[2]=RR
725 725 return dataOut
726 726
727 727 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
728 728
729 729 NPW = dataOut.NPW
730 730 COFA = dataOut.COFA
731 731
732 732 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
733 733 RadarConst = dataOut.RadarConst
734 734 #frequency = 34.85*10**9
735 735
736 736 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
737 737 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
738 738
739 739 ETA = numpy.sum(SNR,1)
740 740
741 741 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
742 742
743 743 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
744 744
745 745 for r in range(self.Num_Hei):
746 746
747 747 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
748 748 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
749 749
750 750 return Ze
751 751
752 752 # def GetRadarConstant(self):
753 753 #
754 754 # """
755 755 # Constants:
756 756 #
757 757 # Pt: Transmission Power dB 5kW 5000
758 758 # Gt: Transmission Gain dB 24.7 dB 295.1209
759 759 # Gr: Reception Gain dB 18.5 dB 70.7945
760 760 # Lambda: Wavelenght m 0.6741 m 0.6741
761 761 # aL: Attenuation loses dB 4dB 2.5118
762 762 # tauW: Width of transmission pulse s 4us 4e-6
763 763 # ThetaT: Transmission antenna bean angle rad 0.1656317 rad 0.1656317
764 764 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
765 765 #
766 766 # """
767 767 #
768 768 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
769 769 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
770 770 # RadarConstant = Numerator / Denominator
771 771 #
772 772 # return RadarConstant
773 773
774 774
775 775
776 776 class FullSpectralAnalysis(Operation):
777 777
778 778 """
779 779 Function that implements Full Spectral Analysis technique.
780 780
781 781 Input:
782 782 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
783 783 self.dataOut.groupList : Pairlist of channels
784 784 self.dataOut.ChanDist : Physical distance between receivers
785 785
786 786
787 787 Output:
788 788
789 789 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
790 790
791 791
792 792 Parameters affected: Winds, height range, SNR
793 793
794 794 """
795 795 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRlimit=7, minheight=None, maxheight=None):
796 796
797 797 self.indice=int(numpy.random.rand()*1000)
798 798
799 799 spc = dataOut.data_pre[0].copy()
800 800 cspc = dataOut.data_pre[1]
801 801
802 802 """Erick: NOTE THE RANGE OF THE PULSE TX MUST BE REMOVED"""
803 803
804 804 SNRspc = spc.copy()
805 805 SNRspc[:,:,0:7]= numpy.NaN # D. Scipión... the cleaning should not be hardwired in the code... it needs to be flexible... NEEDS TO BE REMOVED
806 806
807 807 """##########################################"""
808 808
809 809
810 810 nChannel = spc.shape[0]
811 811 nProfiles = spc.shape[1]
812 812 nHeights = spc.shape[2]
813 813
814 814 # first_height = 0.75 #km (ref: data header 20170822)
815 815 # resolution_height = 0.075 #km
816 816 '''
817 817 finding height range. check this when radar parameters are changed!
818 818 '''
819 819 if maxheight is not None:
820 820 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
821 821 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
822 822 else:
823 823 range_max = nHeights
824 824 if minheight is not None:
825 825 # range_min = int((minheight - first_height) / resolution_height) # theoretical
826 826 range_min = int(13.26 * minheight - 5) # empirical, works better
827 827 if range_min < 0:
828 828 range_min = 0
829 829 else:
830 830 range_min = 0
831 831
832 832 pairsList = dataOut.groupList
833 833 if dataOut.ChanDist is not None :
834 834 ChanDist = dataOut.ChanDist
835 835 else:
836 836 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
837 837
838 838 FrecRange = dataOut.spc_range[0]
839 839
840 840 data_SNR=numpy.zeros([nProfiles])
841 841 noise = dataOut.noise
842 842
843 dataOut.data_SNR = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0]
843 dataOut.data_snr = (numpy.mean(SNRspc,axis=1)- noise[0]) / noise[0]
844 844
845 dataOut.data_SNR[numpy.where( dataOut.data_SNR <0 )] = 1e-20
845 dataOut.data_snr[numpy.where( dataOut.data_snr <0 )] = 1e-20
846 846
847 847
848 848 data_output=numpy.ones([spc.shape[0],spc.shape[2]])*numpy.NaN
849 849
850 850 velocityX=[]
851 851 velocityY=[]
852 852 velocityV=[]
853 853
854 dbSNR = 10*numpy.log10(dataOut.data_SNR)
854 dbSNR = 10*numpy.log10(dataOut.data_snr)
855 855 dbSNR = numpy.average(dbSNR,0)
856 856
857 857 '''***********************************************WIND ESTIMATION**************************************'''
858 858
859 859 for Height in range(nHeights):
860 860
861 861 if Height >= range_min and Height < range_max:
862 862 # error_code unused, yet maybe useful for future analysis.
863 863 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList, ChanDist, Height, noise, dataOut.spc_range, dbSNR[Height], SNRlimit)
864 864 else:
865 865 Vzon,Vmer,Vver = 0., 0., numpy.NaN
866 866
867 867
868 868 if abs(Vzon) < 100. and abs(Vzon) > 0. and abs(Vmer) < 100. and abs(Vmer) > 0.:
869 869 velocityX=numpy.append(velocityX, Vzon)
870 870 velocityY=numpy.append(velocityY, -Vmer)
871 871
872 872 else:
873 873 velocityX=numpy.append(velocityX, numpy.NaN)
874 874 velocityY=numpy.append(velocityY, numpy.NaN)
875 875
876 876 if dbSNR[Height] > SNRlimit:
877 877 velocityV=numpy.append(velocityV, -Vver) # reason for this minus sign -> convention? (taken from Ericks version) D.S. yes!
878 878 else:
879 879 velocityV=numpy.append(velocityV, numpy.NaN)
880 880
881 881
882 882 '''Change the numpy.array (velocityX) sign when trying to process BLTR data (Erick)'''
883 883 data_output[0] = numpy.array(velocityX)
884 884 data_output[1] = numpy.array(velocityY)
885 885 data_output[2] = velocityV
886 886
887 887
888 888 dataOut.data_output = data_output
889 889
890 890 return dataOut
891 891
892 892
893 893 def moving_average(self,x, N=2):
894 894 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
895 895 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
896 896
897 897 def gaus(self,xSamples,Amp,Mu,Sigma):
898 898 return ( Amp / ((2*numpy.pi)**0.5 * Sigma) ) * numpy.exp( -( xSamples - Mu )**2 / ( 2 * (Sigma**2) ))
899 899
900 900 def Moments(self, ySamples, xSamples):
901 901 Power = numpy.nanmean(ySamples) # Power, 0th Moment
902 902 yNorm = ySamples / numpy.nansum(ySamples)
903 903 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
904 904 Sigma2 = abs(numpy.nansum(yNorm * (xSamples - RadVel)**2)) # Spectral Width, 2nd Moment
905 905 StdDev = Sigma2**0.5 # Standard deviation, spectral width
906 906 return numpy.array([Power,RadVel,StdDev])
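# Illustrative sketch of the moment estimation above (synthetic, assumed values; kept as a
# comment so it is not part of the processing chain):
#
# import numpy
# v = numpy.linspace(-5., 5., 64) # velocity axis in m/s (assumed)
# S = numpy.exp(-(v - 1.0)**2 / (2 * 0.5**2)) # synthetic peak at 1 m/s, sigma = 0.5 m/s
# power = numpy.nanmean(S) # 0th moment
# yNorm = S / numpy.nansum(S)
# radVel = numpy.nansum(v * yNorm) # 1st moment, ~1.0 m/s
# width = abs(numpy.nansum(yNorm * (v - radVel)**2))**0.5 # 2nd moment, ~0.5 m/s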
907 907
908 908 def StopWindEstimation(self, error_code):
909 909 '''
910 910 Stops the wind calculation and returns zero winds (Vver = NaN) along with the error code.
911 911 '''
912 912 Vzon = 0
913 913 Vmer = 0
914 914 Vver = numpy.nan
915 915 return Vzon, Vmer, Vver, error_code
916 916
917 917 def AntiAliasing(self, interval, maxstep):
918 918 """
919 919 Function to prevent errors from aliased values when computing the phase slope.
920 920 """
921 921 antialiased = numpy.zeros(len(interval))*0.0
922 922 copyinterval = interval.copy()
923 923
924 924 antialiased[0] = copyinterval[0]
925 925
926 926 for i in range(1,len(antialiased)):
927 927
928 928 step = interval[i] - interval[i-1]
929 929
930 930 if step > maxstep:
931 931 copyinterval -= 2*numpy.pi
932 932 antialiased[i] = copyinterval[i]
933 933
934 934 elif step < maxstep*(-1):
935 935 copyinterval += 2*numpy.pi
936 936 antialiased[i] = copyinterval[i]
937 937
938 938 else:
939 939 antialiased[i] = copyinterval[i].copy()
940 940
941 941 return antialiased
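# Hedged usage sketch (assumed values, kept as a comment): AntiAliasing undoes 2*pi jumps
# larger than maxstep so a linear fit of phase vs. frequency stays meaningful, e.g.
#
# phases = numpy.array([0.1, 0.5, 0.9, 0.9 - 2*numpy.pi, 0.9 - 2*numpy.pi + 0.4])
# unwrapped = self.AntiAliasing(phases, maxstep=4.5) # 4.5 rad is the value used below
# # -> approximately [0.1, 0.5, 0.9, 0.9, 1.3]; the -2*pi jump has been removed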
942 942
943 943 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit):
944 944 """
945 945 Function that Calculates Zonal, Meridional and Vertical wind velocities.
946 946 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
947 947
948 948 Input:
949 949 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
950 950 pairsList : Pairlist of channels
951 951 ChanDist : array of xi_ij and eta_ij
952 952 Height : height at which data is processed
953 953 noise : noise in [channels] format for specific height
954 954 AbbsisaRange : range of the frequencies or velocities
955 955 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
956 956
957 957 Output:
958 958 Vzon, Vmer, Vver : wind velocities
959 959 error_code : int that states where code is terminated
960 960
961 961 0 : no error detected
962 962 1 : Gaussian of mean spc exceeds widthlimit
963 963 2 : no Gaussian of mean spc found
964 964 3 : SNR too low or velocity too high -> e.g. precipitation
965 965 4 : at least one Gaussian of cspc exceeds widthlimit
966 966 5 : zero out of three cspc Gaussian fits converged
967 967 6 : phase slope fit could not be found
968 968 7 : arrays used to fit phase have different length
969 969 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
970 970
971 971 """
972 972
973 973 error_code = 0
974 974
975 975
976 976 SPC_Samples = numpy.ones([spc.shape[0],spc.shape[1]]) # for normalized spc values for one height
977 977 phase = numpy.ones([spc.shape[0],spc.shape[1]]) # phase between channels
978 978 CSPC_Samples = numpy.ones([spc.shape[0],spc.shape[1]],dtype=numpy.complex_) # for normalized cspc values
979 979 PhaseSlope = numpy.zeros(spc.shape[0]) # slope of the phases, channelwise
980 980 PhaseInter = numpy.ones(spc.shape[0]) # intercept to the slope of the phases, channelwise
981 981 xFrec = AbbsisaRange[0][0:spc.shape[1]] # frequency range
982 982 xVel = AbbsisaRange[2][0:spc.shape[1]] # velocity range
983 983 SPCav = numpy.average(spc, axis=0)-numpy.average(noise) # spc[0]-noise[0] %D.S. why??? I suggest only spc....
984 984
985 985 SPCmoments_vel = self.Moments(SPCav, xVel ) # SPCmoments_vel[1] corresponds to vertical velocity and is used to determine if signal corresponds to wind (if .. <3)
986 986 # D.S. I suggest to each moment to be calculated independently, because the signal level y/o interferences are not the same in all channels and
987 987 # signal or SNR seems to be contaminated
988 988 CSPCmoments = []
989 989
990 990 '''Getting Eij and Nij'''
991 991
992 992 Xi01, Xi02, Xi12 = ChanDist[:,0]
993 993 Eta01, Eta02, Eta12 = ChanDist[:,1]
994 994
995 995 # update nov 19
996 996 widthlimit = 7 # maximum width in Hz of the gaussian, empirically determined. Anything above 10 is unrealistic, often values between 1 and 5 correspond to proper fits.
997 997
998 998 '''************************* SPC is normalized ********************************'''
999 999
1000 1000 spc_norm = spc.copy() # need copy() because untouched spc is needed for normalization of cspc below
1001 1001 spc_norm = numpy.where(numpy.isfinite(spc_norm), spc_norm, numpy.NAN)
1002 1002
1003 1003 # D. Scipión: It is necessary to define DeltaF or DeltaV... it is wrong to use Factor_Norm. It's constant... not a variable
1004 1004
1005 1005 # For each channel
1006 1006 for i in range(spc.shape[0]):
1007 1007
1008 1008 spc_sub = spc_norm[i,:] - noise[i] # spc not smoothed here or in previous version.
1009 1009 # D. Scipión: Factor_Norm has to be replaced by DeltaF or DeltaV - It's a constant
1010 1010 Factor_Norm = 2*numpy.max(xFrec) / numpy.count_nonzero(~numpy.isnan(spc_sub)) # usually = Freq range / nfft
1011 1011 normalized_spc = spc_sub / (numpy.nansum(numpy.abs(spc_sub)) * Factor_Norm)
1012 1012
1013 1013 xSamples = xFrec # the frequency range is taken
1014 1014 SPC_Samples[i] = normalized_spc # Normalized SPC values are taken
1015 1015
1016 1016 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1017 1017
1018 1018 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1019 1019 you only fit the curve and don't need the absolute value of height for calculation,
1020 1020 only for estimation of width. for normalization of cross spectra, you need initial,
1021 1021 unnormalized self-spectra With noise.
1022 1022
1023 1023 Technically, you don't even need to normalize the self-spectra, as you only need the
1024 1024 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1025 1025 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1026 1026 >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
1027 1027 """
1028 1028
1029 1029 SPCMean = numpy.average(SPC_Samples, axis=0)
1030 1030
1031 1031 popt = [1e-10,0,1e-10]
1032 1032 SPCMoments = self.Moments(SPCMean, xSamples)
1033 1033
1034 1034 if dbSNR > SNRlimit and numpy.abs(SPCmoments_vel[1]) < 3:
1035 1035 try:
1036 1036 popt,pcov = curve_fit(self.gaus,xSamples,SPCMean,p0=SPCMoments)#, bounds=(-numpy.inf, [numpy.inf, numpy.inf, 10])). Setting bounds does not make the code faster but only keeps the fit from finding the minimum.
1037 1037 if popt[2] > widthlimit: # CONDITION
1038 1038 return self.StopWindEstimation(error_code = 1)
1039 1039
1040 1040 FitGauss = self.gaus(xSamples,*popt)
1041 1041
1042 1042 except :#RuntimeError:
1043 1043 return self.StopWindEstimation(error_code = 2)
1044 1044
1045 1045 else:
1046 1046 return self.StopWindEstimation(error_code = 3)
1047 1047
1048 1048
1049 1049
1050 1050 '''***************************** CSPC Normalization *************************
1051 1051 new section:
1052 1052 The self-spectra (spc) are used to normalize the cross-spectra. Peaks from precipitation
1053 1053 influence the norm, which is not desired. First, a range is identified where the
1054 1054 wind peak is estimated -> sum_wind is the sum over those frequencies. Next, the area
1055 1055 around it gets cut off and its values are replaced by the mean determined by the boundary
1056 1056 data -> sum_noise (spc is not normalized here, that's why the noise is important).
1057 1057
1058 1058 The sums are then added and multiplied by range/datapoints, because you need
1059 1059 an integral and not a sum for normalization.
1060 1060
1061 1061 A norm is found according to Briggs 92.
1062 1062 '''
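# In symbols (assumed notation, following the Briggs 1992-style normalization implemented below):
#
# I_i ~ (sum_noise + sum_wind) * (2 * f_max / N_freq) # approximate integral of S_i(f) df
# S_ij_norm(f) = S_ij(f) / sqrt(I_i * I_j) # normalized cross-spectrum for pair (i, j)
#
# so the normalized cross-spectral magnitudes are comparable between receiver pairs.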
1063 1063
1064 1064 radarWavelength = 0.6741 # meters
1065 1065 # D.S. This does not need to be hardwired... it should be a function of the radar frequency
1066 1066
1067 1067 count_limit_freq = numpy.abs(popt[1]) + widthlimit # Hz, m/s can be also used if velocity is desired abscissa.
1068 1068 # count_limit_freq = numpy.max(xFrec)
1069 1069
1070 1070 channel_integrals = numpy.zeros(3)
1071 1071
1072 1072 for i in range(spc.shape[0]):
1073 1073 '''
1074 1074 find the point in array corresponding to count_limit frequency.
1075 1075 sum over all frequencies in the range around zero Hz @ math.ceil(N_freq/2)
1076 1076 '''
1077 1077 N_freq = numpy.count_nonzero(~numpy.isnan(spc[i,:]))
1078 1078 count_limit_int = int(math.ceil( count_limit_freq / numpy.max(xFrec) * (N_freq / 2) )) # gives integer point
1079 1079 sum_wind = numpy.nansum( spc[i, (math.ceil(N_freq/2) - count_limit_int) : (math.ceil(N_freq / 2) + count_limit_int)] ) #N_freq/2 is where frequency (velocity) is zero, i.e. middle of spectrum.
1080 1080 sum_noise = (numpy.mean(spc[i, :4]) + numpy.mean(spc[i, -6:-2]))/2.0 * (N_freq - 2*count_limit_int)
1081 1081 channel_integrals[i] = (sum_noise + sum_wind) * (2*numpy.max(xFrec) / N_freq)
1082 1082
1083 1083
1084 1084 cross_integrals_peak = numpy.zeros(3)
1085 1085 # cross_integrals_totalrange = numpy.zeros(3)
1086 1086
1087 1087 for i in range(spc.shape[0]):
1088 1088
1089 1089 cspc_norm = cspc[i,:].copy() # cspc not smoothed here or in previous version
1090 1090
1091 1091 chan_index0 = pairsList[i][0]
1092 1092 chan_index1 = pairsList[i][1]
1093 1093
1094 1094 cross_integrals_peak[i] = channel_integrals[chan_index0]*channel_integrals[chan_index1]
1095 1095 normalized_cspc = cspc_norm / numpy.sqrt(cross_integrals_peak[i])
1096 1096 CSPC_Samples[i] = normalized_cspc
1097 1097
1098 1098 ''' Finding cross integrals without subtracting any peaks:'''
1099 1099 # FactorNorm0 = 2*numpy.max(xFrec) / numpy.count_nonzero(~numpy.isnan(spc[chan_index0,:]))
1100 1100 # FactorNorm1 = 2*numpy.max(xFrec) / numpy.count_nonzero(~numpy.isnan(spc[chan_index1,:]))
1101 1101 # cross_integrals_totalrange[i] = (numpy.nansum(spc[chan_index0,:])) * FactorNorm0 * (numpy.nansum(spc[chan_index1,:])) * FactorNorm1
1102 1102 # normalized_cspc = cspc_norm / numpy.sqrt(cross_integrals_totalrange[i])
1103 1103 # CSPC_Samples[i] = normalized_cspc
1104 1104
1105 1105
1106 1106 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1107 1107
1108 1108
1109 1109 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0]), xSamples),
1110 1110 self.Moments(numpy.abs(CSPC_Samples[1]), xSamples),
1111 1111 self.Moments(numpy.abs(CSPC_Samples[2]), xSamples)])
1112 1112
1113 1113
1114 1114 '''***Sorting out NaN entries***'''
1115 1115 CSPCMask01 = numpy.abs(CSPC_Samples[0])
1116 1116 CSPCMask02 = numpy.abs(CSPC_Samples[1])
1117 1117 CSPCMask12 = numpy.abs(CSPC_Samples[2])
1118 1118
1119 1119 mask01 = ~numpy.isnan(CSPCMask01)
1120 1120 mask02 = ~numpy.isnan(CSPCMask02)
1121 1121 mask12 = ~numpy.isnan(CSPCMask12)
1122 1122
1123 1123 CSPCMask01 = CSPCMask01[mask01]
1124 1124 CSPCMask02 = CSPCMask02[mask02]
1125 1125 CSPCMask12 = CSPCMask12[mask12]
1126 1126
1127 1127
1128 1128 popt01, popt02, popt12 = [1e-10,1e-10,1e-10], [1e-10,1e-10,1e-10] ,[1e-10,1e-10,1e-10]
1129 1129 FitGauss01, FitGauss02, FitGauss12 = numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0, numpy.empty(len(xSamples))*0
1130 1130
1131 1131 '''*******************************FIT GAUSS CSPC************************************'''
1132 1132
1133 1133 try:
1134 1134 popt01,pcov = curve_fit(self.gaus,xSamples[mask01],numpy.abs(CSPCMask01),p0=CSPCmoments[0])
1135 1135 if popt01[2] > widthlimit: # CONDITION
1136 1136 return self.StopWindEstimation(error_code = 4)
1137 1137
1138 1138 popt02,pcov = curve_fit(self.gaus,xSamples[mask02],numpy.abs(CSPCMask02),p0=CSPCmoments[1])
1139 1139 if popt02[2] > widthlimit: # CONDITION
1140 1140 return self.StopWindEstimation(error_code = 4)
1141 1141
1142 1142 popt12,pcov = curve_fit(self.gaus,xSamples[mask12],numpy.abs(CSPCMask12),p0=CSPCmoments[2])
1143 1143 if popt12[2] > widthlimit: # CONDITION
1144 1144 return self.StopWindEstimation(error_code = 4)
1145 1145
1146 1146 FitGauss01 = self.gaus(xSamples, *popt01)
1147 1147 FitGauss02 = self.gaus(xSamples, *popt02)
1148 1148 FitGauss12 = self.gaus(xSamples, *popt12)
1149 1149
1150 1150 except:
1151 1151 return self.StopWindEstimation(error_code = 5)
1152 1152
1153 1153
1154 1154 '''************* Getting Fij ***************'''
1155 1155
1156 1156
1157 1157 #Punto en Eje X de la Gaussiana donde se encuentra el centro -- x-axis point of the gaussian where the center is located
1158 1158 # -> PointGauCenter
1159 1159 GaussCenter = popt[1]
1160 1160 ClosestCenter = xSamples[numpy.abs(xSamples-GaussCenter).argmin()]
1161 1161 PointGauCenter = numpy.where(xSamples==ClosestCenter)[0][0]
1162 1162
1163 1163 #Punto e^-1 hubicado en la Gaussiana -- point where e^-1 is located in the gaussian
1164 1164 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1165 1165 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The point in "FitGauss" closest to "PeMinus1"
1166 1166 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1167 1167
1168 1168 Fij = numpy.abs(xSamples[PointFij] - xSamples[PointGauCenter])
1169 1169
1170 1170 '''********** Taking frequency ranges from mean SPCs **********'''
1171 1171
1172 1172 #GaussCenter = popt[1] #First moment 01
1173 1173 GauWidth = popt[2] * 3/2 #Bandwidth of Gau01 TODO why *3/2?
1174 1174 Range = numpy.empty(2)
1175 1175 Range[0] = GaussCenter - GauWidth
1176 1176 Range[1] = GaussCenter + GauWidth
1177 1177 #Punto en Eje X de la Gaussiana donde se encuentra ancho de banda (min:max) -- Point in x-axis where the bandwidth is located (min:max)
1178 1178 ClosRangeMin = xSamples[numpy.abs(xSamples-Range[0]).argmin()]
1179 1179 ClosRangeMax = xSamples[numpy.abs(xSamples-Range[1]).argmin()]
1180 1180
1181 1181 PointRangeMin = numpy.where(xSamples==ClosRangeMin)[0][0]
1182 1182 PointRangeMax = numpy.where(xSamples==ClosRangeMax)[0][0]
1183 1183
1184 1184 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1185 1185
1186 1186 FrecRange = xFrec[ Range[0] : Range[1] ]
1187 1187
1188 1188
1189 1189 '''************************** Getting Phase Slope ***************************'''
1190 1190
1191 1191 for i in range(1,3): # Changed to only compute two
1192 1192
1193 1193 if len(FrecRange) > 5 and len(FrecRange) < spc.shape[1] * 0.3:
1194 1194 # PhaseRange=self.moving_average(phase[i,Range[0]:Range[1]],N=1) #used before to smooth phase with N=3
1195 1195 PhaseRange = phase[i,Range[0]:Range[1]].copy()
1196 1196
1197 1197 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1198 1198
1199 1199
1200 1200 if len(FrecRange) == len(PhaseRange):
1201 1201
1202 1202 try:
1203 1203 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1204 1204 PhaseSlope[i] = slope
1205 1205 PhaseInter[i] = intercept
1206 1206
1207 1207 except:
1208 1208 return self.StopWindEstimation(error_code = 6)
1209 1209
1210 1210 else:
1211 1211 return self.StopWindEstimation(error_code = 7)
1212 1212
1213 1213 else:
1214 1214 return self.StopWindEstimation(error_code = 8)
1215 1215
1216 1216
1217 1217
1218 1218 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
1219 1219
1220 1220 '''Getting constant C'''
1221 1221 cC=(Fij*numpy.pi)**2
1222 1222
1223 1223 '''****** Getting constants F and G ******'''
1224 1224 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1225 1225 MijResult0 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1226 1226 MijResult1 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1227 1227 MijResults = numpy.array([MijResult0,MijResult1])
1228 1228 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1229 1229
1230 1230 '''****** Getting constants A, B and H ******'''
1231 1231 W01 = numpy.nanmax( FitGauss01 )
1232 1232 W02 = numpy.nanmax( FitGauss02 )
1233 1233 W12 = numpy.nanmax( FitGauss12 )
1234 1234
1235 1235 WijResult0 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1236 1236 WijResult1 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1237 1237 WijResult2 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1238 1238
1239 1239 WijResults = numpy.array([WijResult0, WijResult1, WijResult2])
1240 1240
1241 1241 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1242 1242 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1243 1243
1244 1244 VxVy = numpy.array([[cA,cH],[cH,cB]])
1245 1245 VxVyResults = numpy.array([-cF,-cG])
1246 1246 (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults)
1247 1247
1248 1248 Vzon = Vy
1249 1249 Vmer = Vx
1250 1250
1251 1251 # Vmag=numpy.sqrt(Vzon**2+Vmer**2) # unused
1252 1252 # Vang=numpy.arctan2(Vmer,Vzon) # unused
1253 1253
1254 1254
1255 1255 ''' Using frequency as the abscissa. Due to the three channels, the off-zenith angle is zero
1256 1256 and Vrad equals Vver. Formula taken from Briggs 92, figure 4.
1257 1257 '''
1258 1258 if numpy.abs( popt[1] ) < 3.5 and len(FrecRange) > 4:
1259 1259 Vver = 0.5 * radarWavelength * popt[1] * 100 # *100 to get cm (/s)
1260 1260 else:
1261 1261 Vver = numpy.NaN
1262 1262
1263 1263 error_code = 0
1264 1264
1265 1265 return Vzon, Vmer, Vver, error_code
1266 1266
1267 1267
1268 1268 class SpectralMoments(Operation):
1269 1269
1270 1270 '''
1271 1271 Function SpectralMoments()
1272 1272
1273 1273 Calculates moments (power, mean, standard deviation) and SNR of the signal
1274 1274
1275 1275 Type of dataIn: Spectra
1276 1276
1277 1277 Configuration Parameters:
1278 1278
1279 1279 dirCosx : Cosine director in X axis
1280 1280 dirCosy : Cosine director in Y axis
1281 1281
1282 1282 elevation :
1283 1283 azimuth :
1284 1284
1285 1285 Input:
1286 1286 channelList : simple channel list to select e.g. [2,3,7]
1287 1287 self.dataOut.data_pre : Spectral data
1288 1288 self.dataOut.abscissaList : List of frequencies
1289 1289 self.dataOut.noise : Noise level per channel
1290 1290
1291 1291 Affected:
1292 1292 self.dataOut.moments : Parameters per channel
1293 self.dataOut.data_SNR : SNR per channel
1293 self.dataOut.data_snr : SNR per channel
1294 1294
1295 1295 '''
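# Minimal standalone sketch of the moment estimation idea used in __calculateMoments below
# (synthetic spectrum and noise level are assumed; the real method also restricts the sums
# to a valid window around the spectral peak):
#
# import numpy
# freq = numpy.linspace(-100., 100., 128) # Hz (assumed)
# n0 = 0.1 # noise level (assumed)
# spec = n0 + numpy.exp(-(freq - 20.)**2 / (2 * 8.**2)) # peak near 20 Hz, width ~8 Hz
# power = (spec - n0).sum()
# fd = ((spec - n0) * freq).sum() / power # Doppler shift, ~20 Hz
# w = numpy.sqrt(((spec - n0) * (freq - fd)**2).sum() / power) # spectral width, ~8 Hz
# snr = (spec.mean() - n0) / n0 # linear SNR, as stored in data_snr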
1296 1296
1297 1297 def run(self, dataOut):
1298 1298
1299 1299 data = dataOut.data_pre[0]
1300 1300 absc = dataOut.abscissaList[:-1]
1301 1301 noise = dataOut.noise
1302 1302 nChannel = data.shape[0]
1303 1303 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1304 1304
1305 1305 for ind in range(nChannel):
1306 1306 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1307 1307
1308 1308 dataOut.moments = data_param[:,1:,:]
1309 dataOut.data_SNR = data_param[:,0]
1310 dataOut.data_POW = data_param[:,1]
1311 dataOut.data_DOP = data_param[:,2]
1312 dataOut.data_WIDTH = data_param[:,3]
1309 dataOut.data_snr = data_param[:,0]
1310 dataOut.data_pow = data_param[:,1]
1311 dataOut.data_dop = data_param[:,2]
1312 dataOut.data_width = data_param[:,3]
1313 1313
1314 1314 return dataOut
1315 1315
1316 1316 def __calculateMoments(self, oldspec, oldfreq, n0,
1317 1317 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1318 1318
1319 1319 if (nicoh is None): nicoh = 1
1320 1320 if (graph is None): graph = 0
1321 1321 if (smooth is None): smooth = 0
1322 1322 elif (smooth < 3): smooth = 0
1323 1323
1324 1324 if (type1 is None): type1 = 0
1325 1325 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1326 1326 if (snrth is None): snrth = -3
1327 1327 if (dc is None): dc = 0
1328 1328 if (aliasing is None): aliasing = 0
1329 1329 if (oldfd is None): oldfd = 0
1330 1330 if (wwauto is None): wwauto = 0
1331 1331
1332 1332 if (n0 < 1.e-20): n0 = 1.e-20
1333 1333
1334 1334 freq = oldfreq
1335 1335 vec_power = numpy.zeros(oldspec.shape[1])
1336 1336 vec_fd = numpy.zeros(oldspec.shape[1])
1337 1337 vec_w = numpy.zeros(oldspec.shape[1])
1338 1338 vec_snr = numpy.zeros(oldspec.shape[1])
1339 1339
1340 1340 # oldspec = numpy.ma.masked_invalid(oldspec)
1341 1341
1342 1342 for ind in range(oldspec.shape[1]):
1343 1343
1344 1344 spec = oldspec[:,ind]
1345 1345 aux = spec*fwindow
1346 1346 max_spec = aux.max()
1347 1347 m = aux.tolist().index(max_spec)
1348 1348
1349 1349 # Smooth
1350 1350 if (smooth == 0):
1351 1351 spec2 = spec
1352 1352 else:
1353 1353 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1354 1354
1355 1355 # Moments Estimation
1356 1356 bb = spec2[numpy.arange(m,spec2.size)]
1357 1357 bb = (bb<n0).nonzero()
1358 1358 bb = bb[0]
1359 1359
1360 1360 ss = spec2[numpy.arange(0,m + 1)]
1361 1361 ss = (ss<n0).nonzero()
1362 1362 ss = ss[0]
1363 1363
1364 1364 if (bb.size == 0):
1365 1365 bb0 = spec.size - 1 - m
1366 1366 else:
1367 1367 bb0 = bb[0] - 1
1368 1368 if (bb0 < 0):
1369 1369 bb0 = 0
1370 1370
1371 1371 if (ss.size == 0):
1372 1372 ss1 = 1
1373 1373 else:
1374 1374 ss1 = max(ss) + 1
1375 1375
1376 1376 if (ss1 > m):
1377 1377 ss1 = m
1378 1378
1379 1379 valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1380 1380
1381 1381 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1382 1382 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1383 1383 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1384 1384 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1385 1385 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1386 1386 snr = (spec2.mean()-n0)/n0
1387 1387 if (snr < 1.e-20) :
1388 1388 snr = 1.e-20
1389 1389
1390 1390 # vec_power[ind] = power #D. Scipión replaced with the line below
1391 1391 vec_power[ind] = total_power
1392 1392 vec_fd[ind] = fd
1393 1393 vec_w[ind] = w
1394 1394 vec_snr[ind] = snr
1395 1395
1396 1396 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1397 1397
1398 1398 #------------------ Get SA Parameters --------------------------
1399 1399
1400 1400 def GetSAParameters(self):
1401 1401 #SA in frequency
1402 1402 pairslist = self.dataOut.groupList
1403 1403 num_pairs = len(pairslist)
1404 1404
1405 1405 vel = self.dataOut.abscissaList
1406 1406 spectra = self.dataOut.data_pre
1407 1407 cspectra = self.dataIn.data_cspc
1408 1408 delta_v = vel[1] - vel[0]
1409 1409
1410 1410 #Calculating the power spectrum
1411 1411 spc_pow = numpy.sum(spectra, 3)*delta_v
1412 1412 #Normalizing Spectra
1413 1413 norm_spectra = spectra/spc_pow
1414 1414 #Calculating the norm_spectra at peak
1415 1415 max_spectra = numpy.max(norm_spectra, 3)
1416 1416
1417 1417 #Normalizing Cross Spectra
1418 1418 norm_cspectra = numpy.zeros(cspectra.shape)
1419 1419
1420 1420 for i in range(num_pairs):
1421 1421 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1422 1422
1423 1423 max_cspectra = numpy.max(norm_cspectra,2)
1424 1424 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1425 1425
1426 1426 for i in range(num_pairs):
1427 1427 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1428 1428 #------------------- Get Lags ----------------------------------
1429 1429
1430 1430 class SALags(Operation):
1431 1431 '''
1432 1432 Function SALags()
1433 1433
1434 1434 Input:
1435 1435 self.dataOut.data_pre
1436 1436 self.dataOut.abscissaList
1437 1437 self.dataOut.noise
1438 1438 self.dataOut.normFactor
1439 self.dataOut.data_SNR
1439 self.dataOut.data_snr
1440 1440 self.dataOut.groupList
1441 1441 self.dataOut.nChannels
1442 1442
1443 1443 Affected:
1444 1444 self.dataOut.data_param
1445 1445
1446 1446 '''
1447 1447 def run(self, dataOut):
1448 1448 data_acf = dataOut.data_pre[0]
1449 1449 data_ccf = dataOut.data_pre[1]
1450 1450 normFactor_acf = dataOut.normFactor[0]
1451 1451 normFactor_ccf = dataOut.normFactor[1]
1452 1452 pairs_acf = dataOut.groupList[0]
1453 1453 pairs_ccf = dataOut.groupList[1]
1454 1454
1455 1455 nHeights = dataOut.nHeights
1456 1456 absc = dataOut.abscissaList
1457 1457 noise = dataOut.noise
1458 SNR = dataOut.data_SNR
1458 SNR = dataOut.data_snr
1459 1459 nChannels = dataOut.nChannels
1460 1460 # pairsList = dataOut.groupList
1461 1461 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1462 1462
1463 1463 for l in range(len(pairs_acf)):
1464 1464 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1465 1465
1466 1466 for l in range(len(pairs_ccf)):
1467 1467 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1468 1468
1469 1469 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1470 1470 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1471 1471 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1472 1472 return
1473 1473
1474 1474 # def __getPairsAutoCorr(self, pairsList, nChannels):
1475 1475 #
1476 1476 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1477 1477 #
1478 1478 # for l in range(len(pairsList)):
1479 1479 # firstChannel = pairsList[l][0]
1480 1480 # secondChannel = pairsList[l][1]
1481 1481 #
1482 1482 # #Obtaining autocorrelation pairs
1483 1483 # if firstChannel == secondChannel:
1484 1484 # pairsAutoCorr[firstChannel] = int(l)
1485 1485 #
1486 1486 # pairsAutoCorr = pairsAutoCorr.astype(int)
1487 1487 #
1488 1488 # pairsCrossCorr = range(len(pairsList))
1489 1489 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1490 1490 #
1491 1491 # return pairsAutoCorr, pairsCrossCorr
1492 1492
1493 1493 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1494 1494
1495 1495 lag0 = data_acf.shape[1]//2 # integer division so it can be used as an index
1496 1496 #Autocorrelation function
1497 1497 mean_acf = stats.nanmean(data_acf, axis = 0)
1498 1498 
1499 1499 #Obtaining the TauCross index
1500 1500 ind_ccf = data_ccf.argmax(axis = 1)
1501 1501 #Obtaining the TauAuto index
1502 1502 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1503 1503 ccf_lag0 = data_ccf[:,lag0,:]
1504 1504 
1505 1505 for i in range(ccf_lag0.shape[0]):
1506 1506 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1507 1507 
1508 1508 #Obtaining TauCross and TauAuto
1509 1509 tau_ccf = lagRange[ind_ccf]
1510 1510 tau_acf = lagRange[ind_acf]
1511 1511
1512 1512 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1513 1513
1514 1514 tau_ccf[Nan1,Nan2] = numpy.nan
1515 1515 tau_acf[Nan1,Nan2] = numpy.nan
1516 1516 tau = numpy.vstack((tau_ccf,tau_acf))
1517 1517
1518 1518 return tau
1519 1519
1520 1520 def __calculateLag1Phase(self, data, lagTRange):
1521 1521 data1 = stats.nanmean(data, axis = 0)
1522 1522 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1523 1523
1524 1524 phase = numpy.angle(data1[lag1,:])
1525 1525
1526 1526 return phase
1527 1527
1528 1528 class SpectralFitting(Operation):
1529 1529 '''
1530 1530 Function SpectralFitting()
1531 1531
1532 1532 Input:
1533 1533 Output:
1534 1534 Variables modified:
1535 1535 '''
1536 1536
1537 1537 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1538 1538
1539 1539
1540 1540 if path != None:
1541 1541 sys.path.append(path)
1542 1542 self.dataOut.library = importlib.import_module(file)
1543 1543
1544 1544 #To be inserted as a parameter
1545 1545 groupArray = numpy.array(groupList)
1546 1546 # groupArray = numpy.array([[0,1],[2,3]])
1547 1547 self.dataOut.groupList = groupArray
1548 1548
1549 1549 nGroups = groupArray.shape[0]
1550 1550 nChannels = self.dataIn.nChannels
1551 1551 nHeights=self.dataIn.heightList.size
1552 1552
1553 1553 #Parameters Array
1554 1554 self.dataOut.data_param = None
1555 1555
1556 1556 #Set constants
1557 1557 constants = self.dataOut.library.setConstants(self.dataIn)
1558 1558 self.dataOut.constants = constants
1559 1559 M = self.dataIn.normFactor
1560 1560 N = self.dataIn.nFFTPoints
1561 1561 ippSeconds = self.dataIn.ippSeconds
1562 1562 K = self.dataIn.nIncohInt
1563 1563 pairsArray = numpy.array(self.dataIn.pairsList)
1564 1564
1565 1565 #List of possible combinations
1566 1566 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2)) # materialize, the combinations are iterated again below
1567 1567 indCross = numpy.zeros(len(listComb), dtype = 'int')
1568 1568
1569 1569 if getSNR:
1570 1570 listChannels = groupArray.reshape((groupArray.size))
1571 1571 listChannels.sort()
1572 1572 noise = self.dataIn.getNoise()
1573 self.dataOut.data_SNR = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1573 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1574 1574
1575 1575 for i in range(nGroups):
1576 1576 coord = groupArray[i,:]
1577 1577
1578 1578 #Input data array
1579 1579 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1580 1580 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1581 1581
1582 1582 #Cross Spectra data array for Covariance Matrixes
1583 1583 ind = 0
1584 1584 for pairs in listComb:
1585 1585 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1586 1586 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1587 1587 ind += 1
1588 1588 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1589 1589 dataCross = dataCross**2/K
1590 1590
1591 1591 for h in range(nHeights):
1592 1592
1593 1593 #Input
1594 1594 d = data[:,h]
1595 1595
1596 1596 #Covariance Matrix
1597 1597 D = numpy.diag(d**2/K)
1598 1598 ind = 0
1599 1599 for pairs in listComb:
1600 1600 #Coordinates in Covariance Matrix
1601 1601 x = pairs[0]
1602 1602 y = pairs[1]
1603 1603 #Channel Index
1604 1604 S12 = dataCross[ind,:,h]
1605 1605 D12 = numpy.diag(S12)
1606 1606 #Completing Covariance Matrix with Cross Spectras
1607 1607 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1608 1608 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1609 1609 ind += 1
1610 1610 Dinv=numpy.linalg.inv(D)
1611 1611 L=numpy.linalg.cholesky(Dinv)
1612 1612 LT=L.T
1613 1613
1614 1614 dp = numpy.dot(LT,d)
1615 1615
1616 1616 #Initial values
1617 1617 data_spc = self.dataIn.data_spc[coord,:,h]
1618 1618
1619 1619 if (h>0)and(error1[3]<5):
1620 1620 p0 = self.dataOut.data_param[i,:,h-1]
1621 1621 else:
1622 1622 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1623 1623
1624 1624 try:
1625 1625 #Least Squares
1626 1626 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1627 1627 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1628 1628 #Chi square error
1629 1629 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1630 1630 #Error with Jacobian
1631 1631 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1632 1632 except:
1633 1633 minp = p0*numpy.nan
1634 1634 error0 = numpy.nan
1635 1635 error1 = p0*numpy.nan
1636 1636
1637 1637 #Save
1638 1638 if self.dataOut.data_param is None:
1639 1639 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1640 1640 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1641 1641
1642 1642 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1643 1643 self.dataOut.data_param[i,:,h] = minp
1644 1644 return
1645 1645
1646 1646 def __residFunction(self, p, dp, LT, constants):
1647 1647
1648 1648 fm = self.dataOut.library.modelFunction(p, constants)
1649 1649 fmp=numpy.dot(LT,fm)
1650 1650
1651 1651 return dp-fmp
1652 1652
1653 1653 def __getSNR(self, z, noise):
1654 1654
1655 1655 avg = numpy.average(z, axis=1)
1656 1656 SNR = (avg.T-noise)/noise
1657 1657 SNR = SNR.T
1658 1658 return SNR
1659 1659
1660 1660 def __chisq(p,chindex,hindex):
1661 1661 #similar to Resid but calculates CHI**2
1662 1662 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1663 1663 dp=numpy.dot(LT,d)
1664 1664 fmp=numpy.dot(LT,fm)
1665 1665 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1666 1666 return chisq
1667 1667
1668 1668 class WindProfiler(Operation):
1669 1669
1670 1670 __isConfig = False
1671 1671
1672 1672 __initime = None
1673 1673 __lastdatatime = None
1674 1674 __integrationtime = None
1675 1675
1676 1676 __buffer = None
1677 1677
1678 1678 __dataReady = False
1679 1679
1680 1680 __firstdata = None
1681 1681
1682 1682 n = None
1683 1683
1684 1684 def __init__(self):
1685 1685 Operation.__init__(self)
1686 1686
1687 1687 def __calculateCosDir(self, elev, azim):
1688 1688 zen = (90 - elev)*numpy.pi/180
1689 1689 azim = azim*numpy.pi/180
1690 1690 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1691 1691 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1692 1692
1693 1693 signX = numpy.sign(numpy.cos(azim))
1694 1694 signY = numpy.sign(numpy.sin(azim))
1695 1695
1696 1696 cosDirX = numpy.copysign(cosDirX, signX)
1697 1697 cosDirY = numpy.copysign(cosDirY, signY)
1698 1698 return cosDirX, cosDirY
1699 1699
1700 1700 def __calculateAngles(self, theta_x, theta_y, azimuth):
1701 1701
1702 1702 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1703 1703 zenith_arr = numpy.arccos(dir_cosw)
1704 1704 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1705 1705
1706 1706 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1707 1707 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1708 1708
1709 1709 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1710 1710
1711 1711 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1712 1712
1713 1713 #
1714 1714 if horOnly:
1715 1715 A = numpy.c_[dir_cosu,dir_cosv]
1716 1716 else:
1717 1717 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1718 1718 A = numpy.asmatrix(A)
1719 1719 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1720 1720
1721 1721 return A1
1722 1722
1723 1723 def __correctValues(self, heiRang, phi, velRadial, SNR):
1724 1724 listPhi = phi.tolist()
1725 1725 maxid = listPhi.index(max(listPhi))
1726 1726 minid = listPhi.index(min(listPhi))
1727 1727
1728 1728 rango = list(range(len(phi)))
1729 1729 # rango = numpy.delete(rango,maxid)
1730 1730
1731 1731 heiRang1 = heiRang*math.cos(phi[maxid])
1732 1732 heiRangAux = heiRang*math.cos(phi[minid])
1733 1733 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1734 1734 heiRang1 = numpy.delete(heiRang1,indOut)
1735 1735
1736 1736 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1737 1737 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1738 1738
1739 1739 for i in rango:
1740 1740 x = heiRang*math.cos(phi[i])
1741 1741 y1 = velRadial[i,:]
1742 1742 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1743 1743
1744 1744 x1 = heiRang1
1745 1745 y11 = f1(x1)
1746 1746
1747 1747 y2 = SNR[i,:]
1748 1748 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1749 1749 y21 = f2(x1)
1750 1750
1751 1751 velRadial1[i,:] = y11
1752 1752 SNR1[i,:] = y21
1753 1753
1754 1754 return heiRang1, velRadial1, SNR1
1755 1755
1756 1756 def __calculateVelUVW(self, A, velRadial):
1757 1757
1758 1758 #Matrix operation
1759 1759 # velUVW = numpy.zeros((velRadial.shape[1],3))
1760 1760 # for ind in range(velRadial.shape[1]):
1761 1761 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1762 1762 # velUVW = velUVW.transpose()
1763 1763 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1764 1764 velUVW[:,:] = numpy.dot(A,velRadial)
1765 1765
1766 1766
1767 1767 return velUVW
1768 1768
1769 1769 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1770 1770
1771 1771 def techniqueDBS(self, kwargs):
1772 1772 """
1773 1773 Function that implements Doppler Beam Swinging (DBS) technique.
1774 1774
1775 1775 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1776 1776 Direction correction (if necessary), Ranges and SNR
1777 1777
1778 1778 Output: Winds estimation (Zonal, Meridional and Vertical)
1779 1779
1780 1780 Parameters affected: Winds, height range, SNR
1781 1781 """
1782 1782 velRadial0 = kwargs['velRadial']
1783 1783 heiRang = kwargs['heightList']
1784 1784 SNR0 = kwargs['SNR']
1785 1785
1786 1786 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1787 1787 theta_x = numpy.array(kwargs['dirCosx'])
1788 1788 theta_y = numpy.array(kwargs['dirCosy'])
1789 1789 else:
1790 1790 elev = numpy.array(kwargs['elevation'])
1791 1791 azim = numpy.array(kwargs['azimuth'])
1792 1792 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1793 1793 azimuth = kwargs['correctAzimuth']
1794 1794 if 'horizontalOnly' in kwargs:
1795 1795 horizontalOnly = kwargs['horizontalOnly']
1796 1796 else: horizontalOnly = False
1797 1797 if 'correctFactor' in kwargs:
1798 1798 correctFactor = kwargs['correctFactor']
1799 1799 else: correctFactor = 1
1800 1800 if 'channelList' in kwargs:
1801 1801 channelList = kwargs['channelList']
1802 1802 if len(channelList) == 2:
1803 1803 horizontalOnly = True
1804 1804 arrayChannel = numpy.array(channelList)
1805 1805 velRadial0 = velRadial0[arrayChannel,:,:] # keep only the requested channels
1806 1806 theta_x = theta_x[arrayChannel]
1807 1807 theta_y = theta_y[arrayChannel]
1808 1808
1809 1809 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1810 1810 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1811 1811 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1812 1812
1813 1813 #Computing the velocity components with DBS
1814 1814 winds = self.__calculateVelUVW(A,velRadial1)
1815 1815
1816 1816 return winds, heiRang1, SNR1
1817 1817
1818 1818 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1819 1819
1820 1820 nPairs = len(pairs_ccf)
1821 1821 posx = numpy.asarray(posx)
1822 1822 posy = numpy.asarray(posy)
1823 1823
1824 1824 #Inverse rotation to align with the azimuth
1825 1825 if azimuth is not None:
1826 1826 azimuth = azimuth*math.pi/180
1827 1827 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1828 1828 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1829 1829 else:
1830 1830 posx1 = posx
1831 1831 posy1 = posy
1832 1832
1833 1833 #Computing distances
1834 1834 distx = numpy.zeros(nPairs)
1835 1835 disty = numpy.zeros(nPairs)
1836 1836 dist = numpy.zeros(nPairs)
1837 1837 ang = numpy.zeros(nPairs)
1838 1838
1839 1839 for i in range(nPairs):
1840 1840 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1841 1841 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1842 1842 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1843 1843 ang[i] = numpy.arctan2(disty[i],distx[i])
1844 1844
1845 1845 return distx, disty, dist, ang
1846 1846 #Computing matrices
1847 1847 # nPairs = len(pairs)
1848 1848 # ang1 = numpy.zeros((nPairs, 2, 1))
1849 1849 # dist1 = numpy.zeros((nPairs, 2, 1))
1850 1850 #
1851 1851 # for j in range(nPairs):
1852 1852 # dist1[j,0,0] = dist[pairs[j][0]]
1853 1853 # dist1[j,1,0] = dist[pairs[j][1]]
1854 1854 # ang1[j,0,0] = ang[pairs[j][0]]
1855 1855 # ang1[j,1,0] = ang[pairs[j][1]]
1856 1856 #
1857 1857 # return distx,disty, dist1,ang1
1858 1858
1859 1859
1860 1860 def __calculateVelVer(self, phase, lagTRange, _lambda):
1861 1861
1862 1862 Ts = lagTRange[1] - lagTRange[0]
1863 1863 velW = -_lambda*phase/(4*math.pi*Ts)
1864 1864
1865 1865 return velW
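# Worked example (assumed values): with a radar wavelength _lambda = 6 m, a lag spacing
# Ts = 0.01 s and a lag-1 phase of 0.1 rad, the formula above gives
# velW = -6 * 0.1 / (4 * pi * 0.01) ~ -4.77 m/s (the sign follows the convention used here).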
1866 1866
1867 1867 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1868 1868 nPairs = tau1.shape[0]
1869 1869 nHeights = tau1.shape[1]
1870 1870 vel = numpy.zeros((nPairs,3,nHeights))
1871 1871 dist1 = numpy.reshape(dist, (dist.size,1))
1872 1872
1873 1873 angCos = numpy.cos(ang)
1874 1874 angSin = numpy.sin(ang)
1875 1875
1876 1876 vel0 = dist1*tau1/(2*tau2**2)
1877 1877 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1878 1878 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1879 1879
1880 1880 ind = numpy.where(numpy.isinf(vel))
1881 1881 vel[ind] = numpy.nan
1882 1882
1883 1883 return vel
1884 1884
1885 1885 # def __getPairsAutoCorr(self, pairsList, nChannels):
1886 1886 #
1887 1887 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1888 1888 #
1889 1889 # for l in range(len(pairsList)):
1890 1890 # firstChannel = pairsList[l][0]
1891 1891 # secondChannel = pairsList[l][1]
1892 1892 #
1893 1893 # #Obtaining autocorrelation pairs
1894 1894 # if firstChannel == secondChannel:
1895 1895 # pairsAutoCorr[firstChannel] = int(l)
1896 1896 #
1897 1897 # pairsAutoCorr = pairsAutoCorr.astype(int)
1898 1898 #
1899 1899 # pairsCrossCorr = range(len(pairsList))
1900 1900 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1901 1901 #
1902 1902 # return pairsAutoCorr, pairsCrossCorr
1903 1903
1904 1904 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1905 1905 def techniqueSA(self, kwargs):
1906 1906
1907 1907 """
1908 1908 Function that implements Spaced Antenna (SA) technique.
1909 1909
1910 1910 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1911 1911 Direction correction (if necessary), Ranges and SNR
1912 1912
1913 1913 Output: Winds estimation (Zonal, Meridional and Vertical)
1914 1914
1915 1915 Parameters affected: Winds
1916 1916 """
1917 1917 position_x = kwargs['positionX']
1918 1918 position_y = kwargs['positionY']
1919 1919 azimuth = kwargs['azimuth']
1920 1920
1921 1921 if 'correctFactor' in kwargs:
1922 1922 correctFactor = kwargs['correctFactor']
1923 1923 else:
1924 1924 correctFactor = 1
1925 1925
1926 1926 groupList = kwargs['groupList']
1927 1927 pairs_ccf = groupList[1]
1928 1928 tau = kwargs['tau']
1929 1929 _lambda = kwargs['_lambda']
1930 1930
1931 1931 #Cross Correlation pairs obtained
1932 1932 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1933 1933 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1934 1934 # pairsSelArray = numpy.array(pairsSelected)
1935 1935 # pairs = []
1936 1936 #
1937 1937 # #Wind estimation pairs obtained
1938 1938 # for i in range(pairsSelArray.shape[0]/2):
1939 1939 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1940 1940 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1941 1941 # pairs.append((ind1,ind2))
1942 1942
1943 1943 indtau = tau.shape[0]//2 # integer division so it can be used for slicing
1944 1944 tau1 = tau[:indtau,:]
1945 1945 tau2 = tau[indtau:-1,:]
1946 1946 # tau1 = tau1[pairs,:]
1947 1947 # tau2 = tau2[pairs,:]
1948 1948 phase1 = tau[-1,:]
1949 1949
1950 1950 #---------------------------------------------------------------------
1951 1951 #Direct method
1952 1952 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1953 1953 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1954 1954 winds = stats.nanmean(winds, axis=0)
1955 1955 #---------------------------------------------------------------------
1956 1956 #General method
1957 1957 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1958 1958 # #Computing the correlation function coefficients
1959 1959 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1960 1960 # #Computing velocities
1961 1961 # winds = self.calculateVelUV(F,G,A,B,H)
1962 1962
1963 1963 #---------------------------------------------------------------------
1964 1964 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1965 1965 winds = correctFactor*winds
1966 1966 return winds
1967 1967
1968 1968 def __checkTime(self, currentTime, paramInterval, outputInterval):
1969 1969
1970 1970 dataTime = currentTime + paramInterval
1971 1971 deltaTime = dataTime - self.__initime
1972 1972
1973 1973 if deltaTime >= outputInterval or deltaTime < 0:
1974 1974 self.__dataReady = True
1975 1975 return
1976 1976
1977 1977 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1978 1978 '''
1979 1979 Function that implements winds estimation technique with detected meteors.
1980 1980
1981 1981 Input: Detected meteors, Minimum meteor quantity to wind estimation
1982 1982
1983 1983 Output: Winds estimation (Zonal and Meridional)
1984 1984
1985 1985 Parameters affected: Winds
1986 1986 '''
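# Hedged sketch (assumed values): each detected meteor contributes one equation
# vel_radial = u*l + v*m, with l = sin(zen)*sin(azim) and m = sin(zen)*cos(azim).
# With at least meteorThresh meteors in a height bin, (u, v) follows from least squares:
#
# import numpy
# zen = numpy.radians([10., 20., 15., 25.]) # assumed zenith angles
# azim = numpy.radians([0., 90., 180., 270.]) # assumed azimuth angles
# vel = numpy.array([3.0, -7.0, -2.5, 6.8]) # assumed radial velocities
# A = numpy.vstack((numpy.sin(zen)*numpy.sin(azim), numpy.sin(zen)*numpy.cos(azim))).T
# u, v = numpy.linalg.lstsq(A, vel, rcond=None)[0] # zonal and meridional wind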
1987 1987 #Settings
1988 1988 nInt = (heightMax - heightMin)/2
1989 1989 nInt = int(nInt)
1990 1990 winds = numpy.zeros((2,nInt))*numpy.nan
1991 1991
1992 1992 #Filter errors
1993 1993 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1994 1994 finalMeteor = arrayMeteor[error,:]
1995 1995
1996 1996 #Meteor Histogram
1997 1997 finalHeights = finalMeteor[:,2]
1998 1998 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1999 1999 nMeteorsPerI = hist[0]
2000 2000 heightPerI = hist[1]
2001 2001
2002 2002 #Sorting meteors by height
2003 2003 indSort = finalHeights.argsort()
2004 2004 finalMeteor2 = finalMeteor[indSort,:]
2005 2005
2006 2006 # Calculating winds
2007 2007 ind1 = 0
2008 2008 ind2 = 0
2009 2009
2010 2010 for i in range(nInt):
2011 2011 nMet = nMeteorsPerI[i]
2012 2012 ind1 = ind2
2013 2013 ind2 = ind1 + nMet
2014 2014
2015 2015 meteorAux = finalMeteor2[ind1:ind2,:]
2016 2016
2017 2017 if meteorAux.shape[0] >= meteorThresh:
2018 2018 vel = meteorAux[:, 6]
2019 2019 zen = meteorAux[:, 4]*numpy.pi/180
2020 2020 azim = meteorAux[:, 3]*numpy.pi/180
2021 2021
2022 2022 n = numpy.cos(zen)
2023 2023 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
2024 2024 # l = m*numpy.tan(azim)
2025 2025 l = numpy.sin(zen)*numpy.sin(azim)
2026 2026 m = numpy.sin(zen)*numpy.cos(azim)
2027 2027
2028 2028 A = numpy.vstack((l, m)).transpose()
2029 2029 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
2030 2030 windsAux = numpy.dot(A1, vel)
2031 2031
2032 2032 winds[0,i] = windsAux[0]
2033 2033 winds[1,i] = windsAux[1]
2034 2034
2035 2035 return winds, heightPerI[:-1]
2036 2036
2037 2037 def techniqueNSM_SA(self, **kwargs):
2038 2038 metArray = kwargs['metArray']
2039 2039 heightList = kwargs['heightList']
2040 2040 timeList = kwargs['timeList']
2041 2041
2042 2042 rx_location = kwargs['rx_location']
2043 2043 groupList = kwargs['groupList']
2044 2044 azimuth = kwargs['azimuth']
2045 2045 dfactor = kwargs['dfactor']
2046 2046 k = kwargs['k']
2047 2047
2048 2048 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
2049 2049 d = dist*dfactor
2050 2050 #Phase calculation
2051 2051 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
2052 2052
2053 2053 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
2054 2054
2055 2055 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2056 2056 azimuth1 = azimuth1*numpy.pi/180
2057 2057
2058 2058 for i in range(heightList.size):
2059 2059 h = heightList[i]
2060 2060 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
2061 2061 metHeight = metArray1[indH,:]
2062 2062 if metHeight.shape[0] >= 2:
2063 2063 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
2064 2064 iazim = metHeight[:,1].astype(int)
2065 2065 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
2066 2066 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
2067 2067 A = numpy.asmatrix(A)
2068 2068 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2069 2069 velHor = numpy.dot(A1,velAux)
2070 2070
2071 2071 velEst[i,:] = numpy.squeeze(velHor)
2072 2072 return velEst
2073 2073
2074 2074 def __getPhaseSlope(self, metArray, heightList, timeList):
2075 2075 meteorList = []
2076 2076 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2077 2077 #Putting back together the meteor matrix
2078 2078 utctime = metArray[:,0]
2079 2079 uniqueTime = numpy.unique(utctime)
2080 2080
2081 2081 phaseDerThresh = 0.5
2082 2082 ippSeconds = timeList[1] - timeList[0]
2083 2083 sec = numpy.where(timeList>1)[0][0]
2084 2084 nPairs = metArray.shape[1] - 6
2085 2085 nHeights = len(heightList)
2086 2086
2087 2087 for t in uniqueTime:
2088 2088 metArray1 = metArray[utctime==t,:]
2089 2089 # phaseDerThresh = numpy.pi/4 #reduce phase threshold
2090 2090 tmet = metArray1[:,1].astype(int)
2091 2091 hmet = metArray1[:,2].astype(int)
2092 2092
2093 2093 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2094 2094 metPhase[:,:] = numpy.nan
2095 2095 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2096 2096
2097 2097 #Delete short trails
2098 2098 metBool = ~numpy.isnan(metPhase[0,:,:])
2099 2099 heightVect = numpy.sum(metBool, axis = 1)
2100 2100 metBool[heightVect<sec,:] = False
2101 2101 metPhase[:,heightVect<sec,:] = numpy.nan
2102 2102
2103 2103 #Derivative
2104 2104 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2105 2105 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2106 2106 metPhase[phDerAux] = numpy.nan
2107 2107
2108 2108 #--------------------------METEOR DETECTION -----------------------------------------
2109 2109 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2110 2110
2111 2111 for p in numpy.arange(nPairs):
2112 2112 phase = metPhase[p,:,:]
2113 2113 phDer = metDer[p,:,:]
2114 2114
2115 2115 for h in indMet:
2116 2116 height = heightList[h]
2117 2117 phase1 = phase[h,:] #82
2118 2118 phDer1 = phDer[h,:]
2119 2119
2120 2120 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2121 2121
2122 2122 indValid = numpy.where(~numpy.isnan(phase1))[0]
2123 2123 initMet = indValid[0]
2124 2124 endMet = 0
2125 2125
2126 2126 for i in range(len(indValid)-1):
2127 2127
2128 2128 #Time difference
2129 2129 inow = indValid[i]
2130 2130 inext = indValid[i+1]
2131 2131 idiff = inext - inow
2132 2132 #Phase difference
2133 2133 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2134 2134
2135 2135 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2136 2136 sizeTrail = inow - initMet + 1
2137 2137 if sizeTrail>3*sec: #Only process trails that are long enough
2138 2138 x = numpy.arange(initMet,inow+1)*ippSeconds
2139 2139 y = phase1[initMet:inow+1]
2140 2140 ynnan = ~numpy.isnan(y)
2141 2141 x = x[ynnan]
2142 2142 y = y[ynnan]
2143 2143 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2144 2144 ylin = x*slope + intercept
2145 2145 rsq = r_value**2
2146 2146 if rsq > 0.5:
2147 2147 vel = slope#*height*1000/(k*d)
2148 2148 estAux = numpy.array([utctime,p,height, vel, rsq])
2149 2149 meteorList.append(estAux)
2150 2150 initMet = inext
2151 2151 metArray2 = numpy.array(meteorList)
2152 2152
2153 2153 return metArray2
2154 2154
2155 2155 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2156 2156
2157 2157 azimuth1 = numpy.zeros(len(pairslist))
2158 2158 dist = numpy.zeros(len(pairslist))
2159 2159
2160 2160 for i in range(len(rx_location)):
2161 2161 ch0 = pairslist[i][0]
2162 2162 ch1 = pairslist[i][1]
2163 2163
2164 2164 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2165 2165 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2166 2166 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2167 2167 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2168 2168
2169 2169 azimuth1 -= azimuth0
2170 2170 return azimuth1, dist
2171 2171
2172 2172 def techniqueNSM_DBS(self, **kwargs):
2173 2173 metArray = kwargs['metArray']
2174 2174 heightList = kwargs['heightList']
2175 2175 timeList = kwargs['timeList']
2176 2176 azimuth = kwargs['azimuth']
2177 2177 theta_x = numpy.array(kwargs['theta_x'])
2178 2178 theta_y = numpy.array(kwargs['theta_y'])
2179 2179
2180 2180 utctime = metArray[:,0]
2181 2181 cmet = metArray[:,1].astype(int)
2182 2182 hmet = metArray[:,3].astype(int)
2183 2183 SNRmet = metArray[:,4]
2184 2184 vmet = metArray[:,5]
2185 2185 spcmet = metArray[:,6]
2186 2186
2187 2187 nChan = numpy.max(cmet) + 1
2188 2188 nHeights = len(heightList)
2189 2189
2190 2190 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2191 2191 hmet = heightList[hmet]
2192 2192 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2193 2193
2194 2194 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2195 2195
2196 2196 for i in range(nHeights - 1):
2197 2197 hmin = heightList[i]
2198 2198 hmax = heightList[i + 1]
2199 2199
2200 2200 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2201 2201 indthisH = numpy.where(thisH)
2202 2202
2203 2203 if numpy.size(indthisH) > 3:
2204 2204
2205 2205 vel_aux = vmet[thisH]
2206 2206 chan_aux = cmet[thisH]
2207 2207 cosu_aux = dir_cosu[chan_aux]
2208 2208 cosv_aux = dir_cosv[chan_aux]
2209 2209 cosw_aux = dir_cosw[chan_aux]
2210 2210
2211 2211 nch = numpy.size(numpy.unique(chan_aux))
2212 2212 if nch > 1:
2213 2213 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2214 2214 velEst[i,:] = numpy.dot(A,vel_aux)
2215 2215
2216 2216 return velEst
2217 2217
2218 2218 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2219 2219
2220 2220 param = dataOut.data_param
2221 2221 if dataOut.abscissaList is not None:
2222 2222 absc = dataOut.abscissaList[:-1]
2223 2223 # noise = dataOut.noise
2224 2224 heightList = dataOut.heightList
2225 SNR = dataOut.data_SNR
2225 SNR = dataOut.data_snr
2226 2226
2227 2227 if technique == 'DBS':
2228 2228
2229 2229 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2230 2230 kwargs['heightList'] = heightList
2231 2231 kwargs['SNR'] = SNR
2232 2232
2233 dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
2233 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
2234 2234 dataOut.utctimeInit = dataOut.utctime
2235 2235 dataOut.outputInterval = dataOut.paramInterval
2236 2236
2237 2237 elif technique == 'SA':
2238 2238
2239 2239 #Parameters
2240 2240 # position_x = kwargs['positionX']
2241 2241 # position_y = kwargs['positionY']
2242 2242 # azimuth = kwargs['azimuth']
2243 2243 #
2244 2244 # if kwargs.has_key('crosspairsList'):
2245 2245 # pairs = kwargs['crosspairsList']
2246 2246 # else:
2247 2247 # pairs = None
2248 2248 #
2249 2249 # if kwargs.has_key('correctFactor'):
2250 2250 # correctFactor = kwargs['correctFactor']
2251 2251 # else:
2252 2252 # correctFactor = 1
2253 2253
2254 2254 # tau = dataOut.data_param
2255 2255 # _lambda = dataOut.C/dataOut.frequency
2256 2256 # pairsList = dataOut.groupList
2257 2257 # nChannels = dataOut.nChannels
2258 2258
2259 2259 kwargs['groupList'] = dataOut.groupList
2260 2260 kwargs['tau'] = dataOut.data_param
2261 2261 kwargs['_lambda'] = dataOut.C/dataOut.frequency
2262 2262 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2263 2263 dataOut.data_output = self.techniqueSA(kwargs)
2264 2264 dataOut.utctimeInit = dataOut.utctime
2265 2265 dataOut.outputInterval = dataOut.timeInterval
2266 2266
2267 2267 elif technique == 'Meteors':
2268 2268 dataOut.flagNoData = True
2269 2269 self.__dataReady = False
2270 2270
2271 2271 if 'nHours' in kwargs:
2272 2272 nHours = kwargs['nHours']
2273 2273 else:
2274 2274 nHours = 1
2275 2275
2276 2276 if 'meteorsPerBin' in kwargs:
2277 2277 meteorThresh = kwargs['meteorsPerBin']
2278 2278 else:
2279 2279 meteorThresh = 6
2280 2280
2281 2281 if 'hmin' in kwargs:
2282 2282 hmin = kwargs['hmin']
2283 2283 else: hmin = 70
2284 2284 if 'hmax' in kwargs:
2285 2285 hmax = kwargs['hmax']
2286 2286 else: hmax = 110
2287 2287
2288 2288 dataOut.outputInterval = nHours*3600
2289 2289
2290 2290 if self.__isConfig == False:
2291 2291 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2292 2292 #Get Initial LTC time
2293 2293 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2294 2294 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2295 2295
2296 2296 self.__isConfig = True
2297 2297
2298 2298 if self.__buffer is None:
2299 2299 self.__buffer = dataOut.data_param
2300 2300 self.__firstdata = copy.copy(dataOut)
2301 2301
2302 2302 else:
2303 2303 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2304 2304
2305 2305 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2306 2306
2307 2307 if self.__dataReady:
2308 2308 dataOut.utctimeInit = self.__initime
2309 2309
2310 2310 self.__initime += dataOut.outputInterval #to erase time offset
2311 2311
2312 2312 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2313 2313 dataOut.flagNoData = False
2314 2314 self.__buffer = None
2315 2315
2316 2316 elif technique == 'Meteors1':
2317 2317 dataOut.flagNoData = True
2318 2318 self.__dataReady = False
2319 2319
2320 2320 if 'nMins' in kwargs:
2321 2321 nMins = kwargs['nMins']
2322 2322 else: nMins = 20
2323 2323 if 'rx_location' in kwargs:
2324 2324 rx_location = kwargs['rx_location']
2325 2325 else: rx_location = [(0,1),(1,1),(1,0)]
2326 2326 if 'azimuth' in kwargs:
2327 2327 azimuth = kwargs['azimuth']
2328 2328 else: azimuth = 51.06
2329 2329 if 'dfactor' in kwargs:
2330 2330 dfactor = kwargs['dfactor']
2331 2331 if 'mode' in kwargs:
2332 2332 mode = kwargs['mode']
2333 2333 if 'theta_x' in kwargs:
2334 2334 theta_x = kwargs['theta_x']
2335 2335 if 'theta_y' in kwargs:
2336 2336 theta_y = kwargs['theta_y']
2337 2337 else: mode = 'SA'
2338 2338
2339 2339 #Delete this later
2340 2340 if dataOut.groupList is None:
2341 2341 dataOut.groupList = [(0,1),(0,2),(1,2)]
2342 2342 groupList = dataOut.groupList
2343 2343 C = 3e8
2344 2344 freq = 50e6
2345 2345 lamb = C/freq
2346 2346 k = 2*numpy.pi/lamb
2347 2347
2348 2348 timeList = dataOut.abscissaList
2349 2349 heightList = dataOut.heightList
2350 2350
2351 2351 if self.__isConfig == False:
2352 2352 dataOut.outputInterval = nMins*60
2353 2353 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2354 2354 #Get Initial LTC time
2355 2355 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2356 2356 minuteAux = initime.minute
2357 2357 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2358 2358 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2359 2359
2360 2360 self.__isConfig = True
2361 2361
2362 2362 if self.__buffer is None:
2363 2363 self.__buffer = dataOut.data_param
2364 2364 self.__firstdata = copy.copy(dataOut)
2365 2365
2366 2366 else:
2367 2367 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2368 2368
2369 2369 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2370 2370
2371 2371 if self.__dataReady:
2372 2372 dataOut.utctimeInit = self.__initime
2373 2373 self.__initime += dataOut.outputInterval #to erase time offset
2374 2374
2375 2375 metArray = self.__buffer
2376 2376 if mode == 'SA':
2377 2377 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2378 2378 elif mode == 'DBS':
2379 2379 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2380 2380 dataOut.data_output = dataOut.data_output.T
2381 2381 dataOut.flagNoData = False
2382 2382 self.__buffer = None
2383 2383
2384 2384 return
2385 2385
2386 2386 class EWDriftsEstimation(Operation):
2387 2387
2388 2388 def __init__(self):
2389 2389 Operation.__init__(self)
2390 2390
2391 2391 def __correctValues(self, heiRang, phi, velRadial, SNR):
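# Project each beam's slant ranges onto the vertical with cos(zenith) and
# cubic-interpolate the radial velocities and SNR of every beam onto the
# common height grid defined by the most tilted beam.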
2392 2392 listPhi = phi.tolist()
2393 2393 maxid = listPhi.index(max(listPhi))
2394 2394 minid = listPhi.index(min(listPhi))
2395 2395
2396 2396 rango = list(range(len(phi)))
2397 2397 # rango = numpy.delete(rango,maxid)
2398 2398
2399 2399 heiRang1 = heiRang*math.cos(phi[maxid])
2400 2400 heiRangAux = heiRang*math.cos(phi[minid])
2401 2401 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2402 2402 heiRang1 = numpy.delete(heiRang1,indOut)
2403 2403
2404 2404 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2405 2405 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2406 2406
2407 2407 for i in rango:
2408 2408 x = heiRang*math.cos(phi[i])
2409 2409 y1 = velRadial[i,:]
2410 2410 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2411 2411
2412 2412 x1 = heiRang1
2413 2413 y11 = f1(x1)
2414 2414
2415 2415 y2 = SNR[i,:]
2416 2416 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2417 2417 y21 = f2(x1)
2418 2418
2419 2419 velRadial1[i,:] = y11
2420 2420 SNR1[i,:] = y21
2421 2421
2422 2422 return heiRang1, velRadial1, SNR1
2423 2423
2424 2424 def run(self, dataOut, zenith, zenithCorrection):
2425 2425 heiRang = dataOut.heightList
2426 2426 velRadial = dataOut.data_param[:,3,:]
2427 SNR = dataOut.data_SNR
2427 SNR = dataOut.data_snr
2428 2428
2429 2429 zenith = numpy.array(zenith)
2430 2430 zenith -= zenithCorrection
2431 2431 zenith *= numpy.pi/180
2432 2432
2433 2433 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2434 2434
2435 2435 alp = zenith[0]
2436 2436 bet = zenith[1]
2437 2437
2438 2438 w_w = velRadial1[0,:]
2439 2439 w_e = velRadial1[1,:]
2440 2440
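# The two radial velocities satisfy w_w = u*sin(alp) + w*cos(alp) and
# w_e = u*sin(bet) + w*cos(bet); the expressions below solve this 2x2 system
# (Cramer's rule) for the vertical (w) and zonal (u) wind components.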
2441 2441 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2442 2442 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2443 2443
2444 2444 winds = numpy.vstack((u,w))
2445 2445
2446 2446 dataOut.heightList = heiRang1
2447 2447 dataOut.data_output = winds
2448 dataOut.data_SNR = SNR1
2448 dataOut.data_snr = SNR1
2449 2449
2450 2450 dataOut.utctimeInit = dataOut.utctime
2451 2451 dataOut.outputInterval = dataOut.timeInterval
2452 2452 return
2453 2453
2454 2454 #--------------- Non Specular Meteor ----------------
2455 2455
2456 2456 class NonSpecularMeteorDetection(Operation):
2457 2457
2458 2458 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2459 2459 data_acf = dataOut.data_pre[0]
2460 2460 data_ccf = dataOut.data_pre[1]
2461 2461 pairsList = dataOut.groupList[1]
2462 2462
2463 2463 lamb = dataOut.C/dataOut.frequency
2464 2464 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2465 2465 paramInterval = dataOut.paramInterval
2466 2466
2467 2467 nChannels = data_acf.shape[0]
2468 2468 nLags = data_acf.shape[1]
2469 2469 nProfiles = data_acf.shape[2]
2470 2470 nHeights = dataOut.nHeights
2471 2471 nCohInt = dataOut.nCohInt
2472 2472 sec = numpy.round(nProfiles/dataOut.paramInterval)
2473 2473 heightList = dataOut.heightList
2474 2474 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2475 2475 utctime = dataOut.utctime
2476 2476
2477 2477 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2478 2478
2479 2479 #------------------------ SNR --------------------------------------
2480 2480 power = data_acf[:,0,:,:].real
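# The zero-lag ACF gives the total power per channel; Hildebrand-Sekhon then
# provides the noise floor of each channel and SNR = (power - noise)/noise.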
2481 2481 noise = numpy.zeros(nChannels)
2482 2482 SNR = numpy.zeros(power.shape)
2483 2483 for i in range(nChannels):
2484 2484 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2485 2485 SNR[i] = (power[i]-noise[i])/noise[i]
2486 2486 SNRm = numpy.nanmean(SNR, axis = 0)
2487 2487 SNRdB = 10*numpy.log10(SNR)
2488 2488
2489 2489 if mode == 'SA':
2490 2490 dataOut.groupList = dataOut.groupList[1]
2491 2491 nPairs = data_ccf.shape[0]
2492 2492 #---------------------- Coherence and Phase --------------------------
2493 2493 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2494 2494 # phase1 = numpy.copy(phase)
2495 2495 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2496 2496
2497 2497 for p in range(nPairs):
2498 2498 ch0 = pairsList[p][0]
2499 2499 ch1 = pairsList[p][1]
2500 2500 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2501 2501 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2502 2502 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2503 2503 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2504 2504 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2505 2505 coh = numpy.nanmax(coh1, axis = 0)
2506 2506 # struc = numpy.ones((5,1))
2507 2507 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2508 2508 #---------------------- Radial Velocity ----------------------------
2509 2509 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2510 2510 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2511 2511
2512 2512 if allData:
2513 2513 boolMetFin = ~numpy.isnan(SNRm)
2514 2514 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2515 2515 else:
2516 2516 #------------------------ Meteor mask ---------------------------------
2517 2517 # #SNR mask
2518 2518 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2519 2519 #
2520 2520 # #Erase small objects
2521 2521 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2522 2522 #
2523 2523 # auxEEJ = numpy.sum(boolMet1,axis=0)
2524 2524 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2525 2525 # indEEJ = numpy.where(indOver)[0]
2526 2526 # indNEEJ = numpy.where(~indOver)[0]
2527 2527 #
2528 2528 # boolMetFin = boolMet1
2529 2529 #
2530 2530 # if indEEJ.size > 0:
2531 2531 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2532 2532 #
2533 2533 # boolMet2 = coh > cohThresh
2534 2534 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2535 2535 #
2536 2536 # #Final Meteor mask
2537 2537 # boolMetFin = boolMet1|boolMet2
2538 2538
2539 2539 #Coherence mask
2540 2540 boolMet1 = coh > 0.75
2541 2541 struc = numpy.ones((30,1))
2542 2542 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2543 2543
2544 2544 #Derivative mask
2545 2545 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2546 2546 boolMet2 = derPhase < 0.2
2547 2547 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2548 2548 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2549 2549 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2550 2550 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2551 2551 # #Final mask
2552 2552 # boolMetFin = boolMet2
2553 2553 boolMetFin = boolMet1&boolMet2
2554 2554 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2555 2555 #Creating data_param
2556 2556 coordMet = numpy.where(boolMetFin)
2557 2557
2558 2558 tmet = coordMet[0]
2559 2559 hmet = coordMet[1]
2560 2560
2561 2561 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2562 2562 data_param[:,0] = utctime
2563 2563 data_param[:,1] = tmet
2564 2564 data_param[:,2] = hmet
2565 2565 data_param[:,3] = SNRm[tmet,hmet]
2566 2566 data_param[:,4] = velRad[tmet,hmet]
2567 2567 data_param[:,5] = coh[tmet,hmet]
2568 2568 data_param[:,6:] = phase[:,tmet,hmet].T
2569 2569
2570 2570 elif mode == 'DBS':
2571 2571 dataOut.groupList = numpy.arange(nChannels)
2572 2572
2573 2573 #Radial Velocities
2574 2574 phase = numpy.angle(data_acf[:,1,:,:])
2575 2575 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2576 2576 velRad = phase*lamb/(4*numpy.pi*tSamp)
2577 2577
2578 2578 #Spectral width
2579 2579 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2580 2580 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2581 2581 acf1 = data_acf[:,1,:,:]
2582 2582 acf2 = data_acf[:,2,:,:]
2583 2583
2584 2584 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2585 2585 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2586 2586 if allData:
2587 2587 boolMetFin = ~numpy.isnan(SNRdB)
2588 2588 else:
2589 2589 #SNR
2590 2590 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2591 2591 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2592 2592
2593 2593 #Radial velocity
2594 2594 boolMet2 = numpy.abs(velRad) < 20
2595 2595 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2596 2596
2597 2597 #Spectral Width
2598 2598 boolMet3 = spcWidth < 30
2599 2599 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2600 2600 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2601 2601 boolMetFin = boolMet1&boolMet2&boolMet3
2602 2602
2603 2603 #Creating data_param
2604 2604 coordMet = numpy.where(boolMetFin)
2605 2605
2606 2606 cmet = coordMet[0]
2607 2607 tmet = coordMet[1]
2608 2608 hmet = coordMet[2]
2609 2609
2610 2610 data_param = numpy.zeros((tmet.size, 7))
2611 2611 data_param[:,0] = utctime
2612 2612 data_param[:,1] = cmet
2613 2613 data_param[:,2] = tmet
2614 2614 data_param[:,3] = hmet
2615 2615 data_param[:,4] = SNR[cmet,tmet,hmet].T
2616 2616 data_param[:,5] = velRad[cmet,tmet,hmet].T
2617 2617 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2618 2618
2619 2619 # self.dataOut.data_param = data_int
2620 2620 if len(data_param) == 0:
2621 2621 dataOut.flagNoData = True
2622 2622 else:
2623 2623 dataOut.data_param = data_param
2624 2624
2625 2625 def __erase_small(self, binArray, threshX, threshY):
2626 2626 labarray, numfeat = ndimage.measurements.label(binArray)
2627 2627 binArray1 = numpy.copy(binArray)
2628 2628
2629 2629 for i in range(1,numfeat + 1):
2630 2630 auxBin = (labarray==i)
2631 2631 auxSize = auxBin.sum()
2632 2632
2633 2633 x,y = numpy.where(auxBin)
2634 2634 widthX = x.max() - x.min()
2635 2635 widthY = y.max() - y.min()
2636 2636
2637 2637 #width X: 3 sec -> 12.5*3
2638 2638 #width Y:
2639 2639
2640 2640 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2641 2641 binArray1[auxBin] = False
2642 2642
2643 2643 return binArray1
2644 2644
2645 2645 #--------------- Specular Meteor ----------------
2646 2646
2647 2647 class SMDetection(Operation):
2648 2648 '''
2649 2649 Function DetectMeteors()
2650 2650 Project developed following the paper:
2651 2651 HOLDSWORTH ET AL. 2004
2652 2652
2653 2653 Input:
2654 2654 self.dataOut.data_pre
2655 2655
2656 2656 centerReceiverIndex: Index of the center receiver among the channels
2657 2657
2658 2658 hei_ref: Height reference for the Beacon signal extraction
2659 2659 tauindex:
2660 2660 predefinedPhaseShifts: Predefined phase offset for the voltage signals
2661 2661
2662 2662 cohDetection: Whether to use coherent detection or not
2663 2663 cohDet_timeStep: Coherent Detection calculation time step
2664 2664 cohDet_thresh: Coherent Detection phase threshold to correct phases
2665 2665
2666 2666 noise_timeStep: Noise calculation time step
2667 2667 noise_multiple: Noise multiple to define signal threshold
2668 2668
2669 2669 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2670 2670 multDet_rangeLimit: Multiple Detection Removal range limit in km
2671 2671
2672 2672 phaseThresh: Maximum phase difference between receivers for a signal to be considered a meteor
2673 2673 SNRThresh: Minimum SNR threshold for a signal to be considered a meteor
2674 2674
2675 2675 hmin: Minimum height of a meteor to be used in the subsequent wind estimations
2676 2676 hmax: Maximum height of a meteor to be used in the subsequent wind estimations
2677 2677 azimuth: Azimuth angle correction
2678 2678
2679 2679 Affected:
2680 2680 self.dataOut.data_param
2681 2681
2682 2682 Rejection Criteria (Errors):
2683 2683 0: No error; analysis OK
2684 2684 1: SNR < SNR threshold
2685 2685 2: angle of arrival (AOA) ambiguously determined
2686 2686 3: AOA estimate not feasible
2687 2687 4: Large difference in AOAs obtained from different antenna baselines
2688 2688 5: echo at start or end of time series
2689 2689 6: echo less than 5 samples long; too short for analysis
2690 2690 7: echo rise exceeds 0.3s
2691 2691 8: echo decay time less than twice rise time
2692 2692 9: large power level before echo
2693 2693 10: large power level after echo
2694 2694 11: poor fit to amplitude for estimation of decay time
2695 2695 12: poor fit to CCF phase variation for estimation of radial drift velocity
2696 2696 13: height unresolvable echo: no valid height within 70 to 110 km
2697 2697 14: height ambiguous echo: more than one possible height within 70 to 110 km
2698 2698 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2699 2699 16: oscillatory echo, indicating event most likely not an underdense echo
2700 2700
2701 2701 17: phase difference threshold exceeded during meteor reestimation
2702 2702
2703 2703 Data Storage:
2704 2704 Meteors for Wind Estimation (8):
2705 2705 Utc Time | Range Height
2706 2706 Azimuth Zenith errorCosDir
2707 2707 VelRad errorVelRad
2708 2708 Phase0 Phase1 Phase2 Phase3
2709 2709 TypeError
2710 2710
2711 2711 '''
2712 2712
2713 2713 def run(self, dataOut, hei_ref = None, tauindex = 0,
2714 2714 phaseOffsets = None,
2715 2715 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2716 2716 noise_timeStep = 4, noise_multiple = 4,
2717 2717 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2718 2718 phaseThresh = 20, SNRThresh = 5,
2719 2719 hmin = 50, hmax=150, azimuth = 0,
2720 2720 channelPositions = None) :
2721 2721
2722 2722
2723 2723 #Getting Pairslist
2724 2724 if channelPositions is None:
2725 2725 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2726 2726 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2727 2727 meteorOps = SMOperations()
2728 2728 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2729 2729 heiRang = dataOut.heightList
2730 2730 #Get Beacon signal - No Beacon signal anymore
2731 2731 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2732 2732 #
2733 2733 # if hei_ref != None:
2734 2734 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2735 2735 #
2736 2736
2737 2737
2738 2738 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2739 2739 # see if the user put in pre defined phase shifts
2740 2740 voltsPShift = dataOut.data_pre.copy()
2741 2741
2742 2742 # if predefinedPhaseShifts != None:
2743 2743 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2744 2744 #
2745 2745 # # elif beaconPhaseShifts:
2746 2746 # # #get hardware phase shifts using beacon signal
2747 2747 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2748 2748 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2749 2749 #
2750 2750 # else:
2751 2751 # hardwarePhaseShifts = numpy.zeros(5)
2752 2752 #
2753 2753 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2754 2754 # for i in range(self.dataOut.data_pre.shape[0]):
2755 2755 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2756 2756
2757 2757 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2758 2758
2759 2759 #Remove DC
2760 2760 voltsDC = numpy.mean(voltsPShift,1)
2761 2761 voltsDC = numpy.mean(voltsDC,1)
2762 2762 for i in range(voltsDC.shape[0]):
2763 2763 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2764 2764
2765 2765 #Don't consider the last heights; they're used to calculate the hardware phase shift
2766 2766 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2767 2767
2768 2768 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2769 2769 #Coherent Detection
2770 2770 if cohDetection:
2771 2771 #use coherent detection to get the net power
2772 2772 cohDet_thresh = cohDet_thresh*numpy.pi/180
2773 2773 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2774 2774
2775 2775 #Non-coherent detection!
2776 2776 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2777 2777 #********** END OF COH/NON-COH POWER CALCULATION**********************
2778 2778
2779 2779 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2780 2780 #Get noise
2781 2781 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2782 2782 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2783 2783 #Get signal threshold
2784 2784 signalThresh = noise_multiple*noise
2785 2785 #Meteor echoes detection
2786 2786 listMeteors = self.__findMeteors(powerNet, signalThresh)
2787 2787 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CALCULATION **********
2788 2788
2789 2789 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2790 2790 #Parameters
2791 2791 heiRange = dataOut.heightList
2792 2792 rangeInterval = heiRange[1] - heiRange[0]
2793 2793 rangeLimit = multDet_rangeLimit/rangeInterval
2794 2794 timeLimit = multDet_timeLimit/dataOut.timeInterval
2795 2795 #Multiple detection removals
2796 2796 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2797 2797 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2798 2798
2799 2799 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2800 2800 #Parameters
2801 2801 phaseThresh = phaseThresh*numpy.pi/180
2802 2802 thresh = [phaseThresh, noise_multiple, SNRThresh]
2803 2803 #Meteor reestimation (Errors N 1, 6, 12, 17)
2804 2804 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2805 2805 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2806 2806 #Estimation of decay times (Errors N 7, 8, 11)
2807 2807 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2808 2808 #******************* END OF METEOR REESTIMATION *******************
2809 2809
2810 2810 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2811 2811 #Calculating Radial Velocity (Error N 15)
2812 2812 radialStdThresh = 10
2813 2813 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2814 2814
2815 2815 if len(listMeteors4) > 0:
2816 2816 #Setting New Array
2817 2817 date = dataOut.utctime
2818 2818 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2819 2819
2820 2820 #Correcting phase offset
2821 2821 if phaseOffsets != None:
2822 2822 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2823 2823 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2824 2824
2825 2825 #Second Pairslist
2826 2826 pairsList = []
2827 2827 pairx = (0,1)
2828 2828 pairy = (2,3)
2829 2829 pairsList.append(pairx)
2830 2830 pairsList.append(pairy)
2831 2831
2832 2832 jph = numpy.array([0,0,0,0])
2833 2833 h = (hmin,hmax)
2834 2834 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2835 2835
2836 2836 # #Calculate AOA (Error N 3, 4)
2837 2837 # #JONES ET AL. 1998
2838 2838 # error = arrayParameters[:,-1]
2839 2839 # AOAthresh = numpy.pi/8
2840 2840 # phases = -arrayParameters[:,9:13]
2841 2841 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2842 2842 #
2843 2843 # #Calculate Heights (Error N 13 and 14)
2844 2844 # error = arrayParameters[:,-1]
2845 2845 # Ranges = arrayParameters[:,2]
2846 2846 # zenith = arrayParameters[:,5]
2847 2847 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2848 2848 # error = arrayParameters[:,-1]
2849 2849 #********************* END OF PARAMETERS CALCULATION **************************
2850 2850
2851 2851 #***************************+ PASS DATA TO NEXT STEP **********************
2852 2852 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2853 2853 dataOut.data_param = arrayParameters
2854 2854
2855 2855 if arrayParameters is None:
2856 2856 dataOut.flagNoData = True
2857 2857 else:
2858 2858 dataOut.flagNoData = False
2859 2859
2860 2860 return
2861 2861
2862 2862 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2863 2863
2864 2864 minIndex = min(newheis[0])
2865 2865 maxIndex = max(newheis[0])
2866 2866
2867 2867 voltage = voltage0[:,:,minIndex:maxIndex+1]
2868 2868 nLength = voltage.shape[1]/n
2869 2869 nMin = 0
2870 2870 nMax = 0
2871 2871 phaseOffset = numpy.zeros((len(pairslist),n))
2872 2872
2873 2873 for i in range(n):
2874 2874 nMax += nLength
2875 2875 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2876 2876 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2877 2877 phaseOffset[:,i] = phaseCCF.transpose()
2878 2878 nMin = nMax
2879 2879 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2880 2880
2881 2881 #Remove Outliers
2882 2882 factor = 2
2883 2883 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2884 2884 dw = numpy.std(wt,axis = 1)
2885 2885 dw = dw.reshape((dw.size,1))
2886 2886 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2887 2887 phaseOffset[ind] = numpy.nan
2888 2888 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2889 2889
2890 2890 return phaseOffset
2891 2891
2892 2892 def __shiftPhase(self, data, phaseShift):
2893 2893 #this will shift the phase of a complex number
2894 2894 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2895 2895 return dataShifted
2896 2896
2897 2897 def __estimatePhaseDifference(self, array, pairslist):
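# The CCF phase of every pair is evaluated at lags [-2,-1,0,1,2]; after unwrapping,
# a linear fit over lags [-2,-1,1,2] is extrapolated to lag 0 and compared with the
# measured lag-0 phase to obtain the per-pair phase difference and arrival phase.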
2898 2898 nChannel = array.shape[0]
2899 2899 nHeights = array.shape[2]
2900 2900 numPairs = len(pairslist)
2901 2901 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2902 2902 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2903 2903
2904 2904 #Correct phases
2905 2905 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2906 2906 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2907 2907
2908 2908 if indDer[0].shape[0] > 0:
2909 2909 for i in range(indDer[0].shape[0]):
2910 2910 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2911 2911 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2912 2912
2913 2913 # for j in range(numSides):
2914 2914 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2915 2915 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2916 2916 #
2917 2917 #Linear
2918 2918 phaseInt = numpy.zeros((numPairs,1))
2919 2919 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2920 2920 for j in range(numPairs):
2921 2921 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2922 2922 phaseInt[j] = fit[1]
2923 2923 #Phase Differences
2924 2924 phaseDiff = phaseInt - phaseCCF[:,2,:]
2925 2925 phaseArrival = phaseInt.reshape(phaseInt.size)
2926 2926
2927 2927 #Dealias
2928 2928 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2929 2929 # indAlias = numpy.where(phaseArrival > numpy.pi)
2930 2930 # phaseArrival[indAlias] -= 2*numpy.pi
2931 2931 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2932 2932 # phaseArrival[indAlias] += 2*numpy.pi
2933 2933
2934 2934 return phaseDiff, phaseArrival
2935 2935
2936 2936 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2937 2937 #this function will run the coherent detection used in Holdsworth et al. 2004 and return the net power
2938 2938 #find the phase shifts of each channel over 1 second intervals
2939 2939 #only look at ranges below the beacon signal
2940 2940 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2941 2941 numBlocks = int(volts.shape[1]/numProfPerBlock)
2942 2942 numHeights = volts.shape[2]
2943 2943 nChannel = volts.shape[0]
2944 2944 voltsCohDet = volts.copy()
2945 2945
2946 2946 pairsarray = numpy.array(pairslist)
2947 2947 indSides = pairsarray[:,1]
2948 2948 # indSides = numpy.array(range(nChannel))
2949 2949 # indSides = numpy.delete(indSides, indCenter)
2950 2950 #
2951 2951 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2952 2952 listBlocks = numpy.array_split(volts, numBlocks, 1)
2953 2953
2954 2954 startInd = 0
2955 2955 endInd = 0
2956 2956
2957 2957 for i in range(numBlocks):
2958 2958 startInd = endInd
2959 2959 endInd = endInd + listBlocks[i].shape[1]
2960 2960
2961 2961 arrayBlock = listBlocks[i]
2962 2962 # arrayBlockCenter = listCenter[i]
2963 2963
2964 2964 #Estimate the Phase Difference
2965 2965 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2966 2966 #Phase Difference RMS
2967 2967 arrayPhaseRMS = numpy.abs(phaseDiff)
2968 2968 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2969 2969 indPhase = numpy.where(phaseRMSaux==4)
2970 2970 #Shifting
2971 2971 if indPhase[0].shape[0] > 0:
2972 2972 for j in range(indSides.size):
2973 2973 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2974 2974 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2975 2975
2976 2976 return voltsCohDet
2977 2977
2978 2978 def __calculateCCF(self, volts, pairslist ,laglist):
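# Lagged cross-correlation of each channel pair: the second channel is shifted in
# time (zero-padded) and summed against the conjugate of the first, giving one
# complex CCF value per pair, lag and height.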
2979 2979
2980 2980 nHeights = volts.shape[2]
2981 2981 nPoints = volts.shape[1]
2982 2982 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2983 2983
2984 2984 for i in range(len(pairslist)):
2985 2985 volts1 = volts[pairslist[i][0]]
2986 2986 volts2 = volts[pairslist[i][1]]
2987 2987
2988 2988 for t in range(len(laglist)):
2989 2989 idxT = laglist[t]
2990 2990 if idxT >= 0:
2991 2991 vStacked = numpy.vstack((volts2[idxT:,:],
2992 2992 numpy.zeros((idxT, nHeights),dtype='complex')))
2993 2993 else:
2994 2994 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2995 2995 volts2[:(nPoints + idxT),:]))
2996 2996 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2997 2997
2998 2998 vStacked = None
2999 2999 return voltsCCF
3000 3000
3001 3001 def __getNoise(self, power, timeSegment, timeInterval):
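# Block-averaged noise estimate: the power series is split into blocks of about
# timeSegment seconds; every profile in a block gets the block's mean power per
# height (noise) and the block's overall mean power (noise1).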
3002 3002 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
3003 3003 numBlocks = int(power.shape[0]/numProfPerBlock)
3004 3004 numHeights = power.shape[1]
3005 3005
3006 3006 listPower = numpy.array_split(power, numBlocks, 0)
3007 3007 noise = numpy.zeros((power.shape[0], power.shape[1]))
3008 3008 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
3009 3009
3010 3010 startInd = 0
3011 3011 endInd = 0
3012 3012
3013 3013 for i in range(numBlocks): #split per channel
3014 3014 startInd = endInd
3015 3015 endInd = endInd + listPower[i].shape[0]
3016 3016
3017 3017 arrayBlock = listPower[i]
3018 3018 noiseAux = numpy.mean(arrayBlock, 0)
3019 3019 # noiseAux = numpy.median(noiseAux)
3020 3020 # noiseAux = numpy.mean(arrayBlock)
3021 3021 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
3022 3022
3023 3023 noiseAux1 = numpy.mean(arrayBlock)
3024 3024 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
3025 3025
3026 3026 return noise, noise1
3027 3027
3028 3028 def __findMeteors(self, power, thresh):
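# For every height, find contiguous runs of profiles whose power stays above the
# noise-based threshold; each run is stored as [height, start, peak, end, FLA],
# where FLA is a lag-one product sum of the power run, later used to rank detections.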
3029 3029 nProf = power.shape[0]
3030 3030 nHeights = power.shape[1]
3031 3031 listMeteors = []
3032 3032
3033 3033 for i in range(nHeights):
3034 3034 powerAux = power[:,i]
3035 3035 threshAux = thresh[:,i]
3036 3036
3037 3037 indUPthresh = numpy.where(powerAux > threshAux)[0]
3038 3038 indDNthresh = numpy.where(powerAux <= threshAux)[0]
3039 3039
3040 3040 j = 0
3041 3041
3042 3042 while (j < indUPthresh.size - 2):
3043 3043 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
3044 3044 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
3045 3045 indDNthresh = indDNthresh[indDNAux]
3046 3046
3047 3047 if (indDNthresh.size > 0):
3048 3048 indEnd = indDNthresh[0] - 1
3049 3049 indInit = indUPthresh[j]
3050 3050
3051 3051 meteor = powerAux[indInit:indEnd + 1]
3052 3052 indPeak = meteor.argmax() + indInit
3053 3053 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
3054 3054
3055 3055 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK THIS!!!!!
3056 3056 j = numpy.where(indUPthresh == indEnd)[0] + 1
3057 3057 else: j+=1
3058 3058 else: j+=1
3059 3059
3060 3060 return listMeteors
3061 3061
3062 3062 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
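# Greedy removal of duplicate detections: repeatedly keep the candidate with the
# largest FLA and discard every detection lying within rangeLimit height bins and
# timeLimit profiles of it, until no candidates remain.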
3063 3063
3064 3064 arrayMeteors = numpy.asarray(listMeteors)
3065 3065 listMeteors1 = []
3066 3066
3067 3067 while arrayMeteors.shape[0] > 0:
3068 3068 FLAs = arrayMeteors[:,4]
3069 3069 maxFLA = FLAs.argmax()
3070 3070 listMeteors1.append(arrayMeteors[maxFLA,:])
3071 3071
3072 3072 MeteorInitTime = arrayMeteors[maxFLA,1]
3073 3073 MeteorEndTime = arrayMeteors[maxFLA,3]
3074 3074 MeteorHeight = arrayMeteors[maxFLA,0]
3075 3075
3076 3076 #Check neighborhood
3077 3077 maxHeightIndex = MeteorHeight + rangeLimit
3078 3078 minHeightIndex = MeteorHeight - rangeLimit
3079 3079 minTimeIndex = MeteorInitTime - timeLimit
3080 3080 maxTimeIndex = MeteorEndTime + timeLimit
3081 3081
3082 3082 #Check Heights
3083 3083 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3084 3084 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3085 3085 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3086 3086
3087 3087 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3088 3088
3089 3089 return listMeteors1
3090 3090
3091 3091 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3092 3092 numHeights = volts.shape[2]
3093 3093 nChannel = volts.shape[0]
3094 3094
3095 3095 thresholdPhase = thresh[0]
3096 3096 thresholdNoise = thresh[1]
3097 3097 thresholdDB = float(thresh[2])
3098 3098
3099 3099 thresholdDB1 = 10**(thresholdDB/10)
3100 3100 pairsarray = numpy.array(pairslist)
3101 3101 indSides = pairsarray[:,1]
3102 3102
3103 3103 pairslist1 = list(pairslist)
3104 3104 pairslist1.append((0,1))
3105 3105 pairslist1.append((3,4))
3106 3106
3107 3107 listMeteors1 = []
3108 3108 listPowerSeries = []
3109 3109 listVoltageSeries = []
3110 3110 #volts has the raw data
3111 3111
3112 3112 if frequency == 30e6:
3113 3113 timeLag = 45*10**-3
3114 3114 else:
3115 3115 timeLag = 15*10**-3
3116 3116 lag = numpy.ceil(timeLag/timeInterval)
3117 3117
3118 3118 for i in range(len(listMeteors)):
3119 3119
3120 3120 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3121 3121 meteorAux = numpy.zeros(16)
3122 3122
3123 3123 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3124 3124 mHeight = listMeteors[i][0]
3125 3125 mStart = listMeteors[i][1]
3126 3126 mPeak = listMeteors[i][2]
3127 3127 mEnd = listMeteors[i][3]
3128 3128
3129 3129 #get the volt data between the start and end times of the meteor
3130 3130 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3131 3131 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3132 3132
3133 3133 #3.6. Phase Difference estimation
3134 3134 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3135 3135
3136 3136 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3137 3137 #meteorVolts0.- all Channels, all Profiles
3138 3138 meteorVolts0 = volts[:,:,mHeight]
3139 3139 meteorThresh = noise[:,mHeight]*thresholdNoise
3140 3140 meteorNoise = noise[:,mHeight]
3141 3141 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3142 3142 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3143 3143
3144 3144 #Times reestimation
3145 3145 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3146 3146 if mStart1.size > 0:
3147 3147 mStart1 = mStart1[-1] + 1
3148 3148
3149 3149 else:
3150 3150 mStart1 = mPeak
3151 3151
3152 3152 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3153 3153 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3154 3154 if mEndDecayTime1.size == 0:
3155 3155 mEndDecayTime1 = powerNet0.size
3156 3156 else:
3157 3157 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3158 3158 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3159 3159
3160 3160 #meteorVolts1.- all Channels, from start to end
3161 3161 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3162 3162 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3163 3163 if meteorVolts2.shape[1] == 0:
3164 3164 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3165 3165 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3166 3166 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3167 3167 ##################### END PARAMETERS REESTIMATION #########################
3168 3168
3169 3169 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3170 3170 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3171 3171 if meteorVolts2.shape[1] > 0:
3172 3172 #Phase Difference re-estimation
3173 3173 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3174 3174 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3175 3175 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3176 3176 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3177 3177 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3178 3178
3179 3179 #Phase Difference RMS
3180 3180 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3181 3181 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3182 3182 #Data from Meteor
3183 3183 mPeak1 = powerNet1.argmax() + mStart1
3184 3184 mPeakPower1 = powerNet1.max()
3185 3185 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3186 3186 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3187 3187 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3188 3188 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3189 3189 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3190 3190 #Vectorize
3191 3191 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3192 3192 meteorAux[7:11] = phaseDiffint[0:4]
3193 3193
3194 3194 #Rejection criteria
3195 3195 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3196 3196 meteorAux[-1] = 17
3197 3197 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3198 3198 meteorAux[-1] = 1
3199 3199
3200 3200
3201 3201 else:
3202 3202 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3203 3203 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3204 3204 PowerSeries = 0
3205 3205
3206 3206 listMeteors1.append(meteorAux)
3207 3207 listPowerSeries.append(PowerSeries)
3208 3208 listVoltageSeries.append(meteorVolts1)
3209 3209
3210 3210 return listMeteors1, listPowerSeries, listVoltageSeries
3211 3211
3212 3212 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3213 3213
3214 3214 threshError = 10
3215 3215 #Depending if it is 30 or 50 MHz
3216 3216 if frequency == 30e6:
3217 3217 timeLag = 45*10**-3
3218 3218 else:
3219 3219 timeLag = 15*10**-3
3220 3220 lag = numpy.ceil(timeLag/timeInterval)
3221 3221
3222 3222 listMeteors1 = []
3223 3223
3224 3224 for i in range(len(listMeteors)):
3225 3225 meteorPower = listPower[i]
3226 3226 meteorAux = listMeteors[i]
3227 3227
3228 3228 if meteorAux[-1] == 0:
3229 3229
3230 3230 try:
3231 3231 indmax = meteorPower.argmax()
3232 3232 indlag = indmax + lag
3233 3233
3234 3234 y = meteorPower[indlag:]
3235 3235 x = numpy.arange(0, y.size)*timeLag
3236 3236
3237 3237 #first guess
3238 3238 a = y[0]
3239 3239 tau = timeLag
3240 3240 #exponential fit
3241 3241 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3242 3242 y1 = self.__exponential_function(x, *popt)
3243 3243 #error estimation
3244 3244 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3245 3245
3246 3246 decayTime = popt[1]
3247 3247 riseTime = indmax*timeInterval
3248 3248 meteorAux[11:13] = [decayTime, error]
3249 3249
3250 3250 #Table items 7, 8 and 11
3251 3251 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3252 3252 meteorAux[-1] = 7
3253 3253 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3254 3254 meteorAux[-1] = 8
3255 3255 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3256 3256 meteorAux[-1] = 11
3257 3257
3258 3258
3259 3259 except:
3260 3260 meteorAux[-1] = 11
3261 3261
3262 3262
3263 3263 listMeteors1.append(meteorAux)
3264 3264
3265 3265 return listMeteors1
3266 3266
3267 3267 #Exponential Function
3268 3268
3269 3269 def __exponential_function(self, x, a, tau):
3270 3270 y = a*numpy.exp(-x/tau)
3271 3271 return y
3272 3272
3273 3273 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3274 3274
3275 3275 pairslist1 = list(pairslist)
3276 3276 pairslist1.append((0,1))
3277 3277 pairslist1.append((3,4))
3278 3278 numPairs = len(pairslist1)
3279 3279 #Time Lag
3280 3280 timeLag = 45*10**-3
3281 3281 c = 3e8
3282 3282 lag = numpy.ceil(timeLag/timeInterval)
3283 3283 freq = 30e6
3284 3284
3285 3285 listMeteors1 = []
3286 3286
3287 3287 for i in range(len(listMeteors)):
3288 3288 meteorAux = listMeteors[i]
3289 3289 if meteorAux[-1] == 0:
3290 3290 mStart = listMeteors[i][1]
3291 3291 mPeak = listMeteors[i][2]
3292 3292 mLag = mPeak - mStart + lag
3293 3293
3294 3294 #get the volt data between the start and end times of the meteor
3295 3295 meteorVolts = listVolts[i]
3296 3296 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3297 3297
3298 3298 #Get CCF
3299 3299 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3300 3300
3301 3301 #Method 2
3302 3302 slopes = numpy.zeros(numPairs)
3303 3303 time = numpy.array([-2,-1,1,2])*timeInterval
3304 3304 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3305 3305
3306 3306 #Correct phases
3307 3307 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3308 3308 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3309 3309
3310 3310 if indDer[0].shape[0] > 0:
3311 3311 for i in range(indDer[0].shape[0]):
3312 3312 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3313 3313 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3314 3314
3315 3315 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3316 3316 for j in range(numPairs):
3317 3317 fit = stats.linregress(time, angAllCCF[j,:])
3318 3318 slopes[j] = fit[0]
3319 3319
3320 3320 #Remove Outlier
3321 3321 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3322 3322 # slopes = numpy.delete(slopes,indOut)
3323 3323 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3324 3324 # slopes = numpy.delete(slopes,indOut)
3325 3325
3326 3326 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3327 3327 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3328 3328 meteorAux[-2] = radialError
3329 3329 meteorAux[-3] = radialVelocity
3330 3330
3331 3331 #Setting Error
3332 3332 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3333 3333 if numpy.abs(radialVelocity) > 200:
3334 3334 meteorAux[-1] = 15
3335 3335 #Number 12: Poor fit to CCF phase variation for estimation of radial drift velocity
3336 3336 elif radialError > radialStdThresh:
3337 3337 meteorAux[-1] = 12
3338 3338
3339 3339 listMeteors1.append(meteorAux)
3340 3340 return listMeteors1
3341 3341
3342 3342 def __setNewArrays(self, listMeteors, date, heiRang):
3343 3343
3344 3344 #New arrays
3345 3345 arrayMeteors = numpy.array(listMeteors)
3346 3346 arrayParameters = numpy.zeros((len(listMeteors), 13))
3347 3347
3348 3348 #Date inclusion
3349 3349 # date = re.findall(r'\((.*?)\)', date)
3350 3350 # date = date[0].split(',')
3351 3351 # date = map(int, date)
3352 3352 #
3353 3353 # if len(date)<6:
3354 3354 # date.append(0)
3355 3355 #
3356 3356 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3357 3357 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3358 3358 arrayDate = numpy.tile(date, (len(listMeteors)))
3359 3359
3360 3360 #Meteor array
3361 3361 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3362 3362 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3363 3363
3364 3364 #Parameters Array
3365 3365 arrayParameters[:,0] = arrayDate #Date
3366 3366 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3367 3367 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3368 3368 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3369 3369 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3370 3370
3371 3371
3372 3372 return arrayParameters
3373 3373
3374 3374 class CorrectSMPhases(Operation):
3375 3375
3376 3376 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3377 3377
3378 3378 arrayParameters = dataOut.data_param
3379 3379 pairsList = []
3380 3380 pairx = (0,1)
3381 3381 pairy = (2,3)
3382 3382 pairsList.append(pairx)
3383 3383 pairsList.append(pairy)
3384 3384 jph = numpy.zeros(4)
3385 3385
3386 3386 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3387 3387 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3388 3388 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3389 3389
3390 3390 meteorOps = SMOperations()
3391 3391 if channelPositions is None:
3392 3392 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3393 3393 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3394 3394
3395 3395 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3396 3396 h = (hmin,hmax)
3397 3397
3398 3398 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3399 3399
3400 3400 dataOut.data_param = arrayParameters
3401 3401 return
3402 3402
3403 3403 class SMPhaseCalibration(Operation):
3404 3404
3405 3405 __buffer = None
3406 3406
3407 3407 __initime = None
3408 3408
3409 3409 __dataReady = False
3410 3410
3411 3411 __isConfig = False
3412 3412
3413 3413 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3414 3414
3415 3415 dataTime = currentTime + paramInterval
3416 3416 deltaTime = dataTime - initTime
3417 3417
3418 3418 if deltaTime >= outputInterval or deltaTime < 0:
3419 3419 return True
3420 3420
3421 3421 return False
3422 3422
3423 3423 def __getGammas(self, pairs, d, phases):
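# For each pair, jgamma = -phip2*d3/d2 - phip3 should cluster around the pair's
# instrumental phase offset; its wrapped values are histogrammed (64 bins over
# +/- pi/2) and a Gaussian fit around the histogram peak gives the gamma estimate.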
3424 3424 gammas = numpy.zeros(2)
3425 3425
3426 3426 for i in range(len(pairs)):
3427 3427
3428 3428 pairi = pairs[i]
3429 3429
3430 3430 phip3 = phases[:,pairi[0]]
3431 3431 d3 = d[pairi[0]]
3432 3432 phip2 = phases[:,pairi[1]]
3433 3433 d2 = d[pairi[1]]
3434 3434 #Calculating gamma
3435 3435 # jdcos = alp1/(k*d1)
3436 3436 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3437 3437 jgamma = -phip2*d3/d2 - phip3
3438 3438 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3439 3439 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3440 3440 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3441 3441
3442 3442 #Revised distribution
3443 3443 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3444 3444
3445 3445 #Histogram
3446 3446 nBins = 64
3447 3447 rmin = -0.5*numpy.pi
3448 3448 rmax = 0.5*numpy.pi
3449 3449 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3450 3450
3451 3451 meteorsY = phaseHisto[0]
3452 3452 phasesX = phaseHisto[1][:-1]
3453 3453 width = phasesX[1] - phasesX[0]
3454 3454 phasesX += width/2
3455 3455
3456 3456 #Gaussian approximation
3457 3457 bpeak = meteorsY.argmax()
3458 3458 peak = meteorsY.max()
3459 3459 jmin = bpeak - 5
3460 3460 jmax = bpeak + 5 + 1
3461 3461
3462 3462 if jmin<0:
3463 3463 jmin = 0
3464 3464 jmax = 6
3465 3465 elif jmax > meteorsY.size:
3466 3466 jmin = meteorsY.size - 6
3467 3467 jmax = meteorsY.size
3468 3468
3469 3469 x0 = numpy.array([peak,bpeak,50])
3470 3470 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3471 3471
3472 3472 #Gammas
3473 3473 gammas[i] = coeff[0][1]
3474 3474
3475 3475 return gammas
3476 3476
3477 3477 def __residualFunction(self, coeffs, y, t):
3478 3478
3479 3479 return y - self.__gauss_function(t, coeffs)
3480 3480
3481 3481 def __gauss_function(self, t, coeffs):
3482 3482
3483 3483 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3484 3484
3485 3485 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
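# Grid search for the per-channel phase offsets: offsets consistent with the gammas
# are scanned over a progressively narrowing angular range, and the combination that
# yields the largest number of meteors with error code 0 is kept.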
3486 3486 meteorOps = SMOperations()
3487 3487 nchan = 4
3488 3488 pairx = pairsList[0] #x is 0
3489 3489 pairy = pairsList[1] #y is 1
3490 3490 center_xangle = 0
3491 3491 center_yangle = 0
3492 3492 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3493 3493 ntimes = len(range_angle)
3494 3494
3495 3495 nstepsx = 20
3496 3496 nstepsy = 20
3497 3497
3498 3498 for iz in range(ntimes):
3499 3499 min_xangle = -range_angle[iz]/2 + center_xangle
3500 3500 max_xangle = range_angle[iz]/2 + center_xangle
3501 3501 min_yangle = -range_angle[iz]/2 + center_yangle
3502 3502 max_yangle = range_angle[iz]/2 + center_yangle
3503 3503
3504 3504 inc_x = (max_xangle-min_xangle)/nstepsx
3505 3505 inc_y = (max_yangle-min_yangle)/nstepsy
3506 3506
3507 3507 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3508 3508 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3509 3509 penalty = numpy.zeros((nstepsx,nstepsy))
3510 3510 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3511 3511 jph = numpy.zeros(nchan)
3512 3512
3513 3513 # Iterations looking for the offset
3514 3514 for iy in range(int(nstepsy)):
3515 3515 for ix in range(int(nstepsx)):
3516 3516 d3 = d[pairsList[1][0]]
3517 3517 d2 = d[pairsList[1][1]]
3518 3518 d5 = d[pairsList[0][0]]
3519 3519 d4 = d[pairsList[0][1]]
3520 3520
3521 3521 alp2 = alpha_y[iy] #gamma 1
3522 3522 alp4 = alpha_x[ix] #gamma 0
3523 3523
3524 3524 alp3 = -alp2*d3/d2 - gammas[1]
3525 3525 alp5 = -alp4*d5/d4 - gammas[0]
3526 3526 # jph[pairy[1]] = alpha_y[iy]
3527 3527 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3528 3528
3529 3529 # jph[pairx[1]] = alpha_x[ix]
3530 3530 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3531 3531 jph[pairsList[0][1]] = alp4
3532 3532 jph[pairsList[0][0]] = alp5
3533 3533 jph[pairsList[1][0]] = alp3
3534 3534 jph[pairsList[1][1]] = alp2
3535 3535 jph_array[:,ix,iy] = jph
3536 3536 # d = [2.0,2.5,2.5,2.0]
3537 3537 #still need to check that the meteors are read correctly
3538 3538 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3539 3539 error = meteorsArray1[:,-1]
3540 3540 ind1 = numpy.where(error==0)[0]
3541 3541 penalty[ix,iy] = ind1.size
3542 3542
3543 3543 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3544 3544 phOffset = jph_array[:,i,j]
3545 3545
3546 3546 center_xangle = phOffset[pairx[1]]
3547 3547 center_yangle = phOffset[pairy[1]]
3548 3548
3549 3549 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3550 3550 phOffset = phOffset*180/numpy.pi
3551 3551 return phOffset
3552 3552
3553 3553
3554 3554 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3555 3555
3556 3556 dataOut.flagNoData = True
3557 3557 self.__dataReady = False
3558 3558 dataOut.outputInterval = nHours*3600
3559 3559
3560 3560 if self.__isConfig == False:
3561 3561 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3562 3562 #Get Initial LTC time
3563 3563 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3564 3564 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3565 3565
3566 3566 self.__isConfig = True
3567 3567
3568 3568 if self.__buffer is None:
3569 3569 self.__buffer = dataOut.data_param.copy()
3570 3570
3571 3571 else:
3572 3572 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3573 3573
3574 3574 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3575 3575
3576 3576 if self.__dataReady:
3577 3577 dataOut.utctimeInit = self.__initime
3578 3578 self.__initime += dataOut.outputInterval #to erase time offset
3579 3579
3580 3580 freq = dataOut.frequency
3581 3581 c = dataOut.C #m/s
3582 3582 lamb = c/freq
3583 3583 k = 2*numpy.pi/lamb
3584 3584 azimuth = 0
3585 3585 h = (hmin, hmax)
3586 3586 # pairs = ((0,1),(2,3)) #Estrella
3587 3587 # pairs = ((1,0),(2,3)) #T
3588 3588
3589 3589 if channelPositions is None:
3590 3590 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3591 3591 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3592 3592 meteorOps = SMOperations()
3593 3593 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3594 3594
3595 3595 #Checking correct order of pairs
3596 3596 pairs = []
3597 3597 if distances[1] > distances[0]:
3598 3598 pairs.append((1,0))
3599 3599 else:
3600 3600 pairs.append((0,1))
3601 3601
3602 3602 if distances[3] > distances[2]:
3603 3603 pairs.append((3,2))
3604 3604 else:
3605 3605 pairs.append((2,3))
3606 3606 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3607 3607
3608 3608 meteorsArray = self.__buffer
3609 3609 error = meteorsArray[:,-1]
3610 3610 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3611 3611 ind1 = numpy.where(boolError)[0]
3612 3612 meteorsArray = meteorsArray[ind1,:]
3613 3613 meteorsArray[:,-1] = 0
3614 3614 phases = meteorsArray[:,8:12]
3615 3615
3616 3616 #Calculate Gammas
3617 3617 gammas = self.__getGammas(pairs, distances, phases)
3618 3618 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3619 3619 #Calculate Phases
3620 3620 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3621 3621 phasesOff = phasesOff.reshape((1,phasesOff.size))
3622 3622 dataOut.data_output = -phasesOff
3623 3623 dataOut.flagNoData = False
3624 3624 self.__buffer = None
3625 3625
3626 3626
3627 3627 return
3628 3628
3629 3629 class SMOperations():
3630 3630
3631 3631 def __init__(self):
3632 3632
3633 3633 return
3634 3634
3635 3635 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3636 3636
3637 3637 arrayParameters = arrayParameters0.copy()
3638 3638 hmin = h[0]
3639 3639 hmax = h[1]
3640 3640
3641 3641 #Calculate AOA (Error N 3, 4)
3642 3642 #JONES ET AL. 1998
3643 3643 AOAthresh = numpy.pi/8
3644 3644 error = arrayParameters[:,-1]
3645 3645 phases = -arrayParameters[:,8:12] + jph
3646 3646 # phases = numpy.unwrap(phases)
3647 3647 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3648 3648
3649 3649 #Calculate Heights (Error N 13 and 14)
3650 3650 error = arrayParameters[:,-1]
3651 3651 Ranges = arrayParameters[:,1]
3652 3652 zenith = arrayParameters[:,4]
3653 3653 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3654 3654
3655 3655 #----------------------- Get Final data ------------------------------------
3656 3656 # error = arrayParameters[:,-1]
3657 3657 # ind1 = numpy.where(error==0)[0]
3658 3658 # arrayParameters = arrayParameters[ind1,:]
3659 3659
3660 3660 return arrayParameters
3661 3661
3662 3662 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3663 3663
3664 3664 arrayAOA = numpy.zeros((phases.shape[0],3))
3665 3665 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3666 3666
3667 3667 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3668 3668 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3669 3669 arrayAOA[:,2] = cosDirError
3670 3670
3671 3671 azimuthAngle = arrayAOA[:,0]
3672 3672 zenithAngle = arrayAOA[:,1]
3673 3673
3674 3674 #Setting Error
3675 3675 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3676 3676 error[indError] = 0
3677 3677 #Number 3: AOA not feasible
3678 3678 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3679 3679 error[indInvalid] = 3
3680 3680 #Number 4: Large difference in AOAs obtained from different antenna baselines
3681 3681 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3682 3682 error[indInvalid] = 4
3683 3683 return arrayAOA, error
3684 3684
3685 3685 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3686 3686
3687 3687 #Initializing some variables
3688 3688 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3689 3689 ang_aux = ang_aux.reshape(1,ang_aux.size)
3690 3690
3691 3691 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3692 3692 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3693 3693
3694 3694
3695 3695 for i in range(2):
3696 3696 ph0 = arrayPhase[:,pairsList[i][0]]
3697 3697 ph1 = arrayPhase[:,pairsList[i][1]]
3698 3698 d0 = distances[pairsList[i][0]]
3699 3699 d1 = distances[pairsList[i][1]]
3700 3700
3701 3701 ph0_aux = ph0 + ph1
3702 3702 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3703 3703 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3704 3704 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3705 3705 #First Estimation
3706 3706 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3707 3707
3708 3708 #Most-Accurate Second Estimation
3709 3709 phi1_aux = ph0 - ph1
3710 3710 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3711 3711 #Direction Cosine 1
3712 3712 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3713 3713
3714 3714 #Searching the correct Direction Cosine
3715 3715 cosdir0_aux = cosdir0[:,i]
3716 3716 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3717 3717 #Minimum Distance
3718 3718 cosDiff = (cosdir1 - cosdir0_aux)**2
3719 3719 indcos = cosDiff.argmin(axis = 1)
3720 3720 #Saving Value obtained
3721 3721 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3722 3722
3723 3723 return cosdir0, cosdir
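
    # Illustrative sketch (not called by the processing chain): the 2*pi-ambiguity
    # search used in __getDirectionCosines above. Candidate direction cosines are
    # built from the phase difference plus integer multiples of 2*pi, and the one
    # closest to the coarse estimate derived from the phase sum is kept. All values
    # below are hypothetical.
    @staticmethod
    def _exampleDirectionCosineSearch(phi_diff=1.3, coarse=0.05, d0=2.0, d1=2.5):
        k = numpy.arange(-8, 9)
        candidates = (phi_diff + 2*numpy.pi*k) / (2*numpy.pi*(d0 + d1))
        # keep the candidate with minimum squared distance to the coarse estimate
        return candidates[numpy.argmin((candidates - coarse)**2)]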
3724 3724
3725 3725 def __calculateAOA(self, cosdir, azimuth):
3726 3726 cosdirX = cosdir[:,0]
3727 3727 cosdirY = cosdir[:,1]
3728 3728
3729 3729 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3730 3730 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3731 3731 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3732 3732
3733 3733 return angles
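
    # Illustrative sketch (not called by the processing chain): the direction-cosine
    # to angle conversion performed in __calculateAOA above, for a single echo.
    # The numbers are hypothetical; azimuth is measured 0 deg north, 90 deg east.
    @staticmethod
    def _exampleAOAFromCosines(cosdirX=0.1, cosdirY=0.2, azimuth=0):
        zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
        azimuthAngle = numpy.arctan2(cosdirX, cosdirY)*180/numpy.pi + azimuth
        # for (0.1, 0.2) this gives roughly (26.6, 12.9) degrees
        return azimuthAngle, zenithAngle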
3734 3734
3735 3735 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3736 3736
3737 3737 Ramb = 375 #Ramb = c/(2*PRF)
3738 3738 Re = 6371 #Earth Radius
3739 3739 heights = numpy.zeros(Ranges.shape)
3740 3740
3741 3741 R_aux = numpy.array([0,1,2])*Ramb
3742 3742 R_aux = R_aux.reshape(1,R_aux.size)
3743 3743
3744 3744 Ranges = Ranges.reshape(Ranges.size,1)
3745 3745
3746 3746 Ri = Ranges + R_aux
3747 3747 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3748 3748
3749 3749 #Check if there is a height between 70 and 110 km
3750 3750 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3751 3751 ind_h = numpy.where(h_bool == 1)[0]
3752 3752
3753 3753 hCorr = hi[ind_h, :]
3754 3754 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3755 3755
3756 3756 hCorr = hi[ind_hCorr][:len(ind_h)]
3757 3757 heights[ind_h] = hCorr
3758 3758
3759 3759 #Setting Error
3760 3760 #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3761 3761 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3762 3762 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3763 3763 error[indError] = 0
3764 3764 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3765 3765 error[indInvalid2] = 14
3766 3766 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3767 3767 error[indInvalid1] = 13
3768 3768
3769 3769 return heights, error
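
    # Illustrative sketch (not called by the processing chain) of the range-aliasing
    # resolution in __getHeights above: each measured range has candidates R + k*Ramb
    # (k = 0, 1, 2) and an echo is kept only when exactly one candidate maps to a
    # height inside [minHeight, maxHeight]. The input values are hypothetical.
    @staticmethod
    def _exampleResolveHeight(R=200., zenith=60., minHeight=70., maxHeight=110.):
        Ramb = 375 # km, Ramb = c/(2*PRF)
        Re = 6371 # km, Earth radius
        Ri = R + numpy.array([0, 1, 2])*Ramb
        hi = numpy.sqrt(Re**2 + Ri**2 + 2*Re*numpy.cos(zenith*numpy.pi/180)*Ri) - Re
        valid = numpy.logical_and(hi > minHeight, hi < maxHeight)
        # for R = 200 km and zenith = 60 deg only the k = 0 candidate (~102 km) is valid
        return hi[valid] if valid.sum() == 1 else None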
3770 3770
3771 3771 def getPhasePairs(self, channelPositions):
3772 3772 chanPos = numpy.array(channelPositions)
3773 3773 listOper = list(itertools.combinations(list(range(5)),2))
3774 3774
3775 3775 distances = numpy.zeros(4)
3776 3776 axisX = []
3777 3777 axisY = []
3778 3778 distX = numpy.zeros(3)
3779 3779 distY = numpy.zeros(3)
3780 3780 ix = 0
3781 3781 iy = 0
3782 3782
3783 3783 pairX = numpy.zeros((2,2))
3784 3784 pairY = numpy.zeros((2,2))
3785 3785
3786 3786 for i in range(len(listOper)):
3787 3787 pairi = listOper[i]
3788 3788
3789 3789 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3790 3790
3791 3791 if posDif[0] == 0:
3792 3792 axisY.append(pairi)
3793 3793 distY[iy] = posDif[1]
3794 3794 iy += 1
3795 3795 elif posDif[1] == 0:
3796 3796 axisX.append(pairi)
3797 3797 distX[ix] = posDif[0]
3798 3798 ix += 1
3799 3799
3800 3800 for i in range(2):
3801 3801 if i==0:
3802 3802 dist0 = distX
3803 3803 axis0 = axisX
3804 3804 else:
3805 3805 dist0 = distY
3806 3806 axis0 = axisY
3807 3807
3808 3808 side = numpy.argsort(dist0)[:-1]
3809 3809 axis0 = numpy.array(axis0)[side,:]
3810 3810 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3811 3811 axis1 = numpy.unique(numpy.reshape(axis0,4))
3812 3812 side = axis1[axis1 != chanC]
3813 3813 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3814 3814 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3815 3815 if diff1<0:
3816 3816 chan2 = side[0]
3817 3817 d2 = numpy.abs(diff1)
3818 3818 chan1 = side[1]
3819 3819 d1 = numpy.abs(diff2)
3820 3820 else:
3821 3821 chan2 = side[1]
3822 3822 d2 = numpy.abs(diff2)
3823 3823 chan1 = side[0]
3824 3824 d1 = numpy.abs(diff1)
3825 3825
3826 3826 if i==0:
3827 3827 chanCX = chanC
3828 3828 chan1X = chan1
3829 3829 chan2X = chan2
3830 3830 distances[0:2] = numpy.array([d1,d2])
3831 3831 else:
3832 3832 chanCY = chanC
3833 3833 chan1Y = chan1
3834 3834 chan2Y = chan2
3835 3835 distances[2:4] = numpy.array([d1,d2])
3836 3836 # axisXsides = numpy.reshape(axisX[ix,:],4)
3837 3837 #
3838 3838 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3839 3839 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3840 3840 #
3841 3841 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3842 3842 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3843 3843 # channel25X = int(pairX[0,ind25X])
3844 3844 # channel20X = int(pairX[1,ind20X])
3845 3845 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3846 3846 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3847 3847 # channel25Y = int(pairY[0,ind25Y])
3848 3848 # channel20Y = int(pairY[1,ind20Y])
3849 3849
3850 3850 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3851 3851 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3852 3852
3853 3853 return pairslist, distances
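
    # Hedged usage sketch (not called by the processing chain): getPhasePairs applied
    # to the five-antenna "Estrella" layout used above. It returns the centre channel
    # paired with each arm, plus the corresponding baseline lengths.
    @staticmethod
    def _exampleGetPhasePairs():
        ops = SMOperations()
        channelPositions = [(4.5, 2), (2, 4.5), (2, 2), (2, 0), (0, 2)]
        pairslist, distances = ops.getPhasePairs(channelPositions)
        # expected (by inspection of the layout): pairslist == [(2, 4), (2, 0), (2, 3), (2, 1)]
        # and distances == [2.0, 2.5, 2.0, 2.5]
        return pairslist, distances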
3854 3854 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3855 3855 #
3856 3856 # arrayAOA = numpy.zeros((phases.shape[0],3))
3857 3857 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3858 3858 #
3859 3859 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3860 3860 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3861 3861 # arrayAOA[:,2] = cosDirError
3862 3862 #
3863 3863 # azimuthAngle = arrayAOA[:,0]
3864 3864 # zenithAngle = arrayAOA[:,1]
3865 3865 #
3866 3866 # #Setting Error
3867 3867 # #Number 3: AOA not feasible
3868 3868 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3869 3869 # error[indInvalid] = 3
3870 3870 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3871 3871 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3872 3872 # error[indInvalid] = 4
3873 3873 # return arrayAOA, error
3874 3874 #
3875 3875 # def __getDirectionCosines(self, arrayPhase, pairsList):
3876 3876 #
3877 3877 # #Initializing some variables
3878 3878 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3879 3879 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3880 3880 #
3881 3881 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3882 3882 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3883 3883 #
3884 3884 #
3885 3885 # for i in range(2):
3886 3886 # #First Estimation
3887 3887 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3888 3888 # #Dealias
3889 3889 # indcsi = numpy.where(phi0_aux > numpy.pi)
3890 3890 # phi0_aux[indcsi] -= 2*numpy.pi
3891 3891 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3892 3892 # phi0_aux[indcsi] += 2*numpy.pi
3893 3893 # #Direction Cosine 0
3894 3894 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3895 3895 #
3896 3896 # #Most-Accurate Second Estimation
3897 3897 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3898 3898 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3899 3899 # #Direction Cosine 1
3900 3900 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3901 3901 #
3902 3902 # #Searching the correct Direction Cosine
3903 3903 # cosdir0_aux = cosdir0[:,i]
3904 3904 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3905 3905 # #Minimum Distance
3906 3906 # cosDiff = (cosdir1 - cosdir0_aux)**2
3907 3907 # indcos = cosDiff.argmin(axis = 1)
3908 3908 # #Saving Value obtained
3909 3909 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3910 3910 #
3911 3911 # return cosdir0, cosdir
3912 3912 #
3913 3913 # def __calculateAOA(self, cosdir, azimuth):
3914 3914 # cosdirX = cosdir[:,0]
3915 3915 # cosdirY = cosdir[:,1]
3916 3916 #
3917 3917 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3918 3918 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3919 3919 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3920 3920 #
3921 3921 # return angles
3922 3922 #
3923 3923 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3924 3924 #
3925 3925 # Ramb = 375 #Ramb = c/(2*PRF)
3926 3926 # Re = 6371 #Earth Radius
3927 3927 # heights = numpy.zeros(Ranges.shape)
3928 3928 #
3929 3929 # R_aux = numpy.array([0,1,2])*Ramb
3930 3930 # R_aux = R_aux.reshape(1,R_aux.size)
3931 3931 #
3932 3932 # Ranges = Ranges.reshape(Ranges.size,1)
3933 3933 #
3934 3934 # Ri = Ranges + R_aux
3935 3935 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3936 3936 #
3937 3937 # #Check if there is a height between 70 and 110 km
3938 3938 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3939 3939 # ind_h = numpy.where(h_bool == 1)[0]
3940 3940 #
3941 3941 # hCorr = hi[ind_h, :]
3942 3942 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3943 3943 #
3944 3944 # hCorr = hi[ind_hCorr]
3945 3945 # heights[ind_h] = hCorr
3946 3946 #
3947 3947 # #Setting Error
3948 3948 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3949 3949 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3950 3950 #
3951 3951 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3952 3952 # error[indInvalid2] = 14
3953 3953 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3954 3954 # error[indInvalid1] = 13
3955 3955 #
3956 3956 # return heights, error
@@ -1,876 +1,898
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
17 17 from schainpy.model.data.jrodata import Spectra
18 18 from schainpy.model.data.jrodata import hildebrand_sekhon
19 19 from schainpy.utils import log
20 20
21 21
22 22 class SpectraProc(ProcessingUnit):
23 23
24 24 def __init__(self):
25 25
26 26 ProcessingUnit.__init__(self)
27 27
28 28 self.buffer = None
29 29 self.firstdatatime = None
30 30 self.profIndex = 0
31 31 self.dataOut = Spectra()
32 32 self.id_min = None
33 33 self.id_max = None
34 34 self.setupReq = False # Add to all processing units
35 35
36 36 def __updateSpecFromVoltage(self):
37 37
38 38 self.dataOut.timeZone = self.dataIn.timeZone
39 39 self.dataOut.dstFlag = self.dataIn.dstFlag
40 40 self.dataOut.errorCount = self.dataIn.errorCount
41 41 self.dataOut.useLocalTime = self.dataIn.useLocalTime
42 42 try:
43 43 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
44 44 except:
45 45 pass
46 46 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
47 47 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
48 48 self.dataOut.channelList = self.dataIn.channelList
49 49 self.dataOut.heightList = self.dataIn.heightList
50 50 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
51 51 self.dataOut.nProfiles = self.dataOut.nFFTPoints
52 52 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
53 53 self.dataOut.utctime = self.firstdatatime
54 54 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
55 55 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
56 56 self.dataOut.flagShiftFFT = False
57 57 self.dataOut.nCohInt = self.dataIn.nCohInt
58 58 self.dataOut.nIncohInt = 1
59 59 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
60 60 self.dataOut.frequency = self.dataIn.frequency
61 61 self.dataOut.realtime = self.dataIn.realtime
62 62 self.dataOut.azimuth = self.dataIn.azimuth
63 63 self.dataOut.zenith = self.dataIn.zenith
64 64 self.dataOut.beam.codeList = self.dataIn.beam.codeList
65 65 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
66 66 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
67 67
68 68 def __getFft(self):
69 69 """
70 70 Converts Voltage values to Spectra
71 71
72 72 Affected:
73 73 self.dataOut.data_spc
74 74 self.dataOut.data_cspc
75 75 self.dataOut.data_dc
76 76 self.dataOut.heightList
77 77 self.profIndex
78 78 self.buffer
79 79 self.dataOut.flagNoData
80 80 """
81 81 fft_volt = numpy.fft.fft(
82 82 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
83 83 fft_volt = fft_volt.astype(numpy.dtype('complex'))
84 84 dc = fft_volt[:, 0, :]
85 85
86 86 # self-spectra computation
87 87 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
88 88 spc = fft_volt * numpy.conjugate(fft_volt)
89 89 spc = spc.real
90 90
91 91 blocksize = 0
92 92 blocksize += dc.size
93 93 blocksize += spc.size
94 94
95 95 cspc = None
96 96 pairIndex = 0
97 97 if self.dataOut.pairsList != None:
98 98 # cross-spectra computation
99 99 cspc = numpy.zeros(
100 100 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
101 101 for pair in self.dataOut.pairsList:
102 102 if pair[0] not in self.dataOut.channelList:
103 103 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
104 104 str(pair), str(self.dataOut.channelList)))
105 105 if pair[1] not in self.dataOut.channelList:
106 106 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
107 107 str(pair), str(self.dataOut.channelList)))
108 108
109 109 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
110 110 numpy.conjugate(fft_volt[pair[1], :, :])
111 111 pairIndex += 1
112 112 blocksize += cspc.size
113 113
114 114 self.dataOut.data_spc = spc
115 115 self.dataOut.data_cspc = cspc
116 116 self.dataOut.data_dc = dc
117 117 self.dataOut.blockSize = blocksize
118 118 self.dataOut.flagShiftFFT = False
119 119
120 120 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
121 121
122 122 if self.dataIn.type == "Spectra":
123 123 self.dataOut.copy(self.dataIn)
124 124 if shift_fft:
125 125 #shift the spectra to the right along the FFT axis by a fixed number of positions
126 126 shift = int(self.dataOut.nFFTPoints/2)
127 127 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
128 128
129 129 if self.dataOut.data_cspc is not None:
130 130 #shift the cross-spectra to the right along the FFT axis by the same number of positions
131 131 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
132 132 if pairsList:
133 133 self.__selectPairs(pairsList)
134 134
135 135 elif self.dataIn.type == "Voltage":
136 136
137 137 self.dataOut.flagNoData = True
138 138
139 139 if nFFTPoints == None:
140 140 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
141 141
142 142 if nProfiles == None:
143 143 nProfiles = nFFTPoints
144 144
145 145 if ippFactor == None:
146 146 self.dataOut.ippFactor = 1
147 147
148 148 self.dataOut.nFFTPoints = nFFTPoints
149 149
150 150 if self.buffer is None:
151 151 self.buffer = numpy.zeros((self.dataIn.nChannels,
152 152 nProfiles,
153 153 self.dataIn.nHeights),
154 154 dtype='complex')
155 155
156 156 if self.dataIn.flagDataAsBlock:
157 157 nVoltProfiles = self.dataIn.data.shape[1]
158 158
159 159 if nVoltProfiles == nProfiles:
160 160 self.buffer = self.dataIn.data.copy()
161 161 self.profIndex = nVoltProfiles
162 162
163 163 elif nVoltProfiles < nProfiles:
164 164
165 165 if self.profIndex == 0:
166 166 self.id_min = 0
167 167 self.id_max = nVoltProfiles
168 168
169 169 self.buffer[:, self.id_min:self.id_max,
170 170 :] = self.dataIn.data
171 171 self.profIndex += nVoltProfiles
172 172 self.id_min += nVoltProfiles
173 173 self.id_max += nVoltProfiles
174 174 else:
175 175 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
176 176 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
177 177 self.dataOut.flagNoData = True
178 178 else:
179 179 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
180 180 self.profIndex += 1
181 181
182 182 if self.firstdatatime == None:
183 183 self.firstdatatime = self.dataIn.utctime
184 184
185 185 if self.profIndex == nProfiles:
186 186 self.__updateSpecFromVoltage()
187 187 if pairsList == None:
188 188 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
189 189 else:
190 190 self.dataOut.pairsList = pairsList
191 191 self.__getFft()
192 192 self.dataOut.flagNoData = False
193 193 self.firstdatatime = None
194 194 self.profIndex = 0
195 195 else:
196 196 raise ValueError("The type of input object '%s' is not valid".format(
197 197 self.dataIn.type))
198 198
199 199 def __selectPairs(self, pairsList):
200 200
201 201 if not pairsList:
202 202 return
203 203
204 204 pairs = []
205 205 pairsIndex = []
206 206
207 207 for pair in pairsList:
208 208 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
209 209 continue
210 210 pairs.append(pair)
211 211 pairsIndex.append(pairs.index(pair))
212 212
213 213 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
214 214 self.dataOut.pairsList = pairs
215 215
216 216 return
217 217
218 218 def selectFFTs(self, minFFT, maxFFT ):
219 219 """
220 220 Selects a block of data based on a range of FFT point values such that
221 221 minFFT <= FFT <= maxFFT
222 222 """
223 223
224 224 if (minFFT > maxFFT):
225 225 raise ValueError("Error selecting FFTs: FFT range (%d,%d) is not valid" % (minFFT, maxFFT))
226 226
227 227 if (minFFT < self.dataOut.getFreqRange()[0]):
228 228 minFFT = self.dataOut.getFreqRange()[0]
229 229
230 230 if (maxFFT > self.dataOut.getFreqRange()[-1]):
231 231 maxFFT = self.dataOut.getFreqRange()[-1]
232 232
233 233 minIndex = 0
234 234 maxIndex = 0
235 235 FFTs = self.dataOut.getFreqRange()
236 236
237 237 inda = numpy.where(FFTs >= minFFT)
238 238 indb = numpy.where(FFTs <= maxFFT)
239 239
240 240 try:
241 241 minIndex = inda[0][0]
242 242 except:
243 243 minIndex = 0
244 244
245 245 try:
246 246 maxIndex = indb[0][-1]
247 247 except:
248 248 maxIndex = len(FFTs)
249 249
250 250 self.selectFFTsByIndex(minIndex, maxIndex)
251 251
252 252 return 1
253 253
254 254 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
255 255 newheis = numpy.where(
256 256 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
257 257
258 258 if hei_ref != None:
259 259 newheis = numpy.where(self.dataOut.heightList > hei_ref)
260 260
261 261 minIndex = min(newheis[0])
262 262 maxIndex = max(newheis[0])
263 263 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
264 264 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
265 265
266 266 # determine indices
267 267 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
268 268 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
269 269 avg_dB = 10 * \
270 270 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
271 271 beacon_dB = numpy.sort(avg_dB)[-nheis:]
272 272 beacon_heiIndexList = []
273 273 for val in avg_dB.tolist():
274 274 if val >= beacon_dB[0]:
275 275 beacon_heiIndexList.append(avg_dB.tolist().index(val))
276 276
277 277 #data_spc = data_spc[:,:,beacon_heiIndexList]
278 278 data_cspc = None
279 279 if self.dataOut.data_cspc is not None:
280 280 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
281 281 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
282 282
283 283 data_dc = None
284 284 if self.dataOut.data_dc is not None:
285 285 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
286 286 #data_dc = data_dc[:,beacon_heiIndexList]
287 287
288 288 self.dataOut.data_spc = data_spc
289 289 self.dataOut.data_cspc = data_cspc
290 290 self.dataOut.data_dc = data_dc
291 291 self.dataOut.heightList = heightList
292 292 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
293 293
294 294 return 1
295 295
296 296 def selectFFTsByIndex(self, minIndex, maxIndex):
297 297 """
298 298 Selects FFT points between minIndex and maxIndex (inclusive)
299 299 """
300 300
301 301 if (minIndex < 0) or (minIndex > maxIndex):
302 302 raise ValueError("Error selecting FFTs: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
303 303
304 304 if (maxIndex >= self.dataOut.nProfiles):
305 305 maxIndex = self.dataOut.nProfiles-1
306 306
307 307 #Spectra
308 308 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
309 309
310 310 data_cspc = None
311 311 if self.dataOut.data_cspc is not None:
312 312 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
313 313
314 314 data_dc = None
315 315 if self.dataOut.data_dc is not None:
316 316 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
317 317
318 318 self.dataOut.data_spc = data_spc
319 319 self.dataOut.data_cspc = data_cspc
320 320 self.dataOut.data_dc = data_dc
321 321
322 322 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
323 323 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
324 324 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
325 325
326 326 return 1
327 327
328 328 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
329 329 # height range validation
330 330 if minHei == None:
331 331 minHei = self.dataOut.heightList[0]
332 332
333 333 if maxHei == None:
334 334 maxHei = self.dataOut.heightList[-1]
335 335
336 336 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
337 337 print('minHei: %.2f is out of the height range' % (minHei))
338 338 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
339 339 minHei = self.dataOut.heightList[0]
340 340
341 341 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
342 342 print('maxHei: %.2f is out of the height range' % (maxHei))
343 343 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
344 344 maxHei = self.dataOut.heightList[-1]
345 345
346 346 # velocity range validation
347 347 velrange = self.dataOut.getVelRange(1)
348 348
349 349 if minVel == None:
350 350 minVel = velrange[0]
351 351
352 352 if maxVel == None:
353 353 maxVel = velrange[-1]
354 354
355 355 if (minVel < velrange[0]) or (minVel > maxVel):
356 356 print('minVel: %.2f is out of the velocity range' % (minVel))
357 357 print('minVel is set to %.2f' % (velrange[0]))
358 358 minVel = velrange[0]
359 359
360 360 if (maxVel > velrange[-1]) or (maxVel < minVel):
361 361 print('maxVel: %.2f is out of the velocity range' % (maxVel))
362 362 print('maxVel is set to %.2f' % (velrange[-1]))
363 363 maxVel = velrange[-1]
364 364
365 365 # index selection for the height range
366 366 minIndex = 0
367 367 maxIndex = 0
368 368 heights = self.dataOut.heightList
369 369
370 370 inda = numpy.where(heights >= minHei)
371 371 indb = numpy.where(heights <= maxHei)
372 372
373 373 try:
374 374 minIndex = inda[0][0]
375 375 except:
376 376 minIndex = 0
377 377
378 378 try:
379 379 maxIndex = indb[0][-1]
380 380 except:
381 381 maxIndex = len(heights)
382 382
383 383 if (minIndex < 0) or (minIndex > maxIndex):
384 384 raise ValueError("some value in (%d,%d) is not valid" % (
385 385 minIndex, maxIndex))
386 386
387 387 if (maxIndex >= self.dataOut.nHeights):
388 388 maxIndex = self.dataOut.nHeights - 1
389 389
390 390 # index selection for velocities
391 391 indminvel = numpy.where(velrange >= minVel)
392 392 indmaxvel = numpy.where(velrange <= maxVel)
393 393 try:
394 394 minIndexVel = indminvel[0][0]
395 395 except:
396 396 minIndexVel = 0
397 397
398 398 try:
399 399 maxIndexVel = indmaxvel[0][-1]
400 400 except:
401 401 maxIndexVel = len(velrange)
402 402
403 403 # spectrum selection
404 404 data_spc = self.dataOut.data_spc[:,
405 405 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
406 406 # noise estimation
407 407 noise = numpy.zeros(self.dataOut.nChannels)
408 408
409 409 for channel in range(self.dataOut.nChannels):
410 410 daux = data_spc[channel, :, :]
411 411 sortdata = numpy.sort(daux, axis=None)
412 412 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
413 413
414 414 self.dataOut.noise_estimation = noise.copy()
415 415
416 416 return 1
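
    # Hedged sketch (not part of the processing chain): how the per-channel noise
    # estimate above uses hildebrand_sekhon on synthetic data. The array sizes are
    # hypothetical; hildebrand_sekhon is the function imported at the top of this module.
    @staticmethod
    def _exampleNoiseEstimate(nFFTPoints=64, nHeights=100, nIncohInt=10):
        daux = numpy.random.rayleigh(1.0, size=(nFFTPoints, nHeights)) # fake spectra block
        sortdata = numpy.sort(daux, axis=None)
        return hildebrand_sekhon(sortdata, nIncohInt)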
417 417
418 418 class removeDC(Operation):
419 419
420 420 def run(self, dataOut, mode=2):
421 421 self.dataOut = dataOut
422 422 jspectra = self.dataOut.data_spc
423 423 jcspectra = self.dataOut.data_cspc
424 424
425 425 num_chan = jspectra.shape[0]
426 426 num_hei = jspectra.shape[2]
427 427
428 428 if jcspectra is not None:
429 429 jcspectraExist = True
430 430 num_pairs = jcspectra.shape[0]
431 431 else:
432 432 jcspectraExist = False
433 433
434 434 freq_dc = int(jspectra.shape[1] / 2)
435 435 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
436 436 ind_vel = ind_vel.astype(int)
437 437
438 438 if ind_vel[0] < 0:
439 439 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + jspectra.shape[1] # number of profiles
440 440
441 441 if mode == 1:
442 442 jspectra[:, freq_dc, :] = (
443 443 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
444 444
445 445 if jcspectraExist:
446 446 jcspectra[:, freq_dc, :] = (
447 447 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
448 448
449 449 if mode == 2:
450 450
451 451 vel = numpy.array([-2, -1, 1, 2])
452 452 xx = numpy.zeros([4, 4])
453 453
454 454 for fil in range(4):
455 455 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
456 456
457 457 xx_inv = numpy.linalg.inv(xx)
458 458 xx_aux = xx_inv[0, :]
459 459
460 460 for ich in range(num_chan):
461 461 yy = jspectra[ich, ind_vel, :]
462 462 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
463 463
464 464 junkid = jspectra[ich, freq_dc, :] <= 0
465 465 cjunkid = sum(junkid)
466 466
467 467 if cjunkid.any():
468 468 jspectra[ich, freq_dc, junkid.nonzero()] = (
469 469 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
470 470
471 471 if jcspectraExist:
472 472 for ip in range(num_pairs):
473 473 yy = jcspectra[ip, ind_vel, :]
474 474 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
475 475
476 476 self.dataOut.data_spc = jspectra
477 477 self.dataOut.data_cspc = jcspectra
478 478
479 479 return self.dataOut
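
    # Hedged sketch (not part of the processing chain) of the mode=2 correction above:
    # a cubic polynomial in the velocity index is fitted through the bins at -2, -1, 1, 2
    # around DC, and the DC bin is replaced by that polynomial evaluated at 0, which is
    # what the weights xx_inv[0, :] implement. The spectral values below are hypothetical.
    @staticmethod
    def _exampleDCFit(y=(3.0, 2.5, 2.7, 3.2)):
        vel = numpy.array([-2, -1, 1, 2])
        xx = numpy.array([v**numpy.arange(4) for v in vel], dtype=float) # Vandermonde rows
        w = numpy.linalg.inv(xx)[0, :] # weights that evaluate the fit at velocity 0
        return numpy.dot(w, numpy.array(y)) # interpolated value used for the DC bin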
480 480
481 481 class removeInterference(Operation):
482 482
483 483 def removeInterference2(self):
484 484
485 485 cspc = self.dataOut.data_cspc
486 486 spc = self.dataOut.data_spc
487 487 Heights = numpy.arange(cspc.shape[2])
488 488 realCspc = numpy.abs(cspc)
489 489
490 490 for i in range(cspc.shape[0]):
491 491 LinePower= numpy.sum(realCspc[i], axis=0)
492 492 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
493 493 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
494 494 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
495 495 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
496 496 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
497 497
498 498
499 499 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
500 500 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
501 501 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
502 502 cspc[i,InterferenceRange,:] = numpy.NaN
503 503
504 504 self.dataOut.data_cspc = cspc
505 505
506 506 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
507 507
508 508 jspectra = self.dataOut.data_spc
509 509 jcspectra = self.dataOut.data_cspc
510 510 jnoise = self.dataOut.getNoise()
511 511 num_incoh = self.dataOut.nIncohInt
512 512
513 513 num_channel = jspectra.shape[0]
514 514 num_prof = jspectra.shape[1]
515 515 num_hei = jspectra.shape[2]
516 516
517 517 # hei_interf
518 518 if hei_interf is None:
519 519 count_hei = int(num_hei / 2)
520 520 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
521 521 hei_interf = numpy.asarray(hei_interf)[0]
522 522 # nhei_interf
523 523 if (nhei_interf == None):
524 524 nhei_interf = 5
525 525 if (nhei_interf < 1):
526 526 nhei_interf = 1
527 527 if (nhei_interf > count_hei):
528 528 nhei_interf = count_hei
529 529 if (offhei_interf == None):
530 530 offhei_interf = 0
531 531
532 532 ind_hei = list(range(num_hei))
533 533 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
534 534 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
535 535 mask_prof = numpy.asarray(list(range(num_prof)))
536 536 num_mask_prof = mask_prof.size
537 537 comp_mask_prof = [0, num_prof / 2]
538 538
539 539 # noise_exist: determines whether jnoise has been defined and holds the noise information for each channel
540 540 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
541 541 jnoise = numpy.nan * numpy.ones(num_channel)
542 542 noise_exist = jnoise[0] < numpy.Inf
543 543
544 544 # Interference removal subroutine
545 545 for ich in range(num_channel):
546 546 # Sort the spectra by power (lowest to highest)
547 547 power = jspectra[ich, mask_prof, :]
548 548 power = power[:, hei_interf]
549 549 power = power.sum(axis=0)
550 550 psort = power.ravel().argsort()
551 551
552 552 # Estimate the average interference in the power spectra using the selected heights
553 553 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
554 554 offhei_interf, nhei_interf + offhei_interf))]]]
555 555
556 556 if noise_exist:
557 557 # tmp_noise = jnoise[ich] / num_prof
558 558 tmp_noise = jnoise[ich]
559 559 junkspc_interf = junkspc_interf - tmp_noise
560 560 #junkspc_interf[:,comp_mask_prof] = 0
561 561
562 562 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
563 563 jspc_interf = jspc_interf.transpose()
564 564 # Computing the average interference spectrum
565 565 noiseid = numpy.where(
566 566 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
567 567 noiseid = noiseid[0]
568 568 cnoiseid = noiseid.size
569 569 interfid = numpy.where(
570 570 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
571 571 interfid = interfid[0]
572 572 cinterfid = interfid.size
573 573
574 574 if (cnoiseid > 0):
575 575 jspc_interf[noiseid] = 0
576 576
577 577 # Expanding the profiles to be cleaned
578 578 if (cinterfid > 0):
579 579 new_interfid = (
580 580 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
581 581 new_interfid = numpy.asarray(new_interfid)
582 582 new_interfid = {x for x in new_interfid}
583 583 new_interfid = numpy.array(list(new_interfid))
584 584 new_cinterfid = new_interfid.size
585 585 else:
586 586 new_cinterfid = 0
587 587
588 588 for ip in range(new_cinterfid):
589 589 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
590 590 jspc_interf[new_interfid[ip]
591 591 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
592 592
593 593 jspectra[ich, :, ind_hei] = jspectra[ich, :,
594 594 ind_hei] - jspc_interf # Fix indices
595 595
596 596 # Removing the interference at the point of strongest interference
597 597 ListAux = jspc_interf[mask_prof].tolist()
598 598 maxid = ListAux.index(max(ListAux))
599 599
600 600 if cinterfid > 0:
601 601 for ip in range(cinterfid * (interf == 2) - 1):
602 602 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
603 603 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
604 604 cind = len(ind)
605 605
606 606 if (cind > 0):
607 607 jspectra[ich, interfid[ip], ind] = tmp_noise * \
608 608 (1 + (numpy.random.uniform(cind) - 0.5) /
609 609 numpy.sqrt(num_incoh))
610 610
611 611 ind = numpy.array([-2, -1, 1, 2])
612 612 xx = numpy.zeros([4, 4])
613 613
614 614 for id1 in range(4):
615 615 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
616 616
617 617 xx_inv = numpy.linalg.inv(xx)
618 618 xx = xx_inv[:, 0]
619 619 ind = (ind + maxid + num_mask_prof) % num_mask_prof
620 620 yy = jspectra[ich, mask_prof[ind], :]
621 621 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
622 622 yy.transpose(), xx)
623 623
624 624 indAux = (jspectra[ich, :, :] < tmp_noise *
625 625 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
626 626 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
627 627 (1 - 1 / numpy.sqrt(num_incoh))
628 628
629 629 # Interference removal in the cross-spectra
630 630 if jcspectra is None:
631 631 return jspectra, jcspectra
632 632 num_pairs = int(jcspectra.size / (num_prof * num_hei))
633 633 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
634 634
635 635 for ip in range(num_pairs):
636 636
637 637 #-------------------------------------------
638 638
639 639 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
640 640 cspower = cspower[:, hei_interf]
641 641 cspower = cspower.sum(axis=0)
642 642
643 643 cspsort = cspower.ravel().argsort()
644 644 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
645 645 offhei_interf, nhei_interf + offhei_interf))]]]
646 646 junkcspc_interf = junkcspc_interf.transpose()
647 647 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
648 648
649 649 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
650 650
651 651 median_real = int(numpy.median(numpy.real(
652 652 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
653 653 median_imag = int(numpy.median(numpy.imag(
654 654 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
655 655 comp_mask_prof = [int(e) for e in comp_mask_prof]
656 656 junkcspc_interf[comp_mask_prof, :] = complex(
657 657 median_real, median_imag)
658 658
659 659 for iprof in range(num_prof):
660 660 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
661 661 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
662 662
663 663 # Removing the interference
664 664 jcspectra[ip, :, ind_hei] = jcspectra[ip,
665 665 :, ind_hei] - jcspc_interf
666 666
667 667 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
668 668 maxid = ListAux.index(max(ListAux))
669 669
670 670 ind = numpy.array([-2, -1, 1, 2])
671 671 xx = numpy.zeros([4, 4])
672 672
673 673 for id1 in range(4):
674 674 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
675 675
676 676 xx_inv = numpy.linalg.inv(xx)
677 677 xx = xx_inv[:, 0]
678 678
679 679 ind = (ind + maxid + num_mask_prof) % num_mask_prof
680 680 yy = jcspectra[ip, mask_prof[ind], :]
681 681 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
682 682
683 683 # Save results
684 684 self.dataOut.data_spc = jspectra
685 685 self.dataOut.data_cspc = jcspectra
686 686
687 687 return 1
688 688
689 689 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
690 690
691 691 self.dataOut = dataOut
692 692
693 693 if mode == 1:
694 694 self.removeInterference(interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None)
695 695 elif mode == 2:
696 696 self.removeInterference2()
697 697
698 698 return self.dataOut
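
    # Minimal numpy sketch (not part of the processing chain) of the core idea used in
    # removeInterference() above: an interference template is estimated from the
    # lowest-power heights and subtracted from every height. Shapes and the number of
    # reference heights are hypothetical.
    @staticmethod
    def _exampleInterferenceTemplate(nProfiles=64, nHeights=100, nhei_interf=5):
        spc = numpy.random.rayleigh(1.0, (nProfiles, nHeights)) # fake power spectra
        quiet = spc.sum(axis=0).argsort()[:nhei_interf] # lowest-power heights
        template = spc[:, quiet].mean(axis=1) # interference estimate per profile
        return spc - template[:, None] # cleaned spectra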
699 699
700 700
701 701 class IncohInt(Operation):
702 702
703 703 __profIndex = 0
704 704 __withOverlapping = False
705 705
706 706 __byTime = False
707 707 __initime = None
708 708 __lastdatatime = None
709 709 __integrationtime = None
710 710
711 711 __buffer_spc = None
712 712 __buffer_cspc = None
713 713 __buffer_dc = None
714 714
715 715 __dataReady = False
716 716
717 717 __timeInterval = None
718 718
719 719 n = None
720 720
721 721 def __init__(self):
722 722
723 723 Operation.__init__(self)
724 724
725 725 def setup(self, n=None, timeInterval=None, overlapping=False):
726 726 """
727 727 Set the parameters of the integration class.
728 728
729 729 Inputs:
730 730
731 731 n : Number of coherent integrations
732 732 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
733 733 overlapping :
734 734
735 735 """
736 736
737 737 self.__initime = None
738 738 self.__lastdatatime = 0
739 739
740 740 self.__buffer_spc = 0
741 741 self.__buffer_cspc = 0
742 742 self.__buffer_dc = 0
743 743
744 744 self.__profIndex = 0
745 745 self.__dataReady = False
746 746 self.__byTime = False
747 747
748 748 if n is None and timeInterval is None:
749 749 raise ValueError("n or timeInterval should be specified ...")
750 750
751 751 if n is not None:
752 752 self.n = int(n)
753 753 else:
754 754
755 755 self.__integrationtime = int(timeInterval)
756 756 self.n = None
757 757 self.__byTime = True
758 758
759 759 def putData(self, data_spc, data_cspc, data_dc):
760 760 """
761 761 Add a profile to the __buffer_spc and increase in one the __profileIndex
762 762
763 763 """
764 764
765 765 self.__buffer_spc += data_spc
766 766
767 767 if data_cspc is None:
768 768 self.__buffer_cspc = None
769 769 else:
770 770 self.__buffer_cspc += data_cspc
771 771
772 772 if data_dc is None:
773 773 self.__buffer_dc = None
774 774 else:
775 775 self.__buffer_dc += data_dc
776 776
777 777 self.__profIndex += 1
778 778
779 779 return
780 780
781 781 def pushData(self):
782 782 """
783 783 Return the sum of the last profiles and the profiles used in the sum.
784 784
785 785 Affected:
786 786
787 787 self.__profileIndex
788 788
789 789 """
790 790
791 791 data_spc = self.__buffer_spc
792 792 data_cspc = self.__buffer_cspc
793 793 data_dc = self.__buffer_dc
794 794 n = self.__profIndex
795 795
796 796 self.__buffer_spc = 0
797 797 self.__buffer_cspc = 0
798 798 self.__buffer_dc = 0
799 799 self.__profIndex = 0
800 800
801 801 return data_spc, data_cspc, data_dc, n
802 802
803 803 def byProfiles(self, *args):
804 804
805 805 self.__dataReady = False
806 806 avgdata_spc = None
807 807 avgdata_cspc = None
808 808 avgdata_dc = None
809 809
810 810 self.putData(*args)
811 811
812 812 if self.__profIndex == self.n:
813 813
814 814 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
815 815 self.n = n
816 816 self.__dataReady = True
817 817
818 818 return avgdata_spc, avgdata_cspc, avgdata_dc
819 819
820 820 def byTime(self, datatime, *args):
821 821
822 822 self.__dataReady = False
823 823 avgdata_spc = None
824 824 avgdata_cspc = None
825 825 avgdata_dc = None
826 826
827 827 self.putData(*args)
828 828
829 829 if (datatime - self.__initime) >= self.__integrationtime:
830 830 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
831 831 self.n = n
832 832 self.__dataReady = True
833 833
834 834 return avgdata_spc, avgdata_cspc, avgdata_dc
835 835
836 836 def integrate(self, datatime, *args):
837 837
838 838 if self.__profIndex == 0:
839 839 self.__initime = datatime
840 840
841 841 if self.__byTime:
842 842 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
843 843 datatime, *args)
844 844 else:
845 845 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
846 846
847 847 if not self.__dataReady:
848 848 return None, None, None, None
849 849
850 850 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
851 851
852 852 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
853 853 if n == 1:
854 854 return dataOut
855 855
856 856 dataOut.flagNoData = True
857 857
858 858 if not self.isConfig:
859 859 self.setup(n, timeInterval, overlapping)
860 860 self.isConfig = True
861 861
862 862 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
863 863 dataOut.data_spc,
864 864 dataOut.data_cspc,
865 865 dataOut.data_dc)
866 866
867 867 if self.__dataReady:
868 868
869 869 dataOut.data_spc = avgdata_spc
870 870 dataOut.data_cspc = avgdata_cspc
871 871 dataOut.data_dc = avgdata_dc
872 872 dataOut.nIncohInt *= self.n
873 873 dataOut.utctime = avgdatatime
874 874 dataOut.flagNoData = False
875 875
876 return dataOut No newline at end of file
876 return dataOut
877
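
# Hedged sketch (module-level helper, not used by the processing chain) of what
# IncohInt accumulates: n successive spectra blocks are summed and nIncohInt is
# scaled by the same factor, so downstream noise estimates see the larger number
# of incoherent integrations. Shapes below are hypothetical.
def _example_incoherent_integration(n=10, shape=(4, 64, 100)):
    blocks = [numpy.random.rayleigh(1.0, shape) for _ in range(n)] # fake spectra blocks
    integrated = sum(blocks) # what pushData() returns after n calls to putData()
    return integrated, n # dataOut.nIncohInt is multiplied by n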
878 class dopplerFlip(Operation):
879
880 def run(self, dataOut):
881 # array 1: (num_chan, num_profiles, num_heights)
882 self.dataOut = dataOut
883 # JULIA-oblique, index 2
884 # array 2: (num_profiles, num_heights)
885 jspectra = self.dataOut.data_spc[2]
886 jspectra_tmp = numpy.zeros(jspectra.shape)
887 num_profiles = jspectra.shape[0]
888 freq_dc = int(num_profiles / 2)
889 # Flip using a for loop
890 for j in range(num_profiles):
891 jspectra_tmp[num_profiles-j-1]= jspectra[j]
892 # Restore the DC profile and the profile immediately before it
893 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
894 jspectra_tmp[freq_dc]= jspectra[freq_dc]
895 # the modified channel is written back into the channel array
896 self.dataOut.data_spc[2] = jspectra_tmp
897
898 return self.dataOut No newline at end of file
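
# Hedged note (module-level helper, not used by the processing chain): the flip loop
# in dopplerFlip above is equivalent to reversing the Doppler (profile) axis of the
# selected channel and then putting the DC bin and the bin just below it back in place.
# A compact numpy form of the same operation:
def _example_doppler_flip(jspectra):
    num_profiles = jspectra.shape[0]
    freq_dc = int(num_profiles / 2)
    flipped = jspectra[::-1].copy() # reverse the profile (Doppler) axis
    flipped[freq_dc - 1] = jspectra[freq_dc - 1] # keep these two bins unchanged
    flipped[freq_dc] = jspectra[freq_dc]
    return flipped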
@@ -1,1629 +1,1625
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList):
59 59
60 60 channelIndexList = []
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
68 68 self.selectChannelsByIndex(channelIndexList)
69 69 return self.dataOut
70 70
71 71 def selectChannelsByIndex(self, channelIndexList):
72 72 """
73 73 Selects a block of data based on the channels given in channelIndexList
74 74
75 75 Input:
76 76 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
77 77
78 78 Affected:
79 79 self.dataOut.data
80 80 self.dataOut.channelIndexList
81 81 self.dataOut.nChannels
82 82 self.dataOut.m_ProcessingHeader.totalSpectra
83 83 self.dataOut.systemHeaderObj.numChannels
84 84 self.dataOut.m_ProcessingHeader.blockSize
85 85
86 86 Return:
87 87 None
88 88 """
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 If the data comes in blocks, the dimensions are [nChannels, nProfiles, nHeis]
98 98 """
99 99 data = self.dataOut.data[channelIndexList,:,:]
100 100 else:
101 101 data = self.dataOut.data[channelIndexList,:]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
105 105 self.dataOut.channelList = range(len(channelIndexList))
106 106
107 107 elif self.dataOut.type == 'Spectra':
108 108 data_spc = self.dataOut.data_spc[channelIndexList, :]
109 109 data_dc = self.dataOut.data_dc[channelIndexList, :]
110 110
111 111 self.dataOut.data_spc = data_spc
112 112 self.dataOut.data_dc = data_dc
113 113
114 114 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
115 115 self.dataOut.channelList = range(len(channelIndexList))
116 116 self.__selectPairsByChannel(channelIndexList)
117 117
118 118 return 1
119 119
120 120 def __selectPairsByChannel(self, channelList=None):
121 121
122 122 if channelList == None:
123 123 return
124 124
125 125 pairsIndexListSelected = []
126 126 for pairIndex in self.dataOut.pairsIndexList:
127 127 # First pair
128 128 if self.dataOut.pairsList[pairIndex][0] not in channelList:
129 129 continue
130 130 # Second pair
131 131 if self.dataOut.pairsList[pairIndex][1] not in channelList:
132 132 continue
133 133
134 134 pairsIndexListSelected.append(pairIndex)
135 135
136 136 if not pairsIndexListSelected:
137 137 self.dataOut.data_cspc = None
138 138 self.dataOut.pairsList = []
139 139 return
140 140
141 141 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
142 142 self.dataOut.pairsList = [self.dataOut.pairsList[i]
143 143 for i in pairsIndexListSelected]
144 144
145 145 return
146 146
147 147 class selectHeights(Operation):
148 148
149 def run(self, dataOut, minHei=None, maxHei=None):
149 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
150 150 """
151 151 Selects a block of data based on a range of height values such that
152 152 minHei <= height <= maxHei
153 153
154 154 Input:
155 155 minHei : minimum height to consider
156 156 maxHei : maximum height to consider
157 157
158 158 Affected:
159 159 Several values are changed indirectly through the selectHeightsByIndex method
160 160
161 161 Return:
162 162 1 if the method runs successfully, otherwise it returns 0
163 163 """
164 164
165 165 self.dataOut = dataOut
166 166
167 if minHei == None:
168 minHei = self.dataOut.heightList[0]
167 if minHei and maxHei:
169 168
170 if maxHei == None:
171 maxHei = self.dataOut.heightList[-1]
169 if (minHei < self.dataOut.heightList[0]):
170 minHei = self.dataOut.heightList[0]
172 171
173 if (minHei < self.dataOut.heightList[0]):
174 minHei = self.dataOut.heightList[0]
172 if (maxHei > self.dataOut.heightList[-1]):
173 maxHei = self.dataOut.heightList[-1]
175 174
176 if (maxHei > self.dataOut.heightList[-1]):
177 maxHei = self.dataOut.heightList[-1]
178
179 minIndex = 0
180 maxIndex = 0
181 heights = self.dataOut.heightList
175 minIndex = 0
176 maxIndex = 0
177 heights = self.dataOut.heightList
182 178
183 inda = numpy.where(heights >= minHei)
184 indb = numpy.where(heights <= maxHei)
179 inda = numpy.where(heights >= minHei)
180 indb = numpy.where(heights <= maxHei)
185 181
186 try:
187 minIndex = inda[0][0]
188 except:
189 minIndex = 0
182 try:
183 minIndex = inda[0][0]
184 except:
185 minIndex = 0
190 186
191 try:
192 maxIndex = indb[0][-1]
193 except:
194 maxIndex = len(heights)
187 try:
188 maxIndex = indb[0][-1]
189 except:
190 maxIndex = len(heights)
195 191
196 192 self.selectHeightsByIndex(minIndex, maxIndex)
197 193
198 194 return self.dataOut
199 195
200 196 def selectHeightsByIndex(self, minIndex, maxIndex):
201 197 """
202 198 Selects a block of data based on a range of height indexes such that
203 199 minIndex <= index <= maxIndex
204 200
205 201 Input:
206 202 minIndex : minimum height index to consider
207 203 maxIndex : maximum height index to consider
208 204
209 205 Affected:
210 206 self.dataOut.data
211 207 self.dataOut.heightList
212 208
213 209 Return:
214 210 1 if the method runs successfully, otherwise it returns 0
215 211 """
216 212
217 213 if self.dataOut.type == 'Voltage':
218 214 if (minIndex < 0) or (minIndex > maxIndex):
219 215 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
220 216
221 217 if (maxIndex >= self.dataOut.nHeights):
222 218 maxIndex = self.dataOut.nHeights
223 219
224 220 #voltage
225 221 if self.dataOut.flagDataAsBlock:
226 222 """
227 223 If the data comes in blocks, the dimensions are [nChannels, nProfiles, nHeis]
228 224 """
229 225 data = self.dataOut.data[:,:, minIndex:maxIndex]
230 226 else:
231 227 data = self.dataOut.data[:, minIndex:maxIndex]
232 228
233 229 # firstHeight = self.dataOut.heightList[minIndex]
234 230
235 231 self.dataOut.data = data
236 232 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
237 233
238 234 if self.dataOut.nHeights <= 1:
239 235 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
240 236 elif self.dataOut.type == 'Spectra':
241 237 if (minIndex < 0) or (minIndex > maxIndex):
242 238 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
243 239 minIndex, maxIndex))
244 240
245 241 if (maxIndex >= self.dataOut.nHeights):
246 242 maxIndex = self.dataOut.nHeights - 1
247 243
248 244 # Spectra
249 245 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
250 246
251 247 data_cspc = None
252 248 if self.dataOut.data_cspc is not None:
253 249 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
254 250
255 251 data_dc = None
256 252 if self.dataOut.data_dc is not None:
257 253 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
258 254
259 255 self.dataOut.data_spc = data_spc
260 256 self.dataOut.data_cspc = data_cspc
261 257 self.dataOut.data_dc = data_dc
262 258
263 259 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
264 260
265 261 return 1
266 262
267 263
268 264 class filterByHeights(Operation):
269 265
270 266 def run(self, dataOut, window):
271 267
272 268 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
273 269
274 270 if window == None:
275 271 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
276 272
277 273 newdelta = deltaHeight * window
278 274 r = dataOut.nHeights % window
279 275 newheights = (dataOut.nHeights-r)/window
280 276
281 277 if newheights <= 1:
282 278 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
283 279
284 280 if dataOut.flagDataAsBlock:
285 281 """
286 282 If the data comes in blocks, the dimensions are [nChannels, nProfiles, nHeis]
287 283 """
288 284 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
289 285 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
290 286 buffer = numpy.sum(buffer,3)
291 287
292 288 else:
293 289 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
294 290 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
295 291 buffer = numpy.sum(buffer,2)
296 292
297 293 dataOut.data = buffer
298 294 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
299 295 dataOut.windowOfFilter = window
300 296
301 297 return dataOut
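
    # Hedged sketch (not part of the processing chain) of the height decimation above
    # for the non-block case: consecutive groups of 'window' samples are summed, so the
    # number of heights shrinks by that factor and the height spacing grows accordingly.
    # The input sizes below are hypothetical.
    @staticmethod
    def _exampleDecimate(nChannels=2, nHeights=12, window=3):
        data = numpy.arange(nChannels*nHeights).reshape(nChannels, nHeights).astype(float)
        r = nHeights % window
        buffer = data[:, 0:int(nHeights - r)]
        buffer = buffer.reshape(nChannels, int(nHeights/window), int(window))
        return numpy.sum(buffer, 2) # shape (nChannels, nHeights//window)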
302 298
303 299
304 300 class setH0(Operation):
305 301
306 302 def run(self, dataOut, h0, deltaHeight = None):
307 303
308 304 if not deltaHeight:
309 305 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
310 306
311 307 nHeights = dataOut.nHeights
312 308
313 309 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
314 310
315 311 dataOut.heightList = newHeiRange
316 312
317 313 return dataOut
318 314
319 315
320 316 class deFlip(Operation):
321 317
322 318 def run(self, dataOut, channelList = []):
323 319
324 320 data = dataOut.data.copy()
325 321
326 322 if dataOut.flagDataAsBlock:
327 323 flip = self.flip
328 324 profileList = list(range(dataOut.nProfiles))
329 325
330 326 if not channelList:
331 327 for thisProfile in profileList:
332 328 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
333 329 flip *= -1.0
334 330 else:
335 331 for thisChannel in channelList:
336 332 if thisChannel not in dataOut.channelList:
337 333 continue
338 334
339 335 for thisProfile in profileList:
340 336 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
341 337 flip *= -1.0
342 338
343 339 self.flip = flip
344 340
345 341 else:
346 342 if not channelList:
347 343 data[:,:] = data[:,:]*self.flip
348 344 else:
349 345 for thisChannel in channelList:
350 346 if thisChannel not in dataOut.channelList:
351 347 continue
352 348
353 349 data[thisChannel,:] = data[thisChannel,:]*self.flip
354 350
355 351 self.flip *= -1.
356 352
357 353 dataOut.data = data
358 354
359 355 return dataOut
360 356
361 357
362 358 class setAttribute(Operation):
363 359 '''
364 360 Set an arbitrary attribute(s) to dataOut
365 361 '''
366 362
367 363 def __init__(self):
368 364
369 365 Operation.__init__(self)
370 366 self._ready = False
371 367
372 368 def run(self, dataOut, **kwargs):
373 369
374 370 for key, value in kwargs.items():
375 371 setattr(dataOut, key, value)
376 372
377 373 return dataOut
378 374
379 375
380 376 @MPDecorator
381 377 class printAttribute(Operation):
382 378 '''
383 379 Print an arbitrary attribute of dataOut
384 380 '''
385 381
386 382 def __init__(self):
387 383
388 384 Operation.__init__(self)
389 385
390 386 def run(self, dataOut, attributes):
391 387
392 388 if isinstance(attributes, str):
393 389 attributes = [attributes]
394 390 for attr in attributes:
395 391 if hasattr(dataOut, attr):
396 392 log.log(getattr(dataOut, attr), attr)
397 393
398 394
399 395 class interpolateHeights(Operation):
400 396
401 397 def run(self, dataOut, topLim, botLim):
402 398 #69 to 72 for JULIA
403 399 #82-84 for meteors
404 400 if len(numpy.shape(dataOut.data))==2:
405 401 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
406 402 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
407 403 #dataOut.data[:,botLim:limSup+1] = sampInterp
408 404 dataOut.data[:,botLim:topLim+1] = sampInterp
409 405 else:
410 406 nHeights = dataOut.data.shape[2]
411 407 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
412 408 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
413 409 f = interpolate.interp1d(x, y, axis = 2)
414 410 xnew = numpy.arange(botLim,topLim+1)
415 411 ynew = f(xnew)
416 412 dataOut.data[:,:,botLim:topLim+1] = ynew
417 413
418 414 return dataOut
419 415
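
# A minimal sketch (assuming scipy is available, as interpolateHeights itself
# relies on scipy.interpolate) of filling a contaminated height band
# [botLim, topLim] by interpolating from the surrounding heights; indices and
# array sizes are illustrative only.
#
# import numpy
# from scipy import interpolate
# nHeights = 10
# data = numpy.arange(float(nHeights)).reshape(1, 1, nHeights)   # (nCh, nProf, nHeis)
# botLim, topLim = 4, 6
# x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
# f = interpolate.interp1d(x, data[:, :, x], axis=2)
# data[:, :, botLim:topLim + 1] = f(numpy.arange(botLim, topLim + 1))
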
420 416
421 417 class CohInt(Operation):
422 418
423 419 isConfig = False
424 420 __profIndex = 0
425 421 __byTime = False
426 422 __initime = None
427 423 __lastdatatime = None
428 424 __integrationtime = None
429 425 __buffer = None
430 426 __bufferStride = []
431 427 __dataReady = False
432 428 __profIndexStride = 0
433 429 __dataToPutStride = False
434 430 n = None
435 431
436 432 def __init__(self, **kwargs):
437 433
438 434 Operation.__init__(self, **kwargs)
439 435
440 436 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
441 437 """
442 438 Set the parameters of the integration class.
443 439
444 440 Inputs:
445 441
446 442 n : Number of coherent integrations
447 443                 timeInterval : Integration time in seconds; ignored if "n" is given
448 444                 overlapping : If True, keep a sliding buffer of the last n profiles (overlapping integration)
449 445 """
450 446
451 447 self.__initime = None
452 448 self.__lastdatatime = 0
453 449 self.__buffer = None
454 450 self.__dataReady = False
455 451 self.byblock = byblock
456 452 self.stride = stride
457 453
458 454 if n == None and timeInterval == None:
459 455 raise ValueError("n or timeInterval should be specified ...")
460 456
461 457 if n != None:
462 458 self.n = n
463 459 self.__byTime = False
464 460 else:
465 461 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
466 462 self.n = 9999
467 463 self.__byTime = True
468 464
469 465 if overlapping:
470 466 self.__withOverlapping = True
471 467 self.__buffer = None
472 468 else:
473 469 self.__withOverlapping = False
474 470 self.__buffer = 0
475 471
476 472 self.__profIndex = 0
477 473
478 474 def putData(self, data):
479 475
480 476 """
481 477         Add a profile to __buffer and increment __profIndex by one
482 478
483 479 """
484 480
485 481 if not self.__withOverlapping:
486 482 self.__buffer += data.copy()
487 483 self.__profIndex += 1
488 484 return
489 485
490 486 #Overlapping data
491 487 nChannels, nHeis = data.shape
492 488 data = numpy.reshape(data, (1, nChannels, nHeis))
493 489
494 490         #If the buffer is empty, start it with this profile
495 491 if self.__buffer is None:
496 492 self.__buffer = data
497 493 self.__profIndex += 1
498 494 return
499 495
500 496         #If fewer than n profiles are buffered, stack the new profile onto the buffer
501 497 if self.__profIndex < self.n:
502 498 self.__buffer = numpy.vstack((self.__buffer, data))
503 499 self.__profIndex += 1
504 500 return
505 501
506 502         #If the buffer already holds n profiles, shift it and replace the oldest profile with the new one
507 503 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
508 504 self.__buffer[self.n-1] = data
509 505 self.__profIndex = self.n
510 506 return
511 507
512 508
513 509 def pushData(self):
514 510 """
515 511 Return the sum of the last profiles and the profiles used in the sum.
516 512
517 513 Affected:
518 514
519 515 self.__profileIndex
520 516
521 517 """
522 518
523 519 if not self.__withOverlapping:
524 520 data = self.__buffer
525 521 n = self.__profIndex
526 522
527 523 self.__buffer = 0
528 524 self.__profIndex = 0
529 525
530 526 return data, n
531 527
532 528 #Integration with Overlapping
533 529 data = numpy.sum(self.__buffer, axis=0)
534 530 # print data
535 531 # raise
536 532 n = self.__profIndex
537 533
538 534 return data, n
539 535
540 536 def byProfiles(self, data):
541 537
542 538 self.__dataReady = False
543 539 avgdata = None
544 540 # n = None
545 541 # print data
546 542 # raise
547 543 self.putData(data)
548 544
549 545 if self.__profIndex == self.n:
550 546 avgdata, n = self.pushData()
551 547 self.__dataReady = True
552 548
553 549 return avgdata
554 550
555 551 def byTime(self, data, datatime):
556 552
557 553 self.__dataReady = False
558 554 avgdata = None
559 555 n = None
560 556
561 557 self.putData(data)
562 558
563 559 if (datatime - self.__initime) >= self.__integrationtime:
564 560 avgdata, n = self.pushData()
565 561 self.n = n
566 562 self.__dataReady = True
567 563
568 564 return avgdata
569 565
570 566 def integrateByStride(self, data, datatime):
571 567 # print data
572 568 if self.__profIndex == 0:
573 569 self.__buffer = [[data.copy(), datatime]]
574 570 else:
575 571 self.__buffer.append([data.copy(),datatime])
576 572 self.__profIndex += 1
577 573 self.__dataReady = False
578 574
579 575 if self.__profIndex == self.n * self.stride :
580 576 self.__dataToPutStride = True
581 577 self.__profIndexStride = 0
582 578 self.__profIndex = 0
583 579 self.__bufferStride = []
584 580 for i in range(self.stride):
585 581 current = self.__buffer[i::self.stride]
586 582 data = numpy.sum([t[0] for t in current], axis=0)
587 583 avgdatatime = numpy.average([t[1] for t in current])
588 584 # print data
589 585 self.__bufferStride.append((data, avgdatatime))
590 586
591 587 if self.__dataToPutStride:
592 588 self.__dataReady = True
593 589 self.__profIndexStride += 1
594 590 if self.__profIndexStride == self.stride:
595 591 self.__dataToPutStride = False
596 592 # print self.__bufferStride[self.__profIndexStride - 1]
597 593 # raise
598 594 return self.__bufferStride[self.__profIndexStride - 1]
599 595
600 596
601 597 return None, None
602 598
603 599 def integrate(self, data, datatime=None):
604 600
605 601 if self.__initime == None:
606 602 self.__initime = datatime
607 603
608 604 if self.__byTime:
609 605 avgdata = self.byTime(data, datatime)
610 606 else:
611 607 avgdata = self.byProfiles(data)
612 608
613 609
614 610 self.__lastdatatime = datatime
615 611
616 612 if avgdata is None:
617 613 return None, None
618 614
619 615 avgdatatime = self.__initime
620 616
621 617 deltatime = datatime - self.__lastdatatime
622 618
623 619 if not self.__withOverlapping:
624 620 self.__initime = datatime
625 621 else:
626 622 self.__initime += deltatime
627 623
628 624 return avgdata, avgdatatime
629 625
630 626 def integrateByBlock(self, dataOut):
631 627
632 628 times = int(dataOut.data.shape[1]/self.n)
633 629 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
634 630
635 631 id_min = 0
636 632 id_max = self.n
637 633
638 634 for i in range(times):
639 635 junk = dataOut.data[:,id_min:id_max,:]
640 636 avgdata[:,i,:] = junk.sum(axis=1)
641 637 id_min += self.n
642 638 id_max += self.n
643 639
644 640 timeInterval = dataOut.ippSeconds*self.n
645 641 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
646 642 self.__dataReady = True
647 643 return avgdata, avgdatatime
648 644
649 645 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
650 646
651 647 if not self.isConfig:
652 648 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
653 649 self.isConfig = True
654 650
655 651 if dataOut.flagDataAsBlock:
656 652 """
657 653 Si la data es leida por bloques, dimension = [nChannels, nProfiles, nHeis]
658 654             If the data is read as blocks, dimension = [nChannels, nProfiles, nHeis]
659 655 avgdata, avgdatatime = self.integrateByBlock(dataOut)
660 656 dataOut.nProfiles /= self.n
661 657 else:
662 658 if stride is None:
663 659 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
664 660 else:
665 661 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
666 662
667 663
668 664 # dataOut.timeInterval *= n
669 665 dataOut.flagNoData = True
670 666
671 667 if self.__dataReady:
672 668 dataOut.data = avgdata
673 669 if not dataOut.flagCohInt:
674 670 dataOut.nCohInt *= self.n
675 671 dataOut.flagCohInt = True
676 672 dataOut.utctime = avgdatatime
677 673 # print avgdata, avgdatatime
678 674 # raise
679 675 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
680 676 dataOut.flagNoData = False
681 677 return dataOut
682 678
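
# A numpy-only sketch (independent of the class above) of plain non-overlapping
# coherent integration: n consecutive complex profiles are summed, so coherent
# echoes add in amplitude while noise adds incoherently, improving SNR by
# roughly a factor of n. Sizes and the seed are illustrative.
#
# import numpy
# rng = numpy.random.default_rng(0)
# n, nChannels, nHeights = 8, 2, 100
# profiles = rng.standard_normal((n, nChannels, nHeights)) \
#            + 1j * rng.standard_normal((n, nChannels, nHeights))
# integrated = profiles.sum(axis=0)     # same role as the (data, n) from pushData()
# # downstream, dataOut.nCohInt is multiplied by n to record the accumulation
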
683 679 class Decoder(Operation):
684 680
685 681 isConfig = False
686 682 __profIndex = 0
687 683
688 684 code = None
689 685
690 686 nCode = None
691 687 nBaud = None
692 688
693 689 def __init__(self, **kwargs):
694 690
695 691 Operation.__init__(self, **kwargs)
696 692
697 693 self.times = None
698 694 self.osamp = None
699 695 # self.__setValues = False
700 696 self.isConfig = False
701 697 self.setupReq = False
702 698 def setup(self, code, osamp, dataOut):
703 699
704 700 self.__profIndex = 0
705 701
706 702 self.code = code
707 703
708 704 self.nCode = len(code)
709 705 self.nBaud = len(code[0])
710 706
711 707 if (osamp != None) and (osamp >1):
712 708 self.osamp = osamp
713 709 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
714 710 self.nBaud = self.nBaud*self.osamp
715 711
716 712 self.__nChannels = dataOut.nChannels
717 713 self.__nProfiles = dataOut.nProfiles
718 714 self.__nHeis = dataOut.nHeights
719 715
720 716 if self.__nHeis < self.nBaud:
721 717 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
722 718
723 719 #Frequency
724 720 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
725 721
726 722 __codeBuffer[:,0:self.nBaud] = self.code
727 723
728 724 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
729 725
730 726 if dataOut.flagDataAsBlock:
731 727
732 728 self.ndatadec = self.__nHeis #- self.nBaud + 1
733 729
734 730 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
735 731
736 732 else:
737 733
738 734 #Time
739 735 self.ndatadec = self.__nHeis #- self.nBaud + 1
740 736
741 737 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
742 738
743 739 def __convolutionInFreq(self, data):
744 740
745 741 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
746 742
747 743 fft_data = numpy.fft.fft(data, axis=1)
748 744
749 745 conv = fft_data*fft_code
750 746
751 747 data = numpy.fft.ifft(conv,axis=1)
752 748
753 749 return data
754 750
755 751 def __convolutionInFreqOpt(self, data):
756 752
757 753 raise NotImplementedError
758 754
759 755 def __convolutionInTime(self, data):
760 756
761 757 code = self.code[self.__profIndex]
762 758 for i in range(self.__nChannels):
763 759 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
764 760
765 761 return self.datadecTime
766 762
767 763 def __convolutionByBlockInTime(self, data):
768 764
769 765 repetitions = int(self.__nProfiles / self.nCode)
770 766 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
771 767 junk = junk.flatten()
772 768 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
773 769 profilesList = range(self.__nProfiles)
774 770
775 771 for i in range(self.__nChannels):
776 772 for j in profilesList:
777 773 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
778 774 return self.datadecTime
779 775
780 776 def __convolutionByBlockInFreq(self, data):
781 777
782 778         raise NotImplementedError("Decoder by frequency for blocks is not implemented")
783 779
784 780
785 781 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
786 782
787 783 fft_data = numpy.fft.fft(data, axis=2)
788 784
789 785 conv = fft_data*fft_code
790 786
791 787 data = numpy.fft.ifft(conv,axis=2)
792 788
793 789 return data
794 790
795 791
796 792 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
797 793
798 794 if dataOut.flagDecodeData:
799 795             print("This data is already decoded, decoding it again ...")
800 796
801 797 if not self.isConfig:
802 798
803 799 if code is None:
804 800 if dataOut.code is None:
805 801 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
806 802
807 803 code = dataOut.code
808 804 else:
809 805 code = numpy.array(code).reshape(nCode,nBaud)
810 806 self.setup(code, osamp, dataOut)
811 807
812 808 self.isConfig = True
813 809
814 810 if mode == 3:
815 811 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
816 812
817 813 if times != None:
818 814             sys.stderr.write("Decoder Warning: Argument 'times' is not used anymore\n")
819 815
820 816 if self.code is None:
821 817 print("Fail decoding: Code is not defined.")
822 818 return
823 819
824 820 self.__nProfiles = dataOut.nProfiles
825 821 datadec = None
826 822
827 823 if mode == 3:
828 824 mode = 0
829 825
830 826 if dataOut.flagDataAsBlock:
831 827 """
832 828             Decoding when data has been read as a block,
833 829 """
834 830
835 831 if mode == 0:
836 832 datadec = self.__convolutionByBlockInTime(dataOut.data)
837 833 if mode == 1:
838 834 datadec = self.__convolutionByBlockInFreq(dataOut.data)
839 835 else:
840 836 """
841 837             Decoding when data has been read profile by profile
842 838 """
843 839 if mode == 0:
844 840 datadec = self.__convolutionInTime(dataOut.data)
845 841
846 842 if mode == 1:
847 843 datadec = self.__convolutionInFreq(dataOut.data)
848 844
849 845 if mode == 2:
850 846 datadec = self.__convolutionInFreqOpt(dataOut.data)
851 847
852 848 if datadec is None:
853 849             raise ValueError("Decoding mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
854 850
855 851 dataOut.code = self.code
856 852 dataOut.nCode = self.nCode
857 853 dataOut.nBaud = self.nBaud
858 854
859 855 dataOut.data = datadec
860 856
861 857 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
862 858
863 859         dataOut.flagDecodeData = True # assume the data is now decoded
864 860
865 861 if self.__profIndex == self.nCode-1:
866 862 self.__profIndex = 0
867 863 return dataOut
868 864
869 865 self.__profIndex += 1
870 866
871 867 return dataOut
872 868 # dataOut.flagDeflipData = True # assume the data has not been deflipped
873 869
874 870
875 871 class ProfileConcat(Operation):
876 872
877 873 isConfig = False
878 874 buffer = None
879 875
880 876 def __init__(self, **kwargs):
881 877
882 878 Operation.__init__(self, **kwargs)
883 879 self.profileIndex = 0
884 880
885 881 def reset(self):
886 882 self.buffer = numpy.zeros_like(self.buffer)
887 883 self.start_index = 0
888 884 self.times = 1
889 885
890 886 def setup(self, data, m, n=1):
891 887 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
892 888 self.nHeights = data.shape[1]#.nHeights
893 889 self.start_index = 0
894 890 self.times = 1
895 891
896 892 def concat(self, data):
897 893
898 894 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
899 895 self.start_index = self.start_index + self.nHeights
900 896
901 897 def run(self, dataOut, m):
902 898 dataOut.flagNoData = True
903 899
904 900 if not self.isConfig:
905 901 self.setup(dataOut.data, m, 1)
906 902 self.isConfig = True
907 903
908 904 if dataOut.flagDataAsBlock:
909 905             raise ValueError("ProfileConcat can only be used when voltage has been read profile by profile, getBlock = False")
910 906
911 907 else:
912 908 self.concat(dataOut.data)
913 909 self.times += 1
914 910 if self.times > m:
915 911 dataOut.data = self.buffer
916 912 self.reset()
917 913 dataOut.flagNoData = False
918 914                 # more header and dataOut properties should be updated here, for example the heights
919 915 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
920 916 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
921 917 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
922 918 dataOut.ippSeconds *= m
923 919 return dataOut
924 920
925 921 class ProfileSelector(Operation):
926 922
927 923 profileIndex = None
928 924     # Total number of profiles
929 925 nProfiles = None
930 926
931 927 def __init__(self, **kwargs):
932 928
933 929 Operation.__init__(self, **kwargs)
934 930 self.profileIndex = 0
935 931
936 932 def incProfileIndex(self):
937 933
938 934 self.profileIndex += 1
939 935
940 936 if self.profileIndex >= self.nProfiles:
941 937 self.profileIndex = 0
942 938
943 939 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
944 940
945 941 if profileIndex < minIndex:
946 942 return False
947 943
948 944 if profileIndex > maxIndex:
949 945 return False
950 946
951 947 return True
952 948
953 949 def isThisProfileInList(self, profileIndex, profileList):
954 950
955 951 if profileIndex not in profileList:
956 952 return False
957 953
958 954 return True
959 955
960 956 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
961 957
962 958 """
963 959 ProfileSelector:
964 960
965 961 Inputs:
966 962 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
967 963
968 964 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
969 965
970 966 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
971 967
972 968 """
973 969
974 970 if rangeList is not None:
975 971 if type(rangeList[0]) not in (tuple, list):
976 972 rangeList = [rangeList]
977 973
978 974 dataOut.flagNoData = True
979 975
980 976 if dataOut.flagDataAsBlock:
981 977 """
982 978 data dimension = [nChannels, nProfiles, nHeis]
983 979 """
984 980 if profileList != None:
985 981 dataOut.data = dataOut.data[:,profileList,:]
986 982
987 983 if profileRangeList != None:
988 984 minIndex = profileRangeList[0]
989 985 maxIndex = profileRangeList[1]
990 986 profileList = list(range(minIndex, maxIndex+1))
991 987
992 988 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
993 989
994 990 if rangeList != None:
995 991
996 992 profileList = []
997 993
998 994 for thisRange in rangeList:
999 995 minIndex = thisRange[0]
1000 996 maxIndex = thisRange[1]
1001 997
1002 998 profileList.extend(list(range(minIndex, maxIndex+1)))
1003 999
1004 1000 dataOut.data = dataOut.data[:,profileList,:]
1005 1001
1006 1002 dataOut.nProfiles = len(profileList)
1007 1003 dataOut.profileIndex = dataOut.nProfiles - 1
1008 1004 dataOut.flagNoData = False
1009 1005
1010 1006 return dataOut
1011 1007
1012 1008 """
1013 1009 data dimension = [nChannels, nHeis]
1014 1010 """
1015 1011
1016 1012 if profileList != None:
1017 1013
1018 1014 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1019 1015
1020 1016 self.nProfiles = len(profileList)
1021 1017 dataOut.nProfiles = self.nProfiles
1022 1018 dataOut.profileIndex = self.profileIndex
1023 1019 dataOut.flagNoData = False
1024 1020
1025 1021 self.incProfileIndex()
1026 1022 return dataOut
1027 1023
1028 1024 if profileRangeList != None:
1029 1025
1030 1026 minIndex = profileRangeList[0]
1031 1027 maxIndex = profileRangeList[1]
1032 1028
1033 1029 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1034 1030
1035 1031 self.nProfiles = maxIndex - minIndex + 1
1036 1032 dataOut.nProfiles = self.nProfiles
1037 1033 dataOut.profileIndex = self.profileIndex
1038 1034 dataOut.flagNoData = False
1039 1035
1040 1036 self.incProfileIndex()
1041 1037 return dataOut
1042 1038
1043 1039 if rangeList != None:
1044 1040
1045 1041 nProfiles = 0
1046 1042
1047 1043 for thisRange in rangeList:
1048 1044 minIndex = thisRange[0]
1049 1045 maxIndex = thisRange[1]
1050 1046
1051 1047 nProfiles += maxIndex - minIndex + 1
1052 1048
1053 1049 for thisRange in rangeList:
1054 1050
1055 1051 minIndex = thisRange[0]
1056 1052 maxIndex = thisRange[1]
1057 1053
1058 1054 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1059 1055
1060 1056 self.nProfiles = nProfiles
1061 1057 dataOut.nProfiles = self.nProfiles
1062 1058 dataOut.profileIndex = self.profileIndex
1063 1059 dataOut.flagNoData = False
1064 1060
1065 1061 self.incProfileIndex()
1066 1062
1067 1063 break
1068 1064
1069 1065 return dataOut
1070 1066
1071 1067
1072 1068 if beam != None: #beam is only for AMISR data
1073 1069 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1074 1070 dataOut.flagNoData = False
1075 1071 dataOut.profileIndex = self.profileIndex
1076 1072
1077 1073 self.incProfileIndex()
1078 1074
1079 1075 return dataOut
1080 1076
1081 1077 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1082 1078
1083 1079
1084 1080 class Reshaper(Operation):
1085 1081
1086 1082 def __init__(self, **kwargs):
1087 1083
1088 1084 Operation.__init__(self, **kwargs)
1089 1085
1090 1086 self.__buffer = None
1091 1087 self.__nitems = 0
1092 1088
1093 1089 def __appendProfile(self, dataOut, nTxs):
1094 1090
1095 1091 if self.__buffer is None:
1096 1092 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1097 1093 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1098 1094
1099 1095 ini = dataOut.nHeights * self.__nitems
1100 1096 end = ini + dataOut.nHeights
1101 1097
1102 1098 self.__buffer[:, ini:end] = dataOut.data
1103 1099
1104 1100 self.__nitems += 1
1105 1101
1106 1102 return int(self.__nitems*nTxs)
1107 1103
1108 1104 def __getBuffer(self):
1109 1105
1110 1106 if self.__nitems == int(1./self.__nTxs):
1111 1107
1112 1108 self.__nitems = 0
1113 1109
1114 1110 return self.__buffer.copy()
1115 1111
1116 1112 return None
1117 1113
1118 1114 def __checkInputs(self, dataOut, shape, nTxs):
1119 1115
1120 1116 if shape is None and nTxs is None:
1121 1117             raise ValueError("Reshaper: shape or nTxs should be defined")
1122 1118
1123 1119 if nTxs:
1124 1120 if nTxs < 0:
1125 1121 raise ValueError("nTxs should be greater than 0")
1126 1122
1127 1123 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1128 1124                 raise ValueError("nProfiles = %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1129 1125
1130 1126 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1131 1127
1132 1128 return shape, nTxs
1133 1129
1134 1130 if len(shape) != 2 and len(shape) != 3:
1135 1131             raise ValueError("shape dimension should be equal to 2 or 3: shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Current shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1136 1132
1137 1133 if len(shape) == 2:
1138 1134 shape_tuple = [dataOut.nChannels]
1139 1135 shape_tuple.extend(shape)
1140 1136 else:
1141 1137 shape_tuple = list(shape)
1142 1138
1143 1139 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1144 1140
1145 1141 return shape_tuple, nTxs
1146 1142
1147 1143 def run(self, dataOut, shape=None, nTxs=None):
1148 1144
1149 1145 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1150 1146
1151 1147 dataOut.flagNoData = True
1152 1148 profileIndex = None
1153 1149
1154 1150 if dataOut.flagDataAsBlock:
1155 1151
1156 1152 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1157 1153 dataOut.flagNoData = False
1158 1154
1159 1155 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1160 1156
1161 1157 else:
1162 1158
1163 1159 if self.__nTxs < 1:
1164 1160
1165 1161 self.__appendProfile(dataOut, self.__nTxs)
1166 1162 new_data = self.__getBuffer()
1167 1163
1168 1164 if new_data is not None:
1169 1165 dataOut.data = new_data
1170 1166 dataOut.flagNoData = False
1171 1167
1172 1168 profileIndex = dataOut.profileIndex*nTxs
1173 1169
1174 1170 else:
1175 1171 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1176 1172
1177 1173 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1178 1174
1179 1175 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1180 1176
1181 1177 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1182 1178
1183 1179 dataOut.profileIndex = profileIndex
1184 1180
1185 1181 dataOut.ippSeconds /= self.__nTxs
1186 1182
1187 1183 return dataOut
1188 1184
1189 1185 class SplitProfiles(Operation):
1190 1186
1191 1187 def __init__(self, **kwargs):
1192 1188
1193 1189 Operation.__init__(self, **kwargs)
1194 1190
1195 1191 def run(self, dataOut, n):
1196 1192
1197 1193 dataOut.flagNoData = True
1198 1194 profileIndex = None
1199 1195
1200 1196 if dataOut.flagDataAsBlock:
1201 1197
1202 1198 #nchannels, nprofiles, nsamples
1203 1199 shape = dataOut.data.shape
1204 1200
1205 1201 if shape[2] % n != 0:
1206 1202                 raise ValueError("Could not split the data, the number of heights (%d) has to be a multiple of n=%d" %(shape[2], n))
1207 1203
1208 1204 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1209 1205
1210 1206 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1211 1207 dataOut.flagNoData = False
1212 1208
1213 1209 profileIndex = int(dataOut.nProfiles/n) - 1
1214 1210
1215 1211 else:
1216 1212
1217 1213             raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1218 1214
1219 1215 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1220 1216
1221 1217 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1222 1218
1223 1219 dataOut.nProfiles = int(dataOut.nProfiles*n)
1224 1220
1225 1221 dataOut.profileIndex = profileIndex
1226 1222
1227 1223 dataOut.ippSeconds /= n
1228 1224
1229 1225 return dataOut
1230 1226
1231 1227 class CombineProfiles(Operation):
1232 1228 def __init__(self, **kwargs):
1233 1229
1234 1230 Operation.__init__(self, **kwargs)
1235 1231
1236 1232 self.__remData = None
1237 1233 self.__profileIndex = 0
1238 1234
1239 1235 def run(self, dataOut, n):
1240 1236
1241 1237 dataOut.flagNoData = True
1242 1238 profileIndex = None
1243 1239
1244 1240 if dataOut.flagDataAsBlock:
1245 1241
1246 1242 #nchannels, nprofiles, nsamples
1247 1243 shape = dataOut.data.shape
1248 1244             new_shape = shape[0], shape[1]//n, shape[2]*n
1249 1245
1250 1246             if shape[1] % n != 0:
1251 1247                 raise ValueError("Could not combine the data, the number of profiles (%d) has to be a multiple of n=%d" %(shape[1], n))
1252 1248
1253 1249 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1254 1250 dataOut.flagNoData = False
1255 1251
1256 1252 profileIndex = int(dataOut.nProfiles*n) - 1
1257 1253
1258 1254 else:
1259 1255
1260 1256 #nchannels, nsamples
1261 1257 if self.__remData is None:
1262 1258 newData = dataOut.data
1263 1259 else:
1264 1260 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1265 1261
1266 1262 self.__profileIndex += 1
1267 1263
1268 1264 if self.__profileIndex < n:
1269 1265 self.__remData = newData
1270 1266 #continue
1271 1267 return
1272 1268
1273 1269 self.__profileIndex = 0
1274 1270 self.__remData = None
1275 1271
1276 1272 dataOut.data = newData
1277 1273 dataOut.flagNoData = False
1278 1274
1279 1275 profileIndex = dataOut.profileIndex/n
1280 1276
1281 1277
1282 1278 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1283 1279
1284 1280 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1285 1281
1286 1282 dataOut.nProfiles = int(dataOut.nProfiles/n)
1287 1283
1288 1284 dataOut.profileIndex = profileIndex
1289 1285
1290 1286 dataOut.ippSeconds *= n
1291 1287
1292 1288 return dataOut
1293 1289
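
# A small sketch of the block-mode reshapes used by SplitProfiles and
# CombineProfiles on data shaped (nChannels, nProfiles, nHeights): splitting by
# n multiplies the number of profiles and divides the number of heights, and
# combining does the reverse. Shapes are illustrative only.
#
# import numpy
# block = numpy.zeros((2, 4, 12))                # (nChannels, nProfiles, nHeights)
# split = block.reshape(2, 4 * 3, 12 // 3)       # SplitProfiles, n=3 -> (2, 12, 4)
# combined = split.reshape(2, 12 // 3, 4 * 3)    # CombineProfiles, n=3 -> (2, 4, 12)
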
1294 1290 class PulsePairVoltage(Operation):
1295 1291 '''
1296 1292 Function PulsePair(Signal Power, Velocity)
1297 1293     Pulse-pair estimation of signal power and velocity.
1298 1294     The real part of Lag[0] provides intensity information.
1299 1295     The phase of Lag[1] provides velocity information.
1300 1296
1301 1297     Configuration Parameters:
1302 1298     nPRF = number of PRFs (profiles) accumulated per estimate
1303 1299     theta = azimuth angle boundaries in degrees
1304 1300 Input:
1305 1301 self.dataOut
1306 1302 lag[N]
1307 1303 Affected:
1308 1304 self.dataOut.spc
1309 1305 '''
1310 1306 isConfig = False
1311 1307 __profIndex = 0
1312 1308 __initime = None
1313 1309 __lastdatatime = None
1314 1310 __buffer = None
1315 1311 noise = None
1316 1312 __dataReady = False
1317 1313 n = None
1318 1314 __nch = 0
1319 1315 __nHeis = 0
1320 1316 removeDC = False
1321 1317 ipp = None
1322 1318 lambda_ = 0
1323 1319
1324 1320 def __init__(self,**kwargs):
1325 1321 Operation.__init__(self,**kwargs)
1326 1322
1327 1323 def setup(self, dataOut, n = None, removeDC=False):
1328 1324 '''
1329 1325 n= Numero de PRF's de entrada
1330 1326         n = number of input PRFs (profiles) per estimate
1331 1327 self.__initime = None
1332 1328 self.__lastdatatime = 0
1333 1329 self.__dataReady = False
1334 1330 self.__buffer = 0
1335 1331 self.__profIndex = 0
1336 1332 self.noise = None
1337 1333 self.__nch = dataOut.nChannels
1338 1334 self.__nHeis = dataOut.nHeights
1339 1335 self.removeDC = removeDC
1340 1336 self.lambda_ = 3.0e8/(9345.0e6)
1341 1337 self.ippSec = dataOut.ippSeconds
1342 1338 self.nCohInt = dataOut.nCohInt
1343 1339 print("IPPseconds",dataOut.ippSeconds)
1344 1340
1345 1341         print("The value of n is:", n)
1346 1342 if n == None:
1347 1343 raise ValueError("n should be specified.")
1348 1344
1349 1345 if n != None:
1350 1346 if n<2:
1351 1347                 raise ValueError("n should be at least 2")
1352 1348
1353 1349 self.n = n
1354 1350 self.__nProf = n
1355 1351
1356 1352 self.__buffer = numpy.zeros((dataOut.nChannels,
1357 1353 n,
1358 1354 dataOut.nHeights),
1359 1355 dtype='complex')
1360 1356
1361 1357 def putData(self,data):
1362 1358 '''
1363 1359         Add a profile to __buffer and increment __profIndex by one
1364 1360 '''
1365 1361 self.__buffer[:,self.__profIndex,:]= data
1366 1362 self.__profIndex += 1
1367 1363 return
1368 1364
1369 1365 def pushData(self,dataOut):
1370 1366 '''
1371 1367 Return the PULSEPAIR and the profiles used in the operation
1372 1368 Affected : self.__profileIndex
1373 1369 '''
1374 1370 #----------------- Remove DC-----------------------------------
1375 1371 if self.removeDC==True:
1376 1372 mean = numpy.mean(self.__buffer,1)
1377 1373 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1378 1374 dc= numpy.tile(tmp,[1,self.__nProf,1])
1379 1375 self.__buffer = self.__buffer - dc
1380 1376         #------------------ Power computation ------------------------
1381 1377 pair0 = self.__buffer*numpy.conj(self.__buffer)
1382 1378 pair0 = pair0.real
1383 1379 lag_0 = numpy.sum(pair0,1)
1384 1380         #------------------ Per-channel noise estimation --------------------
1385 1381 self.noise = numpy.zeros(self.__nch)
1386 1382 for i in range(self.__nch):
1387 1383 daux = numpy.sort(pair0[i,:,:],axis= None)
1388 1384 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1389 1385
1390 1386 self.noise = self.noise.reshape(self.__nch,1)
1391 1387 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1392 1388 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1393 1389 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1394 1390         #------------------ Received power = P, signal power = S, noise = N --
1395 1391 #------------------ P= S+N ,P=lag_0/N ---------------------------------
1396 1392 #-------------------- Power --------------------------------------------------
1397 1393 data_power = lag_0/(self.n*self.nCohInt)
1398 1394         #------------------ Signal ---------------------------------------------------
1399 1395 data_intensity = pair0 - noise_buffer
1400 1396 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1401 1397 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1402 1398 for i in range(self.__nch):
1403 1399 for j in range(self.__nHeis):
1404 1400 if data_intensity[i][j] < 0:
1405 1401 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1406 1402
1407 1403         #----------------- Doppler frequency and velocity computation --------
1408 1404 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1409 1405 lag_1 = numpy.sum(pair1,1)
1410 1406 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1411 1407 data_velocity = (self.lambda_/2.0)*data_freq
1412 1408
1413 1409         #---------------- Estimated mean signal power -----------
1414 1410 lag_0 = lag_0/self.n
1415 1411 S = lag_0-self.noise
1416 1412
1417 1413         #---------------- Mean Doppler frequency ---------------------
1418 1414 lag_1 = lag_1/(self.n-1)
1419 1415 R1 = numpy.abs(lag_1)
1420 1416
1421 1417         #---------------- SNR computation ----------------------------------
1422 1418 data_snrPP = S/self.noise
1423 1419 for i in range(self.__nch):
1424 1420 for j in range(self.__nHeis):
1425 1421 if data_snrPP[i][j] < 1.e-20:
1426 1422 data_snrPP[i][j] = 1.e-20
1427 1423
1428 1424         #----------------- Spectral width computation ----------------------
1429 1425 L = S/R1
1430 1426 L = numpy.where(L<0,1,L)
1431 1427 L = numpy.log(L)
1432 1428 tmp = numpy.sqrt(numpy.absolute(L))
1433 1429 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1434 1430 n = self.__profIndex
1435 1431
1436 1432 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1437 1433 self.__profIndex = 0
1438 1434 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1439 1435
1440 1436
1441 1437 def pulsePairbyProfiles(self,dataOut):
1442 1438
1443 1439 self.__dataReady = False
1444 1440 data_power = None
1445 1441 data_intensity = None
1446 1442 data_velocity = None
1447 1443 data_specwidth = None
1448 1444 data_snrPP = None
1449 1445 self.putData(data=dataOut.data)
1450 1446 if self.__profIndex == self.n:
1451 1447 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1452 1448 self.__dataReady = True
1453 1449
1454 1450 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1455 1451
1456 1452
1457 1453 def pulsePairOp(self, dataOut, datatime= None):
1458 1454
1459 1455 if self.__initime == None:
1460 1456 self.__initime = datatime
1461 1457 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1462 1458 self.__lastdatatime = datatime
1463 1459
1464 1460 if data_power is None:
1465 1461 return None, None, None,None,None,None
1466 1462
1467 1463 avgdatatime = self.__initime
1468 1464 deltatime = datatime - self.__lastdatatime
1469 1465 self.__initime = datatime
1470 1466
1471 1467 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1472 1468
1473 1469 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1474 1470
1475 1471 if not self.isConfig:
1476 1472 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1477 1473 self.isConfig = True
1478 1474 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1479 1475 dataOut.flagNoData = True
1480 1476
1481 1477 if self.__dataReady:
1482 1478 dataOut.nCohInt *= self.n
1483 1479 dataOut.dataPP_POW = data_intensity # S
1484 1480 dataOut.dataPP_POWER = data_power # P
1485 1481 dataOut.dataPP_DOP = data_velocity
1486 1482 dataOut.dataPP_SNR = data_snrPP
1487 1483 dataOut.dataPP_WIDTH = data_specwidth
1488 1484         dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time step
1489 1485 dataOut.utctime = avgdatatime
1490 1486 dataOut.flagNoData = False
1491 1487 return dataOut
1492 1488
1493 1489
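
# A compact numpy sketch of the pulse-pair estimators computed in pushData():
# lag-0 gives power and the phase of lag-1 gives the Doppler frequency, hence
# the radial velocity. The wavelength, IPP and Doppler shift assumed here are
# illustrative values, not the radar's actual parameters.
#
# import numpy, math
# ippSec, nCohInt, lambda_ = 1e-3, 1, 0.032                  # illustrative values
# n, nHeis = 16, 50
# t = numpy.arange(n)[:, None] * ippSec
# x = numpy.exp(2j * numpy.pi * 100.0 * t) * numpy.ones((n, nHeis))   # 100 Hz Doppler
# lag0 = numpy.sum(numpy.abs(x) ** 2, axis=0)                         # power estimate
# lag1 = numpy.sum(x[:-1] * numpy.conj(x[1:]), axis=0)
# freq = (-1 / (2.0 * math.pi * ippSec * nCohInt)) * numpy.angle(lag1)  # ~100 Hz
# vel = (lambda_ / 2.0) * freq                                          # ~1.6 m/s
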
1494 1490
1495 1491 # import collections
1496 1492 # from scipy.stats import mode
1497 1493 #
1498 1494 # class Synchronize(Operation):
1499 1495 #
1500 1496 # isConfig = False
1501 1497 # __profIndex = 0
1502 1498 #
1503 1499 # def __init__(self, **kwargs):
1504 1500 #
1505 1501 # Operation.__init__(self, **kwargs)
1506 1502 # # self.isConfig = False
1507 1503 # self.__powBuffer = None
1508 1504 # self.__startIndex = 0
1509 1505 # self.__pulseFound = False
1510 1506 #
1511 1507 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1512 1508 #
1513 1509 # #Read data
1514 1510 #
1515 1511 # powerdB = dataOut.getPower(channel = channel)
1516 1512 # noisedB = dataOut.getNoise(channel = channel)[0]
1517 1513 #
1518 1514 # self.__powBuffer.extend(powerdB.flatten())
1519 1515 #
1520 1516 # dataArray = numpy.array(self.__powBuffer)
1521 1517 #
1522 1518 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1523 1519 #
1524 1520 # maxValue = numpy.nanmax(filteredPower)
1525 1521 #
1526 1522 # if maxValue < noisedB + 10:
1527 1523 #            #No transmit pulse was found
1528 1524 # return None
1529 1525 #
1530 1526 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1531 1527 #
1532 1528 # if len(maxValuesIndex) < 2:
1533 1529 #            #Only a single one-baud transmit pulse was found, waiting for the next TX
1534 1530 # return None
1535 1531 #
1536 1532 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1537 1533 #
1538 1534 #        #Select only values spaced nSamples apart
1539 1535 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1540 1536 #
1541 1537 # if len(pulseIndex) < 2:
1542 1538 #            #Only one transmit pulse wider than 1 was found
1543 1539 # return None
1544 1540 #
1545 1541 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1546 1542 #
1547 1543 #        #remove signals spaced less than 10 units or samples apart
1548 1544 #        #(IPPs shorter than 10 units should not exist)
1549 1545 #
1550 1546 # realIndex = numpy.where(spacing > 10 )[0]
1551 1547 #
1552 1548 # if len(realIndex) < 2:
1553 1549 #            #Only one transmit pulse wider than 1 was found
1554 1550 # return None
1555 1551 #
1556 1552 #        #Remove wide pulses (keep only the spacing between IPPs)
1557 1553 # realPulseIndex = pulseIndex[realIndex]
1558 1554 #
1559 1555 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1560 1556 #
1561 1557 # print "IPP = %d samples" %period
1562 1558 #
1563 1559 # self.__newNSamples = dataOut.nHeights #int(period)
1564 1560 # self.__startIndex = int(realPulseIndex[0])
1565 1561 #
1566 1562 # return 1
1567 1563 #
1568 1564 #
1569 1565 # def setup(self, nSamples, nChannels, buffer_size = 4):
1570 1566 #
1571 1567 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1572 1568 # maxlen = buffer_size*nSamples)
1573 1569 #
1574 1570 # bufferList = []
1575 1571 #
1576 1572 # for i in range(nChannels):
1577 1573 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1578 1574 # maxlen = buffer_size*nSamples)
1579 1575 #
1580 1576 # bufferList.append(bufferByChannel)
1581 1577 #
1582 1578 # self.__nSamples = nSamples
1583 1579 # self.__nChannels = nChannels
1584 1580 # self.__bufferList = bufferList
1585 1581 #
1586 1582 # def run(self, dataOut, channel = 0):
1587 1583 #
1588 1584 # if not self.isConfig:
1589 1585 # nSamples = dataOut.nHeights
1590 1586 # nChannels = dataOut.nChannels
1591 1587 # self.setup(nSamples, nChannels)
1592 1588 # self.isConfig = True
1593 1589 #
1594 1590 # #Append new data to internal buffer
1595 1591 # for thisChannel in range(self.__nChannels):
1596 1592 # bufferByChannel = self.__bufferList[thisChannel]
1597 1593 # bufferByChannel.extend(dataOut.data[thisChannel])
1598 1594 #
1599 1595 # if self.__pulseFound:
1600 1596 # self.__startIndex -= self.__nSamples
1601 1597 #
1602 1598 # #Finding Tx Pulse
1603 1599 # if not self.__pulseFound:
1604 1600 # indexFound = self.__findTxPulse(dataOut, channel)
1605 1601 #
1606 1602 # if indexFound == None:
1607 1603 # dataOut.flagNoData = True
1608 1604 # return
1609 1605 #
1610 1606 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1611 1607 # self.__pulseFound = True
1612 1608 # self.__startIndex = indexFound
1613 1609 #
1614 1610 # #If pulse was found ...
1615 1611 # for thisChannel in range(self.__nChannels):
1616 1612 # bufferByChannel = self.__bufferList[thisChannel]
1617 1613 # #print self.__startIndex
1618 1614 # x = numpy.array(bufferByChannel)
1619 1615 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1620 1616 #
1621 1617 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1622 1618 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1623 1619 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1624 1620 #
1625 1621 # dataOut.data = self.__arrayBuffer
1626 1622 #
1627 1623 # self.__startIndex += self.__newNSamples
1628 1624 #
1629 1625 # return