Reading and processing of spectra from HDF5
joabAM -
r1404:b1b398996ebb

The requested changes are too big and content was truncated.

@@ -1,1073 +1,1077
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of diferent Data objects for different types of data
6 6
7 7 Here you will find the diferent data objects for the different types
8 8 of data, this data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
24 24 def getNumpyDtype(dataTypeCode):
25 25
26 26 if dataTypeCode == 0:
27 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
28 28 elif dataTypeCode == 1:
29 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
30 30 elif dataTypeCode == 2:
31 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
32 32 elif dataTypeCode == 3:
33 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
34 34 elif dataTypeCode == 4:
35 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36 elif dataTypeCode == 5:
37 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
38 38 else:
39 39 raise ValueError('dataTypeCode was not defined')
40 40
41 41 return numpyDtype
42 42
43 43
44 44 def getDataTypeCode(numpyDtype):
45 45
46 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
47 47 datatype = 0
48 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
49 49 datatype = 1
50 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
51 51 datatype = 2
52 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
53 53 datatype = 3
54 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
55 55 datatype = 4
56 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
57 57 datatype = 5
58 58 else:
59 59 datatype = None
60 60
61 61 return datatype
62 62
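As a quick round-trip check of the two helpers above (a sketch; it assumes they are imported from this module, schainpy.model.data.jrodata):

import numpy
from schainpy.model.data.jrodata import getNumpyDtype, getDataTypeCode

# dataTypeCode 4 corresponds to complex samples stored as two little-endian float32 fields
dt = getNumpyDtype(4)
assert dt == numpy.dtype([('real', '<f4'), ('imag', '<f4')])
assert getDataTypeCode(dt) == 4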
63 63
64 64 def hildebrand_sekhon(data, navg):
65 65 """
66 66 This method is for the objective determination of the noise level in Doppler spectra. The
67 67 implementation is based on the fact that, for white Gaussian noise, the standard deviation of the
68 68 spectral densities is equal to the mean spectral density.
69 69
70 70 Inputs:
71 71 data : power or spectral density values (flattened and sorted internally)
72 72 navg : number of averages
73 73
74 74 Return:
75 75 mean : noise level
76 76 """
77 77
78 78 sortdata = numpy.sort(data, axis=None)
79 79 '''
80 80 lenOfData = len(sortdata)
81 81 nums_min = lenOfData*0.2
82 82
83 83 if nums_min <= 5:
84 84
85 85 nums_min = 5
86 86
87 87 sump = 0.
88 88 sumq = 0.
89 89
90 90 j = 0
91 91 cont = 1
92 92
93 93 while((cont == 1)and(j < lenOfData)):
94 94
95 95 sump += sortdata[j]
96 96 sumq += sortdata[j]**2
97 97
98 98 if j > nums_min:
99 99 rtest = float(j)/(j-1) + 1.0/navg
100 100 if ((sumq*j) > (rtest*sump**2)):
101 101 j = j - 1
102 102 sump = sump - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 j += 1
107 107
108 108 lnoise = sump / j
109 109 '''
110 110 return _noise.hildebrand_sekhon(sortdata, navg)
111 111
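A minimal sanity check of the estimator on synthetic white-noise spectra (illustrative values; it assumes the compiled _noise extension is available):

import numpy

rng = numpy.random.default_rng(0)
navg = 10
# navg incoherent averages of exponentially distributed spectral densities (unit mean)
spectrum = rng.exponential(scale=1.0, size=(navg, 64, 100)).mean(axis=0)
noise_level = hildebrand_sekhon(spectrum, navg)
print(noise_level)  # expected to be close to the true mean density of 1.0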
112 112
113 113 class Beam:
114 114
115 115 def __init__(self):
116 116 self.codeList = []
117 117 self.azimuthList = []
118 118 self.zenithList = []
119 119
120 120
121 121
122 122 class GenericData(object):
123 123
124 124 flagNoData = True
125 125
126 126 def copy(self, inputObj=None):
127 127
128 128 if inputObj == None:
129 129 return copy.deepcopy(self)
130 130
131 131 for key in list(inputObj.__dict__.keys()):
132 132
133 133 attribute = inputObj.__dict__[key]
134 134
135 135 # If this attribute is a tuple or list
136 136 if type(inputObj.__dict__[key]) in (tuple, list):
137 137 self.__dict__[key] = attribute[:]
138 138 continue
139 139
140 140 # If this attribute is another object or instance
141 141 if hasattr(attribute, '__dict__'):
142 142 self.__dict__[key] = attribute.copy()
143 143 continue
144 144
145 145 self.__dict__[key] = inputObj.__dict__[key]
146 146
147 147 def deepcopy(self):
148 148
149 149 return copy.deepcopy(self)
150 150
151 151 def isEmpty(self):
152 152
153 153 return self.flagNoData
154 154
155 155 def isReady(self):
156 156
157 157 return not self.flagNoData
158 158
159 159
160 160 class JROData(GenericData):
161 161
162 162 systemHeaderObj = SystemHeader()
163 163 radarControllerHeaderObj = RadarControllerHeader()
164 164 type = None
165 165 datatype = None # dtype but in string
166 166 nProfiles = None
167 167 heightList = None
168 168 channelList = None
169 169 flagDiscontinuousBlock = False
170 170 useLocalTime = False
171 171 utctime = None
172 172 timeZone = None
173 173 dstFlag = None
174 174 errorCount = None
175 175 blocksize = None
176 176 flagDecodeData = False # we assume the data is not decoded
177 177 flagDeflipData = False # we assume the data has not been deflipped
178 178 flagShiftFFT = False
179 179 nCohInt = None
180 180 windowOfFilter = 1
181 181 C = 3e8
182 182 frequency = 49.92e6
183 183 realtime = False
184 184 beacon_heiIndexList = None
185 185 last_block = None
186 186 blocknow = None
187 187 azimuth = None
188 188 zenith = None
189 189 beam = Beam()
190 190 profileIndex = None
191 191 error = None
192 192 data = None
193 193 nmodes = None
194 194 metadata_list = ['heightList', 'timeZone', 'type']
195 195 codeList = None
196 196 azimuthList = None
197 197 elevationList = None
198 198
199 199 def __str__(self):
200 200
201 201 return '{} - {}'.format(self.type, self.datatime)
202 202
203 203 def getNoise(self):
204 204
205 205 raise NotImplementedError
206 206
207 207 @property
208 208 def nChannels(self):
209 209
210 210 return len(self.channelList)
211 211
212 212 @property
213 213 def channelIndexList(self):
214 214
215 215 return list(range(self.nChannels))
216 216
217 217 @property
218 218 def nHeights(self):
219 219
220 220 return len(self.heightList)
221 221
222 222 def getDeltaH(self):
223 223
224 224 return self.heightList[1] - self.heightList[0]
225 225
226 226 @property
227 227 def ltctime(self):
228 228
229 229 if self.useLocalTime:
230 230 return self.utctime - self.timeZone * 60
231 231
232 232 return self.utctime
233 233
234 234 @property
235 235 def datatime(self):
236 236
237 237 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
238 238 return datatimeValue
239 239
240 240 def getTimeRange(self):
241 241
242 242 datatime = []
243 243
244 244 datatime.append(self.ltctime)
245 245 datatime.append(self.ltctime + self.timeInterval + 1)
246 246
247 247 datatime = numpy.array(datatime)
248 248
249 249 return datatime
250 250
251 251 def getFmaxTimeResponse(self):
252 252
253 253 period = (10**-6) * self.getDeltaH() / (0.15)
254 254
255 255 PRF = 1. / (period * self.nCohInt)
256 256
257 257 fmax = PRF
258 258
259 259 return fmax
260 260
261 261 def getFmax(self):
262 262 PRF = 1. / (self.ippSeconds * self.nCohInt)
263 263
264 264 fmax = PRF
265 265 return fmax
266 266
267 267 def getVmax(self):
268 268
269 269 _lambda = self.C / self.frequency
270 270
271 271 vmax = self.getFmax() * _lambda / 2
272 272
273 273 return vmax
274 274
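As a worked example of getFmax() and getVmax() (numbers are illustrative only; real values come from the radar controller header):

C, frequency = 3e8, 49.92e6          # defaults used by JROData
ippSeconds, nCohInt = 1e-3, 1
fmax = 1.0 / (ippSeconds * nCohInt)  # 1000.0 Hz, as returned by getFmax()
vmax = fmax * (C / frequency) / 2    # ~3004.8 m/s, as returned by getVmax()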
275 275 @property
276 276 def ippSeconds(self):
277 277 '''
278 278 '''
279 279 return self.radarControllerHeaderObj.ippSeconds
280 280
281 281 @ippSeconds.setter
282 282 def ippSeconds(self, ippSeconds):
283 283 '''
284 284 '''
285 285 self.radarControllerHeaderObj.ippSeconds = ippSeconds
286 286
287 287 @property
288 288 def code(self):
289 289 '''
290 290 '''
291 291 return self.radarControllerHeaderObj.code
292 292
293 293 @code.setter
294 294 def code(self, code):
295 295 '''
296 296 '''
297 297 self.radarControllerHeaderObj.code = code
298 298
299 299 @property
300 300 def nCode(self):
301 301 '''
302 302 '''
303 303 return self.radarControllerHeaderObj.nCode
304 304
305 305 @nCode.setter
306 306 def nCode(self, ncode):
307 307 '''
308 308 '''
309 309 self.radarControllerHeaderObj.nCode = ncode
310 310
311 311 @property
312 312 def nBaud(self):
313 313 '''
314 314 '''
315 315 return self.radarControllerHeaderObj.nBaud
316 316
317 317 @nBaud.setter
318 318 def nBaud(self, nbaud):
319 319 '''
320 320 '''
321 321 self.radarControllerHeaderObj.nBaud = nbaud
322 322
323 323 @property
324 324 def ipp(self):
325 325 '''
326 326 '''
327 327 return self.radarControllerHeaderObj.ipp
328 328
329 329 @ipp.setter
330 330 def ipp(self, ipp):
331 331 '''
332 332 '''
333 333 self.radarControllerHeaderObj.ipp = ipp
334 334
335 335 @property
336 336 def metadata(self):
337 337 '''
338 338 '''
339 339
340 340 return {attr: getattr(self, attr) for attr in self.metadata_list}
341 341
342 342
343 343 class Voltage(JROData):
344 344
345 345 dataPP_POW = None
346 346 dataPP_DOP = None
347 347 dataPP_WIDTH = None
348 348 dataPP_SNR = None
349 349
350 350 def __init__(self):
351 351 '''
352 352 Constructor
353 353 '''
354 354
355 355 self.useLocalTime = True
356 356 self.radarControllerHeaderObj = RadarControllerHeader()
357 357 self.systemHeaderObj = SystemHeader()
358 358 self.type = "Voltage"
359 359 self.data = None
360 360 self.nProfiles = None
361 361 self.heightList = None
362 362 self.channelList = None
363 363 self.flagNoData = True
364 364 self.flagDiscontinuousBlock = False
365 365 self.utctime = None
366 366 self.timeZone = 0
367 367 self.dstFlag = None
368 368 self.errorCount = None
369 369 self.nCohInt = None
370 370 self.blocksize = None
371 371 self.flagCohInt = False
372 372 self.flagDecodeData = False # we assume the data is not decoded
373 373 self.flagDeflipData = False # we assume the data has not been deflipped
374 374 self.flagShiftFFT = False
375 375 self.flagDataAsBlock = False # we assume the data is read profile by profile
376 376 self.profileIndex = 0
377 377 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
378 378 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
379 379
380 380 def getNoisebyHildebrand(self, channel=None):
381 381 """
382 382 Determine the noise level using the Hildebrand-Sekhon method
383 383
384 384 Return:
385 385 noiselevel
386 386 """
387 387
388 388 if channel != None:
389 389 data = self.data[channel]
390 390 nChannels = 1
391 391 else:
392 392 data = self.data
393 393 nChannels = self.nChannels
394 394
395 395 noise = numpy.zeros(nChannels)
396 396 power = data * numpy.conjugate(data)
397 397
398 398 for thisChannel in range(nChannels):
399 399 if nChannels == 1:
400 400 daux = power[:].real
401 401 else:
402 402 daux = power[thisChannel, :].real
403 403 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
404 404
405 405 return noise
406 406
407 407 def getNoise(self, type=1, channel=None):
408 408
409 409 if type == 1:
410 410 noise = self.getNoisebyHildebrand(channel)
411 411
412 412 return noise
413 413
414 414 def getPower(self, channel=None):
415 415
416 416 if channel != None:
417 417 data = self.data[channel]
418 418 else:
419 419 data = self.data
420 420
421 421 power = data * numpy.conjugate(data)
422 422 powerdB = 10 * numpy.log10(power.real)
423 423 powerdB = numpy.squeeze(powerdB)
424 424
425 425 return powerdB
426 426
427 427 @property
428 428 def timeInterval(self):
429 429
430 430 return self.ippSeconds * self.nCohInt
431 431
432 432 noise = property(getNoise, doc="I'm the 'noise' property.")
433 433
434 434
435 435 class Spectra(JROData):
436 436
437 437 def __init__(self):
438 438 '''
439 439 Constructor
440 440 '''
441 441
442 442 self.data_dc = None
443 443 self.data_spc = None
444 444 self.data_cspc = None
445 445 self.useLocalTime = True
446 446 self.radarControllerHeaderObj = RadarControllerHeader()
447 447 self.systemHeaderObj = SystemHeader()
448 448 self.type = "Spectra"
449 449 self.timeZone = 0
450 450 self.nProfiles = None
451 451 self.heightList = None
452 452 self.channelList = None
453 453 self.pairsList = None
454 454 self.flagNoData = True
455 455 self.flagDiscontinuousBlock = False
456 456 self.utctime = None
457 457 self.nCohInt = None
458 458 self.nIncohInt = None
459 459 self.blocksize = None
460 460 self.nFFTPoints = None
461 461 self.wavelength = None
462 462 self.flagDecodeData = False # we assume the data is not decoded
463 463 self.flagDeflipData = False # we assume the data has not been deflipped
464 464 self.flagShiftFFT = False
465 465 self.ippFactor = 1
466 466 self.beacon_heiIndexList = []
467 467 self.noise_estimation = None
468 self.codeList = []
469 self.azimuthList = []
470 self.elevationList = []
468 471 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
469 472 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
470 473
471 474
472 475 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
473 476 """
474 477 Determine the noise level using the Hildebrand-Sekhon method
475 478
476 479 Return:
477 480 noiselevel
478 481 """
479 482
480 483 noise = numpy.zeros(self.nChannels)
481 484 for channel in range(self.nChannels):
482 485 daux = self.data_spc[channel,
483 486 xmin_index:xmax_index, ymin_index:ymax_index]
484 487 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
485 488
486 489 return noise
487 490
488 491 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
489 492
490 493 if self.noise_estimation is not None:
491 494 # this was estimated by getNoise Operation defined in jroproc_spectra.py
492 495 return self.noise_estimation
493 496 else:
494 497 noise = self.getNoisebyHildebrand(
495 498 xmin_index, xmax_index, ymin_index, ymax_index)
496 499 return noise
497 500
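The precedence above, sketched on an already-populated Spectra object (dataOut is an assumed name, values illustrative):

dataOut.noise_estimation = None
n_hs = dataOut.getNoise()                   # falls back to the Hildebrand-Sekhon estimate over data_spc
dataOut.noise_estimation = n_hs             # normally set by the getNoise Operation in jroproc_spectra.py
assert (dataOut.getNoise() == n_hs).all()   # a stored estimate is returned unchanged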
498 501 def getFreqRangeTimeResponse(self, extrapoints=0):
499 502
500 503 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
501 504 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
502 505
503 506 return freqrange
504 507
505 508 def getAcfRange(self, extrapoints=0):
506 509
507 510 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
508 511 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
509 512
510 513 return freqrange
511 514
512 515 def getFreqRange(self, extrapoints=0):
513 516
514 517 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
515 518 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
516 519
517 520 return freqrange
518 521
519 522 def getVelRange(self, extrapoints=0):
520 523
521 524 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
522 525 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
523 526
524 527 if self.nmodes:
525 528 return velrange/self.nmodes
526 529 else:
527 530 return velrange
528 531
529 532 @property
530 533 def nPairs(self):
531 534
532 535 return len(self.pairsList)
533 536
534 537 @property
535 538 def pairsIndexList(self):
536 539
537 540 return list(range(self.nPairs))
538 541
539 542 @property
540 543 def normFactor(self):
541 544
542 545 pwcode = 1
543 546
544 547 if self.flagDecodeData:
545 548 pwcode = numpy.sum(self.code[0]**2)
546 549 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
547 550 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
548 551
549 552 return normFactor
550 553
551 554 @property
552 555 def flag_cspc(self):
553 556
554 557 if self.data_cspc is None:
555 558 return True
556 559
557 560 return False
558 561
559 562 @property
560 563 def flag_dc(self):
561 564
562 565 if self.data_dc is None:
563 566 return True
564 567
565 568 return False
566 569
567 570 @property
568 571 def timeInterval(self):
569 572
570 573 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
571 574 if self.nmodes:
572 575 return self.nmodes*timeInterval
573 576 else:
574 577 return timeInterval
575 578
576 579 def getPower(self):
577 580
578 581 factor = self.normFactor
579 582 z = self.data_spc / factor
580 583 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
581 584 avg = numpy.average(z, axis=1)
582 585
583 586 return 10 * numpy.log10(avg)
584 587
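For reference, the same normalization applied to a raw spectra array, mirroring normFactor and getPower above (a sketch with illustrative shapes and integration factors):

import numpy

data_spc = numpy.abs(numpy.random.randn(2, 64, 100))**2   # channels x nFFTPoints x heights
nProfiles, nIncohInt, nCohInt = 64, 10, 1
pwcode, windowOfFilter = 1, 1                             # pwcode = sum(code[0]**2) when decoding is applied
normFactor = nProfiles * nIncohInt * nCohInt * pwcode * windowOfFilter
powerdB = 10 * numpy.log10(numpy.average(data_spc / normFactor, axis=1))  # channels x heights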
585 588 def getCoherence(self, pairsList=None, phase=False):
586 589
587 590 z = []
588 591 if pairsList is None:
589 592 pairsIndexList = self.pairsIndexList
590 593 else:
591 594 pairsIndexList = []
592 595 for pair in pairsList:
593 596 if pair not in self.pairsList:
594 597 raise ValueError("Pair %s is not in dataOut.pairsList" % (
595 598 pair))
596 599 pairsIndexList.append(self.pairsList.index(pair))
597 600 for i in range(len(pairsIndexList)):
598 601 pair = self.pairsList[pairsIndexList[i]]
599 602 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
600 603 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
601 604 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
602 605 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
603 606 if phase:
604 607 data = numpy.arctan2(avgcoherenceComplex.imag,
605 608 avgcoherenceComplex.real) * 180 / numpy.pi
606 609 else:
607 610 data = numpy.abs(avgcoherenceComplex)
608 611
609 612 z.append(data)
610 613
611 614 return numpy.array(z)
612 615
613 616 def setValue(self, value):
614 617
615 618 print("This property should not be initialized")
616 619
617 620 return
618 621
619 622 noise = property(getNoise, setValue, doc="I'm the 'noise' property.")
620 623
621 624
622 625 class SpectraHeis(Spectra):
623 626
624 627 def __init__(self):
625 628
626 629 self.radarControllerHeaderObj = RadarControllerHeader()
627 630 self.systemHeaderObj = SystemHeader()
628 631 self.type = "SpectraHeis"
629 632 self.nProfiles = None
630 633 self.heightList = None
631 634 self.channelList = None
632 635 self.flagNoData = True
633 636 self.flagDiscontinuousBlock = False
634 637 self.utctime = None
635 638 self.blocksize = None
636 639 self.profileIndex = 0
637 640 self.nCohInt = 1
638 641 self.nIncohInt = 1
639 642
640 643 @property
641 644 def normFactor(self):
642 645 pwcode = 1
643 646 if self.flagDecodeData:
644 647 pwcode = numpy.sum(self.code[0]**2)
645 648
646 649 normFactor = self.nIncohInt * self.nCohInt * pwcode
647 650
648 651 return normFactor
649 652
650 653 @property
651 654 def timeInterval(self):
652 655
653 656 return self.ippSeconds * self.nCohInt * self.nIncohInt
654 657
655 658
656 659 class Fits(JROData):
657 660
658 661 def __init__(self):
659 662
660 663 self.type = "Fits"
661 664 self.nProfiles = None
662 665 self.heightList = None
663 666 self.channelList = None
664 667 self.flagNoData = True
665 668 self.utctime = None
666 669 self.nCohInt = 1
667 670 self.nIncohInt = 1
668 671 self.useLocalTime = True
669 672 self.profileIndex = 0
670 673 self.timeZone = 0
671 674
672 675 def getTimeRange(self):
673 676
674 677 datatime = []
675 678
676 679 datatime.append(self.ltctime)
677 680 datatime.append(self.ltctime + self.timeInterval)
678 681
679 682 datatime = numpy.array(datatime)
680 683
681 684 return datatime
682 685
683 686 def getChannelIndexList(self):
684 687
685 688 return list(range(self.nChannels))
686 689
687 690 def getNoise(self, type=1):
688 691
689 692
690 693 if type == 1:
691 694 noise = self.getNoisebyHildebrand()
692 695
693 696 if type == 2:
694 697 noise = self.getNoisebySort()
695 698
696 699 if type == 3:
697 700 noise = self.getNoisebyWindow()
698 701
699 702 return noise
700 703
701 704 @property
702 705 def timeInterval(self):
703 706
704 707 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
705 708
706 709 return timeInterval
707 710
708 711 @property
709 712 def ippSeconds(self):
710 713 '''
711 714 '''
712 715 return self.ipp_sec
713 716
714 717 noise = property(getNoise, doc="I'm the 'noise' property.")
715 718
716 719
717 720 class Correlation(JROData):
718 721
719 722 def __init__(self):
720 723 '''
721 724 Constructor
722 725 '''
723 726 self.radarControllerHeaderObj = RadarControllerHeader()
724 727 self.systemHeaderObj = SystemHeader()
725 728 self.type = "Correlation"
726 729 self.data = None
727 730 self.dtype = None
728 731 self.nProfiles = None
729 732 self.heightList = None
730 733 self.channelList = None
731 734 self.flagNoData = True
732 735 self.flagDiscontinuousBlock = False
733 736 self.utctime = None
734 737 self.timeZone = 0
735 738 self.dstFlag = None
736 739 self.errorCount = None
737 740 self.blocksize = None
738 741 self.flagDecodeData = False # we assume the data is not decoded
739 742 self.flagDeflipData = False # we assume the data has not been deflipped
740 743 self.pairsList = None
741 744 self.nPoints = None
742 745
743 746 def getPairsList(self):
744 747
745 748 return self.pairsList
746 749
747 750 def getNoise(self, mode=2):
748 751
749 752 indR = numpy.where(self.lagR == 0)[0][0]
750 753 indT = numpy.where(self.lagT == 0)[0][0]
751 754
752 755 jspectra0 = self.data_corr[:, :, indR, :]
753 756 jspectra = copy.copy(jspectra0)
754 757
755 758 num_chan = jspectra.shape[0]
756 759 num_hei = jspectra.shape[2]
757 760
758 761 freq_dc = jspectra.shape[1] // 2
759 762 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
760 763
761 764 if ind_vel[0] < 0:
762 765 ind_vel[list(range(0, 1))] = ind_vel[list(
763 766 range(0, 1))] + self.num_prof
764 767
765 768 if mode == 1:
766 769 jspectra[:, freq_dc, :] = (
767 770 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
768 771
769 772 if mode == 2:
770 773
771 774 vel = numpy.array([-2, -1, 1, 2])
772 775 xx = numpy.zeros([4, 4])
773 776
774 777 for fil in range(4):
775 778 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
776 779
777 780 xx_inv = numpy.linalg.inv(xx)
778 781 xx_aux = xx_inv[0, :]
779 782
780 783 for ich in range(num_chan):
781 784 yy = jspectra[ich, ind_vel, :]
782 785 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
783 786
784 787 junkid = jspectra[ich, freq_dc, :] <= 0
785 788 cjunkid = sum(junkid)
786 789
787 790 if cjunkid.any():
788 791 jspectra[ich, freq_dc, junkid.nonzero()] = (
789 792 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
790 793
791 794 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
792 795
793 796 return noise
794 797
795 798 @property
796 799 def timeInterval(self):
797 800
798 801 return self.ippSeconds * self.nCohInt * self.nProfiles
799 802
800 803 def splitFunctions(self):
801 804
802 805 pairsList = self.pairsList
803 806 ccf_pairs = []
804 807 acf_pairs = []
805 808 ccf_ind = []
806 809 acf_ind = []
807 810 for l in range(len(pairsList)):
808 811 chan0 = pairsList[l][0]
809 812 chan1 = pairsList[l][1]
810 813
811 814 # Getting autocorrelation pairs
812 815 if chan0 == chan1:
813 816 acf_pairs.append(chan0)
814 817 acf_ind.append(l)
815 818 else:
816 819 ccf_pairs.append(pairsList[l])
817 820 ccf_ind.append(l)
818 821
819 822 data_acf = self.data_cf[acf_ind]
820 823 data_ccf = self.data_cf[ccf_ind]
821 824
822 825 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
823 826
824 827 @property
825 828 def normFactor(self):
826 829 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
827 830 acf_pairs = numpy.array(acf_pairs)
828 831 normFactor = numpy.zeros((self.nPairs, self.nHeights))
829 832
830 833 for p in range(self.nPairs):
831 834 pair = self.pairsList[p]
832 835
833 836 ch0 = pair[0]
834 837 ch1 = pair[1]
835 838
836 839 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
837 840 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
838 841 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
839 842
840 843 return normFactor
841 844
842 845
843 846 class Parameters(Spectra):
844 847
845 848 groupList = None # List of Pairs, Groups, etc
846 849 data_param = None # Parameters obtained
847 850 data_pre = None # Data Pre Parametrization
848 851 data_SNR = None # Signal to Noise Ratio
849 852 abscissaList = None # Abscissa, can be velocities, lags or time
850 853 utctimeInit = None # Initial UTC time
851 854 paramInterval = None # Time interval to calculate Parameters in seconds
852 855 useLocalTime = True
853 856 # Fitting
854 857 data_error = None # Error of the estimation
855 858 constants = None
856 859 library = None
857 860 # Output signal
858 861 outputInterval = None # Time interval to calculate output signal in seconds
859 862 data_output = None # Out signal
860 863 nAvg = None
861 864 noise_estimation = None
862 865 GauSPC = None # Fit gaussian SPC
863 866
864 867 def __init__(self):
865 868 '''
866 869 Constructor
867 870 '''
868 871 self.radarControllerHeaderObj = RadarControllerHeader()
872 self.radarControllerHeaderObj.set_ippSeconds(0)
869 873 self.systemHeaderObj = SystemHeader()
870 874 self.type = "Parameters"
871 875 self.timeZone = 0
872 876
873 877 def getTimeRange1(self, interval):
874 878
875 879 datatime = []
876 880
877 881 if self.useLocalTime:
878 882 time1 = self.utctimeInit - self.timeZone * 60
879 883 else:
880 884 time1 = self.utctimeInit
881 885
882 886 datatime.append(time1)
883 887 datatime.append(time1 + interval)
884 888 datatime = numpy.array(datatime)
885 889
886 890 return datatime
887 891
888 892 @property
889 893 def timeInterval(self):
890 894
891 895 if hasattr(self, 'timeInterval1'):
892 896 return self.timeInterval1
893 897 else:
894 898 return self.paramInterval
895 899
896 900 def setValue(self, value):
897 901
898 902 print("This property should not be initialized")
899 903
900 904 return
901 905
902 906 def getNoise(self):
903 907
904 908 return self.spc_noise
905 909
906 910 noise = property(getNoise, setValue, doc="I'm the 'Noise' property.")
907 911
908 912
909 913 class PlotterData(object):
910 914 '''
911 915 Object to hold data to be plotted
912 916 '''
913 917
914 918 MAXNUMX = 200
915 919 MAXNUMY = 200
916 920
917 921 def __init__(self, code, exp_code, localtime=True):
918 922
919 923 self.key = code
920 924 self.exp_code = exp_code
921 925 self.ready = False
922 926 self.flagNoData = False
923 927 self.localtime = localtime
924 928 self.data = {}
925 929 self.meta = {}
926 930 self.__heights = []
927 931
928 932 def __str__(self):
929 933 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
930 934 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
931 935
932 936 def __len__(self):
933 937 return len(self.data)
934 938
935 939 def __getitem__(self, key):
936 940 if isinstance(key, int):
937 941 return self.data[self.times[key]]
938 942 elif isinstance(key, str):
939 943 ret = numpy.array([self.data[x][key] for x in self.times])
940 944 if ret.ndim > 1:
941 945 ret = numpy.swapaxes(ret, 0, 1)
942 946 return ret
943 947
944 948 def __contains__(self, key):
945 949 return key in self.data[self.min_time]
946 950
947 951 def setup(self):
948 952 '''
949 953 Configure object
950 954 '''
951 955 self.type = ''
952 956 self.ready = False
953 957 del self.data
954 958 self.data = {}
955 959 self.__heights = []
956 960 self.__all_heights = set()
957 961
958 962 def shape(self, key):
959 963 '''
960 964 Get the shape of a single-time data entry for the given key
961 965 '''
962 966
963 967 if len(self.data[self.min_time][key]):
964 968 return self.data[self.min_time][key].shape
965 969 return (0,)
966 970
967 971 def update(self, data, tm, meta={}):
968 972 '''
969 973 Update data object with new dataOut
970 974 '''
971 975
972 976 self.data[tm] = data
973 977
974 978 for key, value in meta.items():
975 979 setattr(self, key, value)
976 980
977 981 def normalize_heights(self):
978 982 '''
979 983 Ensure the data has the same dimensions across different heightList values
980 984 '''
981 985
982 986 H = numpy.array(list(self.__all_heights))
983 987 H.sort()
984 988 for key in self.data:
985 989 shape = self.shape(key)[:-1] + H.shape
986 990 for tm, obj in list(self.data[key].items()):
987 991 h = self.__heights[self.times.tolist().index(tm)]
988 992 if H.size == h.size:
989 993 continue
990 994 index = numpy.where(numpy.in1d(H, h))[0]
991 995 dummy = numpy.zeros(shape) + numpy.nan
992 996 if len(shape) == 2:
993 997 dummy[:, index] = obj
994 998 else:
995 999 dummy[index] = obj
996 1000 self.data[key][tm] = dummy
997 1001
998 1002 self.__heights = [H for tm in self.times]
999 1003
1000 1004 def jsonify(self, tm, plot_name, plot_type, decimate=False):
1001 1005 '''
1002 1006 Convert data to json
1003 1007 '''
1004 1008
1005 1009 meta = {}
1006 1010 meta['xrange'] = []
1007 1011 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1008 1012 tmp = self.data[tm][self.key]
1009 1013 shape = tmp.shape
1010 1014 if len(shape) == 2:
1011 1015 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1012 1016 elif len(shape) == 3:
1013 1017 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1014 1018 data = self.roundFloats(
1015 1019 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1016 1020 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1017 1021 else:
1018 1022 data = self.roundFloats(self.data[tm][self.key].tolist())
1019 1023
1020 1024 ret = {
1021 1025 'plot': plot_name,
1022 1026 'code': self.exp_code,
1023 1027 'time': float(tm),
1024 1028 'data': data,
1025 1029 }
1026 1030 meta['type'] = plot_type
1027 1031 meta['interval'] = float(self.interval)
1028 1032 meta['localtime'] = self.localtime
1029 1033 meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
1030 1034 meta.update(self.meta)
1031 1035 ret['metadata'] = meta
1032 1036 return json.dumps(ret)
1033 1037
1034 1038 @property
1035 1039 def times(self):
1036 1040 '''
1037 1041 Return the list of times of the current data
1038 1042 '''
1039 1043
1040 1044 ret = [t for t in self.data]
1041 1045 ret.sort()
1042 1046 return numpy.array(ret)
1043 1047
1044 1048 @property
1045 1049 def min_time(self):
1046 1050 '''
1047 1051 Return the minimum time value
1048 1052 '''
1049 1053
1050 1054 return self.times[0]
1051 1055
1052 1056 @property
1053 1057 def max_time(self):
1054 1058 '''
1055 1059 Return the maximum time value
1056 1060 '''
1057 1061
1058 1062 return self.times[-1]
1059 1063
1060 1064 # @property
1061 1065 # def heights(self):
1062 1066 # '''
1063 1067 # Return the list of heights of the current data
1064 1068 # '''
1065 1069
1066 1070 # return numpy.array(self.__heights[-1])
1067 1071
1068 1072 @staticmethod
1069 1073 def roundFloats(obj):
1070 1074 if isinstance(obj, list):
1071 1075 return list(map(PlotterData.roundFloats, obj))
1072 1076 elif isinstance(obj, float):
1073 1077 return round(obj, 2)
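A minimal usage sketch of PlotterData as consumed by the plotting layer (keys, shapes and timestamps are illustrative):

import numpy
from schainpy.model.data.jrodata import PlotterData

pdata = PlotterData(code='spc', exp_code=None, localtime=True)
pdata.setup()
pdata.update({'spc': numpy.zeros((2, 64, 100))}, tm=1577836800.0,
             meta={'yrange': numpy.arange(100), 'interval': 1.0})
print(pdata.times)         # [1.5778368e+09]
print('spc' in pdata)      # True
print(pdata.shape('spc'))  # (2, 64, 100)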
@@ -1,701 +1,700
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
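A small standalone check of the two geodesy helpers above (coordinates are approximate and only for illustration):

# offsets in km from a reference point (~Jicamarca, 11.95 S, 76.87 W) to a point one degree due north
x, y = ll2xy(-11.95, -76.87, -10.95, -76.87)
print(round(x, 1), round(y, 1))   # ~0.0, ~111.2 (east, north offsets in km)
print(round(km2deg(111.2), 2))    # ~1.0 degree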
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute (extra
103 103 calls within the period are silently skipped unless coerce=True is passed):
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
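A short usage sketch of apply_throttle (illustrative names; Plot.run uses it the same way to rate-limit __plot):

throttled = apply_throttle(5)   # at most one real call every 5 seconds

def draw():
    print('plotting')

for _ in range(100):
    throttled(draw)             # only the first call in each 5-second window actually runs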
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new operation.
154 154 Child classes must be defined as follows:
155 155
156 156 class ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194 if self.t_units == "h_m":
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196 if self.t_units == "h":
197 197 return '{}'.format(self.getDateTime(x).strftime('%H'))
198 198
199 199 def __setup(self, **kwargs):
200 200 '''
201 201 Initialize variables
202 202 '''
203 203
204 204 self.figures = []
205 205 self.axes = []
206 206 self.cb_axes = []
207 207 self.pf_axes = []
208 208 self.localtime = kwargs.pop('localtime', True)
209 209 self.show = kwargs.get('show', True)
210 210 self.save = kwargs.get('save', False)
211 211 self.save_period = kwargs.get('save_period', 0)
212 212 self.colormap = kwargs.get('colormap', self.colormap)
213 213 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
214 214 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
215 215 self.colormaps = kwargs.get('colormaps', None)
216 216 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
217 217 self.showprofile = kwargs.get('showprofile', False)
218 218 self.title = kwargs.get('wintitle', self.CODE.upper())
219 219 self.cb_label = kwargs.get('cb_label', None)
220 220 self.cb_labels = kwargs.get('cb_labels', None)
221 221 self.labels = kwargs.get('labels', None)
222 222 self.xaxis = kwargs.get('xaxis', 'frequency')
223 223 self.zmin = kwargs.get('zmin', None)
224 224 self.zmax = kwargs.get('zmax', None)
225 225 self.zlimits = kwargs.get('zlimits', None)
226 226 self.xmin = kwargs.get('xmin', None)
227 227 self.xmax = kwargs.get('xmax', None)
228 228 self.xrange = kwargs.get('xrange', 12)
229 229 self.xscale = kwargs.get('xscale', None)
230 230 self.ymin = kwargs.get('ymin', None)
231 231 self.ymax = kwargs.get('ymax', None)
232 232 self.yscale = kwargs.get('yscale', None)
233 233 self.xlabel = kwargs.get('xlabel', None)
234 234 self.attr_time = kwargs.get('attr_time', 'utctime')
235 235 self.attr_data = kwargs.get('attr_data', 'data_param')
236 236 self.decimation = kwargs.get('decimation', None)
237 237 self.oneFigure = kwargs.get('oneFigure', True)
238 238 self.width = kwargs.get('width', None)
239 239 self.height = kwargs.get('height', None)
240 240 self.colorbar = kwargs.get('colorbar', True)
241 241 self.factors = kwargs.get('factors', range(18))
242 242 self.channels = kwargs.get('channels', None)
243 243 self.titles = kwargs.get('titles', [])
244 244 self.polar = False
245 245 self.type = kwargs.get('type', 'iq')
246 246 self.grid = kwargs.get('grid', False)
247 247 self.pause = kwargs.get('pause', False)
248 248 self.save_code = kwargs.get('save_code', self.CODE)
249 249 self.throttle = kwargs.get('throttle', 0)
250 250 self.exp_code = kwargs.get('exp_code', None)
251 251 self.server = kwargs.get('server', False)
252 252 self.sender_period = kwargs.get('sender_period', 60)
253 253 self.tag = kwargs.get('tag', '')
254 254 self.height_index = kwargs.get('height_index', None)
255 255 self.__throttle_plot = apply_throttle(self.throttle)
256 256 code = self.attr_data if self.attr_data else self.CODE
257 257 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
258 258 self.tmin = kwargs.get('tmin', None)
259 259 self.t_units = kwargs.get('t_units', "h_m")
260 260
261 261 if self.server:
262 262 if not self.server.startswith('tcp://'):
263 263 self.server = 'tcp://{}'.format(self.server)
264 264 log.success(
265 265 'Sending to server: {}'.format(self.server),
266 266 self.name
267 267 )
268 268
269 269 if isinstance(self.attr_data, str):
270 270 self.attr_data = [self.attr_data]
271 271
272 272 def __setup_plot(self):
273 273 '''
274 274 Common setup for all figures, here figures and axes are created
275 275 '''
276 276
277 277 self.setup()
278 278
279 279 self.time_label = 'LT' if self.localtime else 'UTC'
280 280
281 281 if self.width is None:
282 282 self.width = 8
283 283
284 284 self.figures = []
285 285 self.axes = []
286 286 self.cb_axes = []
287 287 self.pf_axes = []
288 288 self.cmaps = []
289 289
290 290 size = '15%' if self.ncols == 1 else '30%'
291 291 pad = '4%' if self.ncols == 1 else '8%'
292 292
293 293 if self.oneFigure:
294 294 if self.height is None:
295 295 self.height = 1.4 * self.nrows + 1
296 296 fig = plt.figure(figsize=(self.width, self.height),
297 297 edgecolor='k',
298 298 facecolor='w')
299 299 self.figures.append(fig)
300 300 for n in range(self.nplots):
301 301 ax = fig.add_subplot(self.nrows, self.ncols,
302 302 n + 1, polar=self.polar)
303 303 ax.tick_params(labelsize=8)
304 304 ax.firsttime = True
305 305 ax.index = 0
306 306 ax.press = None
307 307 self.axes.append(ax)
308 308 if self.showprofile:
309 309 cax = self.__add_axes(ax, size=size, pad=pad)
310 310 cax.tick_params(labelsize=8)
311 311 self.pf_axes.append(cax)
312 312 else:
313 313 if self.height is None:
314 314 self.height = 3
315 315 for n in range(self.nplots):
316 316 fig = plt.figure(figsize=(self.width, self.height),
317 317 edgecolor='k',
318 318 facecolor='w')
319 319 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
320 320 ax.tick_params(labelsize=8)
321 321 ax.firsttime = True
322 322 ax.index = 0
323 323 ax.press = None
324 324 self.figures.append(fig)
325 325 self.axes.append(ax)
326 326 if self.showprofile:
327 327 cax = self.__add_axes(ax, size=size, pad=pad)
328 328 cax.tick_params(labelsize=8)
329 329 self.pf_axes.append(cax)
330 330
331 331 for n in range(self.nrows):
332 332 if self.colormaps is not None:
333 333 cmap = plt.get_cmap(self.colormaps[n])
334 334 else:
335 335 cmap = plt.get_cmap(self.colormap)
336 336 cmap.set_bad(self.bgcolor, 1.)
337 337 self.cmaps.append(cmap)
338 338
339 339 def __add_axes(self, ax, size='30%', pad='8%'):
340 340 '''
341 341 Add new axes to the given figure
342 342 '''
343 343 divider = make_axes_locatable(ax)
344 344 nax = divider.new_horizontal(size=size, pad=pad)
345 345 ax.figure.add_axes(nax)
346 346 return nax
347 347
348 348 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
349 349 '''
350 350 Create a masked array for missing data
351 351 '''
352 352 if x_buffer.shape[0] < 2:
353 353 return x_buffer, y_buffer, z_buffer
354 354
355 355 deltas = x_buffer[1:] - x_buffer[0:-1]
356 356 x_median = numpy.median(deltas)
357 357
358 358 index = numpy.where(deltas > 5 * x_median)
359 359
360 360 if len(index[0]) != 0:
361 361 z_buffer[::, index[0], ::] = self.__missing
362 362 z_buffer = numpy.ma.masked_inside(z_buffer,
363 363 0.99 * self.__missing,
364 364 1.01 * self.__missing)
365 365
366 366 return x_buffer, y_buffer, z_buffer
367 367
368 368 def decimate(self):
369 369
370 370 # dx = int(len(self.x)/self.__MAXNUMX) + 1
371 371 dy = int(len(self.y) / self.decimation) + 1
372 372
373 373 # x = self.x[::dx]
374 374 x = self.x
375 375 y = self.y[::dy]
376 376 z = self.z[::, ::, ::dy]
377 377
378 378 return x, y, z
379 379
380 380 def format(self):
381 381 '''
382 382 Set min and max values, labels, ticks and titles
383 383 '''
384 384
385 385 for n, ax in enumerate(self.axes):
386 386 if ax.firsttime:
387 387 if self.xaxis != 'time':
388 388 xmin = self.xmin
389 389 xmax = self.xmax
390 390 else:
391 391 xmin = self.tmin
392 392 xmax = self.tmin + self.xrange*60*60
393 393 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
394 394 if self.t_units == "h_m":
395 395 ax.xaxis.set_major_locator(LinearLocator(9))
396 396 if self.t_units == "h":
397 397 ax.xaxis.set_major_locator(LinearLocator(int((xmax-xmin)/3600)+1))
398 398 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
399 399 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
400 400 ax.set_facecolor(self.bgcolor)
401 401 if self.xscale:
402 402 ax.xaxis.set_major_formatter(FuncFormatter(
403 403 lambda x, pos: '{0:g}'.format(x*self.xscale)))
404 404 if self.yscale:
405 405 ax.yaxis.set_major_formatter(FuncFormatter(
406 406 lambda x, pos: '{0:g}'.format(x*self.yscale)))
407 407 if self.xlabel is not None:
408 408 ax.set_xlabel(self.xlabel)
409 409 if self.ylabel is not None:
410 410 ax.set_ylabel(self.ylabel)
411 411 if self.showprofile:
412 412 self.pf_axes[n].set_ylim(ymin, ymax)
413 413 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
414 414 self.pf_axes[n].set_xlabel('dB')
415 415 self.pf_axes[n].grid(b=True, axis='x')
416 416 [tick.set_visible(False)
417 417 for tick in self.pf_axes[n].get_yticklabels()]
418 418 if self.colorbar:
419 419 ax.cbar = plt.colorbar(
420 420 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
421 421 ax.cbar.ax.tick_params(labelsize=8)
422 422 ax.cbar.ax.press = None
423 423 if self.cb_label:
424 424 ax.cbar.set_label(self.cb_label, size=8)
425 425 elif self.cb_labels:
426 426 ax.cbar.set_label(self.cb_labels[n], size=8)
427 427 else:
428 428 ax.cbar = None
429 429 ax.set_xlim(xmin, xmax)
430 430 ax.set_ylim(ymin, ymax)
431 431 ax.firsttime = False
432 432 if self.grid:
433 433 ax.grid(True)
434 434 if not self.polar:
435 435 ax.set_title('{} {} {}'.format(
436 436 self.titles[n],
437 437 self.getDateTime(self.data.max_time).strftime(
438 438 '%Y-%m-%d %H:%M:%S'),
439 439 self.time_label),
440 440 size=8)
441 441 else:
442 442 ax.set_title('{}'.format(self.titles[n]), size=8)
443 443 ax.set_ylim(0, 90)
444 444 ax.set_yticks(numpy.arange(0, 90, 20))
445 445 ax.yaxis.labelpad = 40
446 446
447 447 if self.firsttime:
448 448 for n, fig in enumerate(self.figures):
449 449 fig.subplots_adjust(**self.plots_adjust)
450 450 self.firsttime = False
451 451
452 452 def clear_figures(self):
453 453 '''
454 454 Reset axes so plots can be redrawn
455 455 '''
456 456
457 457 for ax in self.axes+self.pf_axes+self.cb_axes:
458 458 ax.clear()
459 459 ax.firsttime = True
460 460 if hasattr(ax, 'cbar') and ax.cbar:
461 461 ax.cbar.remove()
462 462
463 463 def __plot(self):
464 464 '''
465 465 Main function to plot, format and save figures
466 466 '''
467 467
468 468 self.plot()
469 469 self.format()
470 470
471 471 for n, fig in enumerate(self.figures):
472 472 if self.nrows == 0 or self.nplots == 0:
473 473 log.warning('No data', self.name)
474 474 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
475 475 fig.canvas.manager.set_window_title(self.CODE)
476 476 continue
477 477
478 478 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
479 479 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
480 480 fig.canvas.draw()
481 481 if self.show:
482 482 fig.show()
483 483 figpause(0.01)
484 484
485 485 if self.save:
486 486 self.save_figure(n)
487 487
488 488 if self.server:
489 489 self.send_to_server()
490 490
491 491 def __update(self, dataOut, timestamp):
492 492 '''
493 493 '''
494 494
495 495 metadata = {
496 496 'yrange': dataOut.heightList,
497 497 'interval': dataOut.timeInterval,
498 498 'channels': dataOut.channelList
499 499 }
500
501 500 data, meta = self.update(dataOut)
502 501 metadata.update(meta)
503 502 self.data.update(data, timestamp, metadata)
504 503
505 504 def save_figure(self, n):
506 505 '''
507 506 '''
508 507
509 508 if (self.data.max_time - self.save_time) <= self.save_period:
510 509 return
511 510
512 511 self.save_time = self.data.max_time
513 512
514 513 fig = self.figures[n]
515 514
516 515 if self.throttle == 0:
517 516 figname = os.path.join(
518 517 self.save,
519 518 self.save_code,
520 519 '{}_{}.png'.format(
521 520 self.save_code,
522 521 self.getDateTime(self.data.max_time).strftime(
523 522 '%Y%m%d_%H%M%S'
524 523 ),
525 524 )
526 525 )
527 526 log.log('Saving figure: {}'.format(figname), self.name)
528 527 if not os.path.isdir(os.path.dirname(figname)):
529 528 os.makedirs(os.path.dirname(figname))
530 529 fig.savefig(figname)
531 530
532 531 figname = os.path.join(
533 532 self.save,
534 533 '{}_{}.png'.format(
535 534 self.save_code,
536 535 self.getDateTime(self.data.min_time).strftime(
537 536 '%Y%m%d'
538 537 ),
539 538 )
540 539 )
541 540
542 541 log.log('Saving figure: {}'.format(figname), self.name)
543 542 if not os.path.isdir(os.path.dirname(figname)):
544 543 os.makedirs(os.path.dirname(figname))
545 544 fig.savefig(figname)
546 545
547 546 def send_to_server(self):
548 547 '''
549 548 '''
550 549
551 550 if self.exp_code == None:
552 551 log.warning('Missing `exp_code` skipping sending to server...')
553 552
554 553 last_time = self.data.max_time
555 554 interval = last_time - self.sender_time
556 555 if interval < self.sender_period:
557 556 return
558 557
559 558 self.sender_time = last_time
560 559
561 560 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
562 561 for attr in attrs:
563 562 value = getattr(self, attr)
564 563 if value:
565 564 if isinstance(value, (numpy.float32, numpy.float64)):
566 565 value = round(float(value), 2)
567 566 self.data.meta[attr] = value
568 567 if self.colormap == 'jet':
569 568 self.data.meta['colormap'] = 'Jet'
570 569 elif 'RdBu' in self.colormap:
571 570 self.data.meta['colormap'] = 'RdBu'
572 571 else:
573 572 self.data.meta['colormap'] = 'Viridis'
574 573 self.data.meta['interval'] = int(interval)
575 574
576 575 self.sender_queue.append(last_time)
577 576
578 577 while True:
579 578 try:
580 579 tm = self.sender_queue.popleft()
581 580 except IndexError:
582 581 break
583 582 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
584 583 self.socket.send_string(msg)
585 584 socks = dict(self.poll.poll(2000))
586 585 if socks.get(self.socket) == zmq.POLLIN:
587 586 reply = self.socket.recv_string()
588 587 if reply == 'ok':
589 588 log.log("Response from server ok", self.name)
590 589 time.sleep(0.1)
591 590 continue
592 591 else:
593 592 log.warning(
594 593 "Malformed reply from server: {}".format(reply), self.name)
595 594 else:
596 595 log.warning(
597 596 "No response from server, retrying...", self.name)
598 597 self.sender_queue.appendleft(tm)
599 598 self.socket.setsockopt(zmq.LINGER, 0)
600 599 self.socket.close()
601 600 self.poll.unregister(self.socket)
602 601 self.socket = self.context.socket(zmq.REQ)
603 602 self.socket.connect(self.server)
604 603 self.poll.register(self.socket, zmq.POLLIN)
605 604 break
606 605
607 606 def setup(self):
608 607 '''
609 608 This method should be implemented in the child class; the following
610 609 attributes should be set:
611 610
612 611 self.nrows: number of rows
613 612 self.ncols: number of cols
614 613 self.nplots: number of plots (channels or pairs)
615 614 self.ylabel: label for Y axes
616 615 self.titles: list of axes titles
617 616
618 617 '''
619 618 raise NotImplementedError
620 619
621 620 def plot(self):
622 621 '''
623 622 Must be defined in the child class; this is the actual plotting method
624 623 '''
625 624 raise NotImplementedError
626 625
627 626 def update(self, dataOut):
628 627 '''
629 628 Must be defined in the child class; it updates self.data with new data
630 629 '''
631 630
632 631 data = {
633 632 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
634 633 }
635 634 meta = {}
636 635
637 636 return data, meta
638 637
639 638 def run(self, dataOut, **kwargs):
640 639 '''
641 640 Main plotting routine
642 641 '''
643 642
644 643 if self.isConfig is False:
645 644 self.__setup(**kwargs)
646 645
647 646 if self.localtime:
648 647 self.getDateTime = datetime.datetime.fromtimestamp
649 648 else:
650 649 self.getDateTime = datetime.datetime.utcfromtimestamp
651 650
652 651 self.data.setup()
653 652 self.isConfig = True
654 653 if self.server:
655 654 self.context = zmq.Context()
656 655 self.socket = self.context.socket(zmq.REQ)
657 656 self.socket.connect(self.server)
658 657 self.poll = zmq.Poller()
659 658 self.poll.register(self.socket, zmq.POLLIN)
660 659
661 660 tm = getattr(dataOut, self.attr_time)
662 661
663 662 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
664 663 self.save_time = tm
665 664 self.__plot()
666 665 self.tmin += self.xrange*60*60
667 666 self.data.setup()
668 667 self.clear_figures()
669 668
670 669 self.__update(dataOut, tm)
671 670
672 671 if self.isPlotConfig is False:
673 672 self.__setup_plot()
674 673 self.isPlotConfig = True
675 674 if self.xaxis == 'time':
676 675 dt = self.getDateTime(tm)
677 676 if self.xmin is None:
678 677 self.tmin = tm
679 678 self.xmin = dt.hour
680 679 minutes = (self.xmin-int(self.xmin)) * 60
681 680 seconds = (minutes - int(minutes)) * 60
682 681 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
683 682 datetime.datetime(1970, 1, 1)).total_seconds()
684 683 if self.localtime:
685 684 self.tmin += time.timezone
686 685
687 686 if self.xmin is not None and self.xmax is not None:
688 687 self.xrange = self.xmax - self.xmin
689 688
690 689 if self.throttle == 0:
691 690 self.__plot()
692 691 else:
693 692 self.__throttle_plot(self.__plot)#, coerce=coerce)
694 693
695 694 def close(self):
696 695
697 696 if self.data and not self.data.flagNoData:
698 697 self.save_time = 0
699 698 self.__plot()
700 699 if self.data and not self.data.flagNoData and self.pause:
701 700 figpause(10)
@@ -1,356 +1,355
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 50 def update(self, dataOut):
51 51 self.update_list(dataOut)
52 52 data = {
53 53 'snr': 10*numpy.log10(dataOut.data_snr)
54 54 }
55 55
56 56 return data, {}
57 57
58 58 class DopplerPlot(RTIPlot):
59 59 '''
60 60 Plot for DOPPLER Data (1st moment)
61 61 '''
62 62
63 63 CODE = 'dop'
64 64 colormap = 'jet'
65 65
66 66 def update(self, dataOut):
67 67 self.update_list(dataOut)
68 68 data = {
69 69 'dop': 10*numpy.log10(dataOut.data_dop)
70 70 }
71 71
72 72 return data, {}
73 73
74 74 class PowerPlot(RTIPlot):
75 75 '''
77 77 Plot for Power Data (0th moment)
77 77 '''
78 78
79 79 CODE = 'pow'
80 80 colormap = 'jet'
81 81
82 82 def update(self, dataOut):
83 83 self.update_list(dataOut)
84 84 data = {
85 85 'pow': 10*numpy.log10(dataOut.data_pow)
86 86 }
87 #print("data",data)
88 87 return data, {}
89 88
90 89 class SpectralWidthPlot(RTIPlot):
91 90 '''
92 91 Plot for Spectral Width Data (2nd moment)
93 92 '''
94 93
95 94 CODE = 'width'
96 95 colormap = 'jet'
97 96
98 97 def update(self, dataOut):
99 98 self.update_list(dataOut)
100 99 data = {
101 100 'width': dataOut.data_width
102 101 }
103 102
104 103 return data, {}
105 104
106 105 class SkyMapPlot(Plot):
107 106 '''
108 107 Plot for meteor detection data
109 108 '''
110 109
111 110 CODE = 'param'
112 111
113 112 def setup(self):
114 113
115 114 self.ncols = 1
116 115 self.nrows = 1
117 116 self.width = 7.2
118 117 self.height = 7.2
119 118 self.nplots = 1
120 119 self.xlabel = 'Zonal Zenith Angle (deg)'
121 120 self.ylabel = 'Meridional Zenith Angle (deg)'
122 121 self.polar = True
123 122 self.ymin = -180
124 123 self.ymax = 180
125 124 self.colorbar = False
126 125
127 126 def plot(self):
128 127
129 128 arrayParameters = numpy.concatenate(self.data['param'])
130 129 error = arrayParameters[:, -1]
131 130 indValid = numpy.where(error == 0)[0]
132 131 finalMeteor = arrayParameters[indValid, :]
133 132 finalAzimuth = finalMeteor[:, 3]
134 133 finalZenith = finalMeteor[:, 4]
135 134
136 135 x = finalAzimuth * numpy.pi / 180
137 136 y = finalZenith
138 137
139 138 ax = self.axes[0]
140 139
141 140 if ax.firsttime:
142 141 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
143 142 else:
144 143 ax.plot.set_data(x, y)
145 144
146 145 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
147 146 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
148 147 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
149 148 dt2,
150 149 len(x))
151 150 self.titles[0] = title
152 151
153 152
154 153 class GenericRTIPlot(Plot):
155 154 '''
156 155 Plot for data_xxxx object
157 156 '''
158 157
159 158 CODE = 'param'
160 159 colormap = 'viridis'
161 160 plot_type = 'pcolorbuffer'
162 161
163 162 def setup(self):
164 163 self.xaxis = 'time'
165 164 self.ncols = 1
166 165 self.nrows = self.data.shape('param')[0]
167 166 self.nplots = self.nrows
168 167 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
169 168
170 169 if not self.xlabel:
171 170 self.xlabel = 'Time'
172 171
173 172 self.ylabel = 'Height [km]'
174 173 if not self.titles:
175 174 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
176 175
177 176 def update(self, dataOut):
178 177
179 178 data = {
180 179 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
181 180 }
182 181
183 182 meta = {}
184 183
185 184 return data, meta
186 185
187 186 def plot(self):
188 187 # self.data.normalize_heights()
189 188 self.x = self.data.times
190 189 self.y = self.data.yrange
191 190 self.z = self.data['param']
192 191
193 192 self.z = numpy.ma.masked_invalid(self.z)
194 193
195 194 if self.decimation is None:
196 195 x, y, z = self.fill_gaps(self.x, self.y, self.z)
197 196 else:
198 197 x, y, z = self.fill_gaps(*self.decimate())
199 198
200 199 for n, ax in enumerate(self.axes):
201 200
202 201 self.zmax = self.zmax if self.zmax is not None else numpy.max(
203 202 self.z[n])
204 203 self.zmin = self.zmin if self.zmin is not None else numpy.min(
205 204 self.z[n])
206 205
207 206 if ax.firsttime:
208 207 if self.zlimits is not None:
209 208 self.zmin, self.zmax = self.zlimits[n]
210 209
211 210 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
212 211 vmin=self.zmin,
213 212 vmax=self.zmax,
214 213 cmap=self.cmaps[n]
215 214 )
216 215 else:
217 216 if self.zlimits is not None:
218 217 self.zmin, self.zmax = self.zlimits[n]
219 218 ax.collections.remove(ax.collections[0])
220 219 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
221 220 vmin=self.zmin,
222 221 vmax=self.zmax,
223 222 cmap=self.cmaps[n]
224 223 )
225 224
226 225
227 226 class PolarMapPlot(Plot):
228 227 '''
229 228 Plot for weather radar
230 229 '''
231 230
232 231 CODE = 'param'
233 232 colormap = 'seismic'
234 233
235 234 def setup(self):
236 235 self.ncols = 1
237 236 self.nrows = 1
238 237 self.width = 9
239 238 self.height = 8
240 239 self.mode = self.data.meta['mode']
241 240 if self.channels is not None:
242 241 self.nplots = len(self.channels)
243 242 self.nrows = len(self.channels)
244 243 else:
245 244 self.nplots = self.data.shape(self.CODE)[0]
246 245 self.nrows = self.nplots
247 246 self.channels = list(range(self.nplots))
248 247 if self.mode == 'E':
249 248 self.xlabel = 'Longitude'
250 249 self.ylabel = 'Latitude'
251 250 else:
252 251 self.xlabel = 'Range (km)'
253 252 self.ylabel = 'Height (km)'
254 253 self.bgcolor = 'white'
255 254 self.cb_labels = self.data.meta['units']
256 255 self.lat = self.data.meta['latitude']
257 256 self.lon = self.data.meta['longitude']
258 257 self.xmin, self.xmax = float(
259 258 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
260 259 self.ymin, self.ymax = float(
261 260 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
262 261 # self.polar = True
263 262
264 263 def plot(self):
265 264
266 265 for n, ax in enumerate(self.axes):
267 266 data = self.data['param'][self.channels[n]]
268 267
269 268 zeniths = numpy.linspace(
270 269 0, self.data.meta['max_range'], data.shape[1])
271 270 if self.mode == 'E':
272 271 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
273 272 r, theta = numpy.meshgrid(zeniths, azimuths)
274 273 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 274 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
276 275 x = km2deg(x) + self.lon
277 276 y = km2deg(y) + self.lat
278 277 else:
279 278 azimuths = numpy.radians(self.data.yrange)
280 279 r, theta = numpy.meshgrid(zeniths, azimuths)
281 280 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
282 281 self.y = zeniths
283 282
284 283 if ax.firsttime:
285 284 if self.zlimits is not None:
286 285 self.zmin, self.zmax = self.zlimits[n]
287 286 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 287 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 288 vmin=self.zmin,
290 289 vmax=self.zmax,
291 290 cmap=self.cmaps[n])
292 291 else:
293 292 if self.zlimits is not None:
294 293 self.zmin, self.zmax = self.zlimits[n]
295 294 ax.collections.remove(ax.collections[0])
296 295 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 296 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 297 vmin=self.zmin,
299 298 vmax=self.zmax,
300 299 cmap=self.cmaps[n])
301 300
302 301 if self.mode == 'A':
303 302 continue
304 303
305 304 # plot district names
306 305 f = open('/data/workspace/schain_scripts/distrito.csv')
307 306 for line in f:
308 307 label, lon, lat = [s.strip() for s in line.split(',') if s]
309 308 lat = float(lat)
310 309 lon = float(lon)
311 310 # ax.plot(lon, lat, '.b', ms=2)
312 311 ax.text(lon, lat, label, ha='center',  # label is already a str in Python 3; no decode needed
313 312 va='bottom', size='8', color='black')
314 313
315 314 # plot limites
316 315 limites = []
317 316 tmp = []
318 317 for line in open('/data/workspace/schain_scripts/lima.csv'):
319 318 if '#' in line:
320 319 if tmp:
321 320 limites.append(tmp)
322 321 tmp = []
323 322 continue
324 323 values = line.strip().split(',')
325 324 tmp.append((float(values[0]), float(values[1])))
326 325 for points in limites:
327 326 ax.add_patch(
328 327 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
329 328
330 329 # plot Cuencas
331 330 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
332 331 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
333 332 values = [line.strip().split(',') for line in f]
334 333 points = [(float(s[0]), float(s[1])) for s in values]
335 334 ax.add_patch(Polygon(points, ec='b', fc='none'))
336 335
337 336 # plot grid
338 337 for r in (15, 30, 45, 60):
339 338 ax.add_artist(plt.Circle((self.lon, self.lat),
340 339 km2deg(r), color='0.6', fill=False, lw=0.2))
341 340 ax.text(
342 341 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 342 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
344 343 '{}km'.format(r),
345 344 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 345
347 346 if self.mode == 'E':
348 347 title = r'El={}$^\circ$'.format(self.data.meta['elevation'])
349 348 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
350 349 else:
351 350 title = r'Az={}$^\circ$'.format(self.data.meta['azimuth'])
352 351 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
353 352
354 353 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
355 354 self.titles = ['{} {}'.format(
356 355 self.data.parameters[x], title) for x in self.channels]
@@ -1,731 +1,735
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13 13 from itertools import combinations
14 14
15
15 16 class SpectraPlot(Plot):
16 17 '''
17 18 Plot for Spectra data
18 19 '''
19 20
20 21 CODE = 'spc'
21 22 colormap = 'jet'
22 23 plot_type = 'pcolor'
23 24 buffering = False
24 25 channelList = []
25 26
26 27 def setup(self):
27 28 self.nplots = len(self.data.channels)
28 29 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
29 30 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
30 31 self.height = 2.6 * self.nrows
31 32
32 33 self.cb_label = 'dB'
33 34 if self.showprofile:
34 35 self.width = 4 * self.ncols
35 36 else:
36 37 self.width = 3.5 * self.ncols
37 38 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
38 39 self.ylabel = 'Range [km]'
40 def update_list(self,dataOut):
41 if len(self.channelList) == 0:
42 self.channelList = dataOut.channelList
39 43
40 44 def update(self, dataOut):
41 if self.channelList == None:
42 self.channelList = dataOut.channelList
45 self.update_list(dataOut)
43 46 data = {}
44 47 meta = {}
45 48 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
46 49
47 50 data['spc'] = spc
48 51 data['rti'] = dataOut.getPower()
49 52 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
50 53 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
51 54 if self.CODE == 'spc_moments':
52 55 data['moments'] = dataOut.moments
53 56
54 57 return data, meta
55 58
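The spectra are normalized by dataOut.normFactor and converted to dB before being handed to the plot; a standalone numpy sketch of that conversion on synthetic values (the shapes and normFactor are assumptions):

    import numpy

    data_spc = numpy.abs(numpy.random.randn(2, 64, 100))**2  # (nChannels, nFFTPoints, nHeights), synthetic
    normFactor = 16.0                                         # hypothetical normalization factor
    spc_db = 10 * numpy.log10(data_spc / normFactor)          # same conversion used in update() above
    print(spc_db.shape)                                       # (2, 64, 100)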
56 59 def plot(self):
57 60 if self.xaxis == "frequency":
58 61 x = self.data.xrange[0]
59 62 self.xlabel = "Frequency (kHz)"
60 63 elif self.xaxis == "time":
61 64 x = self.data.xrange[1]
62 65 self.xlabel = "Time (ms)"
63 66 else:
64 67 x = self.data.xrange[2]
65 68 self.xlabel = "Velocity (m/s)"
66 69
67 70 if self.CODE == 'spc_moments':
68 71 x = self.data.xrange[2]
69 72 self.xlabel = "Velocity (m/s)"
70 73
71 74 self.titles = []
72 75
73 76 y = self.data.yrange
74 77 self.y = y
75 78
76 79 data = self.data[-1]
77 80 z = data['spc']
78 81
79 82 for n, ax in enumerate(self.axes):
80 83 noise = data['noise'][n]
81 84 if self.CODE == 'spc_moments':
82 85 mean = data['moments'][n, 1]
83 86 if ax.firsttime:
84 87 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
85 88 self.xmin = self.xmin if self.xmin else -self.xmax
86 89 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
87 90 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
88 91 ax.plt = ax.pcolormesh(x, y, z[n].T,
89 92 vmin=self.zmin,
90 93 vmax=self.zmax,
91 94 cmap=plt.get_cmap(self.colormap)
92 95 )
93 96
94 97 if self.showprofile:
95 98 ax.plt_profile = self.pf_axes[n].plot(
96 99 data['rti'][n], y)[0]
97 100 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
98 101 color="k", linestyle="dashed", lw=1)[0]
99 102 if self.CODE == 'spc_moments':
100 103 ax.plt_mean = ax.plot(mean, y, color='k')[0]
101 104 else:
102 105 ax.plt.set_array(z[n].T.ravel())
103 106 if self.showprofile:
104 107 ax.plt_profile.set_data(data['rti'][n], y)
105 108 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
106 109 if self.CODE == 'spc_moments':
107 110 ax.plt_mean.set_data(mean, y)
108 111 self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
109 112
110 113
111 114 class CrossSpectraPlot(Plot):
112 115
113 116 CODE = 'cspc'
114 117 colormap = 'jet'
115 118 plot_type = 'pcolor'
116 119 zmin_coh = None
117 120 zmax_coh = None
118 121 zmin_phase = None
119 122 zmax_phase = None
120 123 realChannels = None
121 124 crossPairs = None
122 125
123 126 def setup(self):
124 127
125 128 self.ncols = 4
126 129 self.nplots = len(self.data.pairs) * 2
127 130 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
128 131 self.width = 3.1 * self.ncols
129 132 self.height = 2.6 * self.nrows
130 133 self.ylabel = 'Range [km]'
131 134 self.showprofile = False
132 135 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
133 136
134 137 def update(self, dataOut):
135 138
136 139 data = {}
137 140 meta = {}
138 141
139 142 spc = dataOut.data_spc
140 143 cspc = dataOut.data_cspc
141 144 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
142 145 rawPairs = list(combinations(list(range(dataOut.nChannels)), 2))
143 146 meta['pairs'] = rawPairs
144 147
145 148 if self.crossPairs == None:
146 149 self.crossPairs = dataOut.pairsList
147 150
148 151 tmp = []
149 152
150 153 for n, pair in enumerate(meta['pairs']):
151 154
152 155 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
153 156 coh = numpy.abs(out)
154 157 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
155 158 tmp.append(coh)
156 159 tmp.append(phase)
157 160
158 161 data['cspc'] = numpy.array(tmp)
159 162
160 163 return data, meta
161 164
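update() reduces each cross spectrum to a coherence magnitude and a phase by normalizing with the two auto-spectra; the same operation on synthetic arrays (shapes are assumptions, and with real spectra the coherence stays between 0 and 1):

    import numpy

    nfft, nheights = 64, 100
    spc = numpy.abs(numpy.random.randn(2, nfft, nheights))**2 + 1.0          # two synthetic auto-spectra
    cspc = numpy.random.randn(nfft, nheights) + 1j * numpy.random.randn(nfft, nheights)

    out = cspc / numpy.sqrt(spc[0] * spc[1])                     # normalized cross spectrum
    coh = numpy.abs(out)                                         # coherence magnitude
    phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi   # phase in degrees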
162 165 def plot(self):
163 166
164 167 if self.xaxis == "frequency":
165 168 x = self.data.xrange[0]
166 169 self.xlabel = "Frequency (kHz)"
167 170 elif self.xaxis == "time":
168 171 x = self.data.xrange[1]
169 172 self.xlabel = "Time (ms)"
170 173 else:
171 174 x = self.data.xrange[2]
172 175 self.xlabel = "Velocity (m/s)"
173 176
174 177 self.titles = []
175 178
176 179 y = self.data.yrange
177 180 self.y = y
178 181
179 182 data = self.data[-1]
180 183 cspc = data['cspc']
181 #print(self.crossPairs)
184
182 185 for n in range(len(self.data.pairs)):
183 #pair = self.data.pairs[n]
186
184 187 pair = self.crossPairs[n]
185 188
186 189 coh = cspc[n*2]
187 190 phase = cspc[n*2+1]
188 191 ax = self.axes[2 * n]
189 192
190 193 if ax.firsttime:
191 194 ax.plt = ax.pcolormesh(x, y, coh.T,
192 195 vmin=0,
193 196 vmax=1,
194 197 cmap=plt.get_cmap(self.colormap_coh)
195 198 )
196 199 else:
197 200 ax.plt.set_array(coh.T.ravel())
198 201 self.titles.append(
199 202 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
200 203
201 204 ax = self.axes[2 * n + 1]
202 205 if ax.firsttime:
203 206 ax.plt = ax.pcolormesh(x, y, phase.T,
204 207 vmin=-180,
205 208 vmax=180,
206 209 cmap=plt.get_cmap(self.colormap_phase)
207 210 )
208 211 else:
209 212 ax.plt.set_array(phase.T.ravel())
210 213 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
211 214
212 215
213 216 class RTIPlot(Plot):
214 217 '''
215 218 Plot for RTI data
216 219 '''
217 220
218 221 CODE = 'rti'
219 222 colormap = 'jet'
220 223 plot_type = 'pcolorbuffer'
221 224 titles = None
222 225 channelList = []
223 226
224 227 def setup(self):
225 228 self.xaxis = 'time'
226 229 self.ncols = 1
227 230 #print("dataChannels ",self.data.channels)
228 231 self.nrows = len(self.data.channels)
229 232 self.nplots = len(self.data.channels)
230 233 self.ylabel = 'Range [km]'
231 234 self.xlabel = 'Time'
232 235 self.cb_label = 'dB'
233 236 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
234 237 self.titles = ['{} Channel {}'.format(
235 238 self.CODE.upper(), x) for x in range(self.nplots)]
236 239
237 240 def update_list(self,dataOut):
238 241 if len(self.channelList) == 0:
239 242 self.channelList = dataOut.channelList
240 243
244
241 245 def update(self, dataOut):
242 246 self.update_list(dataOut)
243 247 data = {}
244 248 meta = {}
245 249 data['rti'] = dataOut.getPower()
246 250 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
247 251 return data, meta
248 252
249 253 def plot(self):
250 254
251 255 self.x = self.data.times
252 256 self.y = self.data.yrange
253 257 self.z = self.data[self.CODE]
254 258 self.z = numpy.ma.masked_invalid(self.z)
255 259 try:
256 260 if self.channelList != None:
257 261 self.titles = ['{} Channel {}'.format(
258 262 self.CODE.upper(), x) for x in self.channelList]
259 263 except:
260 264 if self.channelList.any() != None:
261 265 self.titles = ['{} Channel {}'.format(
262 266 self.CODE.upper(), x) for x in self.channelList]
263 267 if self.decimation is None:
264 268 x, y, z = self.fill_gaps(self.x, self.y, self.z)
265 269 else:
266 270 x, y, z = self.fill_gaps(*self.decimate())
267 271
268 272 for n, ax in enumerate(self.axes):
269 273 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
270 274 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
271 275 data = self.data[-1]
272 276 if ax.firsttime:
273 277 ax.plt = ax.pcolormesh(x, y, z[n].T,
274 278 vmin=self.zmin,
275 279 vmax=self.zmax,
276 280 cmap=plt.get_cmap(self.colormap)
277 281 )
278 282 if self.showprofile:
279 283 ax.plot_profile = self.pf_axes[n].plot(
280 284 data['rti'][n], self.y)[0]
281 285 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
282 286 color="k", linestyle="dashed", lw=1)[0]
283 287 else:
284 288 ax.collections.remove(ax.collections[0])
285 289 ax.plt = ax.pcolormesh(x, y, z[n].T,
286 290 vmin=self.zmin,
287 291 vmax=self.zmax,
288 292 cmap=plt.get_cmap(self.colormap)
289 293 )
290 294 if self.showprofile:
291 295 ax.plot_profile.set_data(data['rti'][n], self.y)
292 296 ax.plot_noise.set_data(numpy.repeat(
293 297 data['noise'][n], len(self.y)), self.y)
294 298
295 299
296 300 class CoherencePlot(RTIPlot):
297 301 '''
298 302 Plot for Coherence data
299 303 '''
300 304
301 305 CODE = 'coh'
302 306
303 307 def setup(self):
304 308 self.xaxis = 'time'
305 309 self.ncols = 1
306 310 self.nrows = len(self.data.pairs)
307 311 self.nplots = len(self.data.pairs)
308 312 self.ylabel = 'Range [km]'
309 313 self.xlabel = 'Time'
310 314 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
311 315 if self.CODE == 'coh':
312 316 self.cb_label = ''
313 317 self.titles = [
314 318 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
315 319 else:
316 320 self.cb_label = 'Degrees'
317 321 self.titles = [
318 322 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
319 323
320 324 def update(self, dataOut):
321 325 self.update_list(dataOut)
322 326 data = {}
323 327 meta = {}
324 328 data['coh'] = dataOut.getCoherence()
325 329 meta['pairs'] = dataOut.pairsList
326 330
327 331
328 332 return data, meta
329 333
330 334 class PhasePlot(CoherencePlot):
331 335 '''
332 336 Plot for Phase map data
333 337 '''
334 338
335 339 CODE = 'phase'
336 340 colormap = 'seismic'
337 341
338 342 def update(self, dataOut):
339 343
340 344 data = {}
341 345 meta = {}
342 346 data['phase'] = dataOut.getCoherence(phase=True)
343 347 meta['pairs'] = dataOut.pairsList
344 348
345 349 return data, meta
346 350
347 351 class NoisePlot(Plot):
348 352 '''
349 353 Plot for noise
350 354 '''
351 355
352 356 CODE = 'noise'
353 357 plot_type = 'scatterbuffer'
354 358
355 359 def setup(self):
356 360 self.xaxis = 'time'
357 361 self.ncols = 1
358 362 self.nrows = 1
359 363 self.nplots = 1
360 364 self.ylabel = 'Intensity [dB]'
361 365 self.xlabel = 'Time'
362 366 self.titles = ['Noise']
363 367 self.colorbar = False
364 368 self.plots_adjust.update({'right': 0.85 })
365 369
366 370 def update(self, dataOut):
367 371
368 372 data = {}
369 373 meta = {}
370 374 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
371 375 meta['yrange'] = numpy.array([])
372 376
373 377 return data, meta
374 378
375 379 def plot(self):
376 380
377 381 x = self.data.times
378 382 xmin = self.data.min_time
379 383 xmax = xmin + self.xrange * 60 * 60
380 384 Y = self.data['noise']
381 385
382 386 if self.axes[0].firsttime:
383 387 self.ymin = numpy.nanmin(Y) - 5
384 388 self.ymax = numpy.nanmax(Y) + 5
385 389 for ch in self.data.channels:
386 390 y = Y[ch]
387 391 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
388 392 plt.legend(bbox_to_anchor=(1.18, 1.0))
389 393 else:
390 394 for ch in self.data.channels:
391 395 y = Y[ch]
392 396 self.axes[0].lines[ch].set_data(x, y)
393 397
394 398
395 399 class PowerProfilePlot(Plot):
396 400
397 401 CODE = 'pow_profile'
398 402 plot_type = 'scatter'
399 403
400 404 def setup(self):
401 405
402 406 self.ncols = 1
403 407 self.nrows = 1
404 408 self.nplots = 1
405 409 self.height = 4
406 410 self.width = 3
407 411 self.ylabel = 'Range [km]'
408 412 self.xlabel = 'Intensity [dB]'
409 413 self.titles = ['Power Profile']
410 414 self.colorbar = False
411 415
412 416 def update(self, dataOut):
413 417
414 418 data = {}
415 419 meta = {}
416 420 data[self.CODE] = dataOut.getPower()
417 421
418 422 return data, meta
419 423
420 424 def plot(self):
421 425
422 426 y = self.data.yrange
423 427 self.y = y
424 428
425 429 x = self.data[-1][self.CODE]
426 430
427 431 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
428 432 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
429 433
430 434 if self.axes[0].firsttime:
431 435 for ch in self.data.channels:
432 436 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
433 437 plt.legend()
434 438 else:
435 439 for ch in self.data.channels:
436 440 self.axes[0].lines[ch].set_data(x[ch], y)
437 441
438 442
439 443 class SpectraCutPlot(Plot):
440 444
441 445 CODE = 'spc_cut'
442 446 plot_type = 'scatter'
443 447 buffering = False
444 448
445 449 def setup(self):
446 450
447 451 self.nplots = len(self.data.channels)
448 452 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
449 453 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
450 454 self.width = 3.4 * self.ncols + 1.5
451 455 self.height = 3 * self.nrows
452 456 self.ylabel = 'Power [dB]'
453 457 self.colorbar = False
454 458 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
455 459
456 460 def update(self, dataOut):
457 461
458 462 data = {}
459 463 meta = {}
460 464 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
461 465 data['spc'] = spc
462 466 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
463 467
464 468 return data, meta
465 469
466 470 def plot(self):
467 471 if self.xaxis == "frequency":
468 472 x = self.data.xrange[0][1:]
469 473 self.xlabel = "Frequency (kHz)"
470 474 elif self.xaxis == "time":
471 475 x = self.data.xrange[1]
472 476 self.xlabel = "Time (ms)"
473 477 else:
474 478 x = self.data.xrange[2]
475 479 self.xlabel = "Velocity (m/s)"
476 480
477 481 self.titles = []
478 482
479 483 y = self.data.yrange
480 484 z = self.data[-1]['spc']
481 485
482 486 if self.height_index:
483 487 index = numpy.array(self.height_index)
484 488 else:
485 489 index = numpy.arange(0, len(y), int((len(y))/9))
486 490
487 491 for n, ax in enumerate(self.axes):
488 492 if ax.firsttime:
489 493 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
490 494 self.xmin = self.xmin if self.xmin else -self.xmax
491 495 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
492 496 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
493 497 ax.plt = ax.plot(x, z[n, :, index].T)
494 498 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
495 499 self.figures[0].legend(ax.plt, labels, loc='center right')
496 500 else:
497 501 for i, line in enumerate(ax.plt):
498 502 line.set_data(x, z[n, :, index[i]])
499 503 self.titles.append('CH {}'.format(n))
500 504
501 505
502 506 class BeaconPhase(Plot):
503 507
504 508 __isConfig = None
505 509 __nsubplots = None
506 510
507 511 PREFIX = 'beacon_phase'
508 512
509 513 def __init__(self):
510 514 Plot.__init__(self)
511 515 self.timerange = 24*60*60
512 516 self.isConfig = False
513 517 self.__nsubplots = 1
514 518 self.counter_imagwr = 0
515 519 self.WIDTH = 800
516 520 self.HEIGHT = 400
517 521 self.WIDTHPROF = 120
518 522 self.HEIGHTPROF = 0
519 523 self.xdata = None
520 524 self.ydata = None
521 525
522 526 self.PLOT_CODE = BEACON_CODE
523 527
524 528 self.FTP_WEI = None
525 529 self.EXP_CODE = None
526 530 self.SUB_EXP_CODE = None
527 531 self.PLOT_POS = None
528 532
529 533 self.filename_phase = None
530 534
531 535 self.figfile = None
532 536
533 537 self.xmin = None
534 538 self.xmax = None
535 539
536 540 def getSubplots(self):
537 541
538 542 ncol = 1
539 543 nrow = 1
540 544
541 545 return nrow, ncol
542 546
543 547 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
544 548
545 549 self.__showprofile = showprofile
546 550 self.nplots = nplots
547 551
548 552 ncolspan = 7
549 553 colspan = 6
550 554 self.__nsubplots = 2
551 555
552 556 self.createFigure(id = id,
553 557 wintitle = wintitle,
554 558 widthplot = self.WIDTH+self.WIDTHPROF,
555 559 heightplot = self.HEIGHT+self.HEIGHTPROF,
556 560 show=show)
557 561
558 562 nrow, ncol = self.getSubplots()
559 563
560 564 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
561 565
562 566 def save_phase(self, filename_phase):
563 567 f = open(filename_phase,'w+')
564 568 f.write('\n\n')
565 569 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
566 570 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
567 571 f.close()
568 572
569 573 def save_data(self, filename_phase, data, data_datetime):
570 574 f=open(filename_phase,'a')
571 575 timetuple_data = data_datetime.timetuple()
572 576 day = str(timetuple_data.tm_mday)
573 577 month = str(timetuple_data.tm_mon)
574 578 year = str(timetuple_data.tm_year)
575 579 hour = str(timetuple_data.tm_hour)
576 580 minute = str(timetuple_data.tm_min)
577 581 second = str(timetuple_data.tm_sec)
578 582 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
579 583 f.close()
580 584
581 585 def plot(self):
582 586 log.warning('TODO: Not yet implemented...')
583 587
584 588 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
585 589 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
586 590 timerange=None,
587 591 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
588 592 server=None, folder=None, username=None, password=None,
589 593 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
590 594
591 595 if dataOut.flagNoData:
592 596 return dataOut
593 597
594 598 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
595 599 return
596 600
597 601 if pairsList == None:
598 602 pairsIndexList = dataOut.pairsIndexList[:10]
599 603 else:
600 604 pairsIndexList = []
601 605 for pair in pairsList:
602 606 if pair not in dataOut.pairsList:
603 607 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
604 608 pairsIndexList.append(dataOut.pairsList.index(pair))
605 609
606 610 if pairsIndexList == []:
607 611 return
608 612
609 613 # if len(pairsIndexList) > 4:
610 614 # pairsIndexList = pairsIndexList[0:4]
611 615
612 616 hmin_index = None
613 617 hmax_index = None
614 618
615 619 if hmin != None and hmax != None:
616 620 indexes = numpy.arange(dataOut.nHeights)
617 621 hmin_list = indexes[dataOut.heightList >= hmin]
618 622 hmax_list = indexes[dataOut.heightList <= hmax]
619 623
620 624 if hmin_list.any():
621 625 hmin_index = hmin_list[0]
622 626
623 627 if hmax_list.any():
624 628 hmax_index = hmax_list[-1]+1
625 629
626 630 x = dataOut.getTimeRange()
627 631
628 632 thisDatetime = dataOut.datatime
629 633
630 634 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
631 635 xlabel = "Local Time"
632 636 ylabel = "Phase (degrees)"
633 637
634 638 update_figfile = False
635 639
636 640 nplots = len(pairsIndexList)
637 641 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
638 642 phase_beacon = numpy.zeros(len(pairsIndexList))
639 643 for i in range(nplots):
640 644 pair = dataOut.pairsList[pairsIndexList[i]]
641 645 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
642 646 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
643 647 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
644 648 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
645 649 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
646 650
647 651 if dataOut.beacon_heiIndexList:
648 652 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
649 653 else:
650 654 phase_beacon[i] = numpy.average(phase)
651 655
652 656 if not self.isConfig:
653 657
654 658 nplots = len(pairsIndexList)
655 659
656 660 self.setup(id=id,
657 661 nplots=nplots,
658 662 wintitle=wintitle,
659 663 showprofile=showprofile,
660 664 show=show)
661 665
662 666 if timerange != None:
663 667 self.timerange = timerange
664 668
665 669 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
666 670
667 671 if ymin == None: ymin = 0
668 672 if ymax == None: ymax = 360
669 673
670 674 self.FTP_WEI = ftp_wei
671 675 self.EXP_CODE = exp_code
672 676 self.SUB_EXP_CODE = sub_exp_code
673 677 self.PLOT_POS = plot_pos
674 678
675 679 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
676 680 self.isConfig = True
677 681 self.figfile = figfile
678 682 self.xdata = numpy.array([])
679 683 self.ydata = numpy.array([])
680 684
681 685 update_figfile = True
682 686
683 687 #open file beacon phase
684 688 path = '%s%03d' %(self.PREFIX, self.id)
685 689 beacon_file = os.path.join(path,'%s.txt'%self.name)
686 690 self.filename_phase = os.path.join(figpath,beacon_file)
687 691 #self.save_phase(self.filename_phase)
688 692
689 693
690 694 #store data beacon phase
691 695 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
692 696
693 697 self.setWinTitle(title)
694 698
695 699
696 700 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
697 701
698 702 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
699 703
700 704 axes = self.axesList[0]
701 705
702 706 self.xdata = numpy.hstack((self.xdata, x[0:1]))
703 707
704 708 if len(self.ydata)==0:
705 709 self.ydata = phase_beacon.reshape(-1,1)
706 710 else:
707 711 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
708 712
709 713
710 714 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
711 715 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
712 716 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
713 717 XAxisAsTime=True, grid='both'
714 718 )
715 719
716 720 self.draw()
717 721
718 722 if dataOut.ltctime >= self.xmax:
719 723 self.counter_imagwr = wr_period
720 724 self.isConfig = False
721 725 update_figfile = True
722 726
723 727 self.save(figpath=figpath,
724 728 figfile=figfile,
725 729 save=save,
726 730 ftp=ftp,
727 731 wr_period=wr_period,
728 732 thisDatetime=thisDatetime,
729 733 update_figfile=update_figfile)
730 734
731 735 return dataOut
@@ -1,1577 +1,1577
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Checks whether the characters composing a string can be converted to a number.
40 40
41 41 Exceptions:
42 42 If the given string cannot be converted to a number
43 43 Input:
44 44 str, the string to be analyzed to determine whether it can be converted to a number or not
45 45
46 46 Return:
47 47 True : if the string is numeric
48 48 False : if the string is not numeric
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
60 60 This function determines whether a data file falls within the specified date range.
61 61
62 62 Inputs:
63 63 filename : full name of the data file in Jicamarca format (.r)
64 64
65 65 startUTSeconds : start date of the selected range, given in
66 66 seconds counted from 01/01/1970.
67 67 endUTSeconds : end date of the selected range, given in
68 68 seconds counted from 01/01/1970.
69 69
70 70 Return:
71 71 Boolean : returns True if the data file contains data within the specified
72 72 date range, otherwise returns False.
73 73
74 74 Exceptions:
75 75 If the file does not exist or cannot be opened
76 76 If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90 print("Skipping the file %s because it does not have a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
108 108
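When endTime is earlier than startTime the range is taken to wrap past midnight; a quick check of both branches, assuming isTimeInRange above is in scope:

    import datetime

    t = datetime.time(1, 30)
    print(isTimeInRange(t, datetime.time(8, 0), datetime.time(18, 0)))   # 0: outside a same-day range
    print(isTimeInRange(t, datetime.time(22, 0), datetime.time(6, 0)))   # 1: inside a range wrapping midnight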
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112 Returns 1 if the data file falls within the specified time range.
113 113
114 114 Inputs:
115 115 filename : full name of the data file in Jicamarca format (.r)
116 116
117 117 startDate : start date of the selected range as a datetime.date
118 118
119 119 endDate : end date of the selected range as a datetime.date
120 120
121 121 startTime : start time of the selected range as a datetime.time
122 122
123 123 endTime : end time of the selected range as a datetime.time
124 124
125 125 Return:
126 126 Boolean : returns True if the data file contains data within the specified
127 127 date range, otherwise returns False.
128 128
129 129 Exceptions:
130 130 If the file does not exist or cannot be opened
131 131 If the header cannot be read.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151 print("[Reading] Skipping the file %s because it does not have a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168 print("[Reading] %s: This file does not have enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215 Returns 1 if the data folder falls within the specified date range.
216 216
217 217 Inputs:
218 218 folder : full name of the directory.
219 219 Its format should be "/path_root/?YYYYDDD"
220 220
221 221 where:
222 222 YYYY : year (e.g. 2015)
223 223 DDD : day of the year (e.g. 305)
224 224
225 225 startDate : start date of the selected range as a datetime.date
226 226
227 227 endDate : end date of the selected range as a datetime.date
228 228
229 229 Return:
230 230 Boolean : returns True if the folder contains data within the specified
231 231 date range, otherwise returns False.
232 232 Exceptions:
233 233 If the directory does not have the proper format
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239 print("The folder %s does not have the right format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256 Returns 1 if the data file falls within the specified date range.
257 257
258 258 Inputs:
259 259 filename : full name of the data file in Jicamarca format (.r)
260 260
261 261 Its format should be "?YYYYDDDsss"
262 262
263 263 where:
264 264 YYYY : year (e.g. 2015)
265 265 DDD : day of the year (e.g. 305)
266 266 sss : set
267 267
268 268 startDate : start date of the selected range as a datetime.date
269 269
270 270 endDate : end date of the selected range as a datetime.date
271 271
272 272 Return:
273 273 Boolean : returns True if the data file contains data within the specified
274 274 date range, otherwise returns False.
275 275 Exceptions:
276 276 If the file does not have the proper format
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282 print("The filename %s does not have the right format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
331 331
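The set-matching step above relies on the xYYYYDDDSSS.ext naming convention; a small illustration of the fnmatch pattern it builds (the file names are hypothetical):

    import fnmatch

    files = ['D2009306001.r', 'D2009307000.r', 'D2009307001.r', 'readme.txt']
    year, doy, set_number = 2009, 307, 1
    pattern = '*%4.4d%3.3d%3.3d*' % (year, doy, set_number)   # -> '*2009307001*'
    print(fnmatch.filter(files, pattern))                     # ['D2009307001.r']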
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335 Filters the files found in the given path, keeping only those matching the "PYYYYDDDSSS.ext" format,
336 336 and returns the last file of the remaining list.
337 337
338 338 Input:
339 339 path : folder containing the files (file names are considered without path)
340 340 ext : extension of the files contained in the folder
341 341
342 342 Return:
343 343 The last file of the given folder; the path is not included.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
426 426
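parse_format replaces each strftime directive in the pattern with the slice of the raw name it should occupy, so the result can be fed back to strptime; a sketch assuming parse_format and DT_DIRECTIVES above are in scope (the folder name is hypothetical):

    import datetime

    folder = 'd2021150'                         # prefix letter + YYYY + DDD
    folderfmt = '*%Y%j'                         # same default folder format used by JRODataReader below
    trimmed = parse_format(folder, folderfmt)   # -> '*2021150'
    dt = datetime.datetime.strptime(trimmed, folderfmt).date()
    print(dt)                                   # 2021-05-30 (day of year 150)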
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # number of retries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499 499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 509 for folder in folders:
510 510 try:
511 511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515 log.log('Skipping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517 log.log('Skipping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520 520
521 521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
525 525 files = glob.glob1(path, '*{}'.format(ext))
526 526 files.sort()
527 527 if last:
528 528 if files:
529 529 fo = files[-1]
530 530 try:
531 531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 532 yield os.path.join(path, expLabel, fo)
533 533 except Exception as e:
534 534 pass
535 535 return
536 536 else:
537 537 return
538 538
539 539 for fo in files:
540 540 try:
541 541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 542 if dt >= startDate and dt <= endDate:
543 543 yield os.path.join(path, expLabel, fo)
544 544 else:
545 545 log.log('Skipping file {}'.format(fo), self.name)
546 546 except Exception as e:
547 547 log.log('Skipping file {}'.format(fo), self.name)
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
555 555 Return:
556 556 Generator of files
557 557 """
558 558
559 559 if walk:
560 560 folders = self.find_folders(
561 561 path, startDate, endDate, folderfmt)
562 562 else:
563 563 folders = path.split(',')
564 564
565 565 return self.find_files(
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
573 573 Arguments:
574 574 path : folder that contains the data files
575 575 expLabel : name of the sub-experiment (subfolder)
576 576 ext : extension of the files
577 577 walk : if enabled, the search descends into the day subdirectories (doypath)
578 578
579 579 Return:
580 580 generator with the full path of last filename
581 581 """
582 582
583 583 if walk:
584 584 folders = self.find_folders(
585 585 path, startDate, endDate, folderfmt, last=True)
586 586 else:
587 587 folders = path.split(',')
588 588
589 589 return self.find_files(
590 590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 591
592 592 def setNextFile(self):
593 593 """Set the next file to be readed open it and parse de file header"""
594 594
595 595 while True:
596 596 if self.fp != None:
597 597 self.fp.close()
598 598
599 599 if self.online:
600 600 newFile = self.setNextFileOnline()
601 601 else:
602 602 newFile = self.setNextFileOffline()
603 603
604 604 if not(newFile):
605 605 if self.online:
606 606 raise schainpy.admin.SchainError('Time limit waiting for new files reached')
607 607 else:
608 608 if self.fileIndex == -1:
609 609 raise schainpy.admin.SchainWarning('No files found in the given path')
610 610 else:
611 611 raise schainpy.admin.SchainWarning('No more files to read')
612 612
613 613 if self.verifyFile(self.filename):
614 614 break
615 615
616 616 log.log('Opening file: %s' % self.filename, self.name)
617 617
618 618 self.readFirstHeader()
619 619 self.nReadBlocks = 0
620 620
621 621 def setNextFileOnline(self):
622 622 """Check for the next file to be readed in online mode.
623 623
624 624 Set:
625 625 self.filename
626 626 self.fp
627 627 self.filesize
628 628
629 629 Return:
630 630 boolean
631 631
632 632 """
633 633 nextFile = True
634 634 nextDay = False
635 635
636 636 for nFiles in range(self.nFiles+1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
640 640 break
641 641 log.warning(
642 642 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 643 self.name)
644 644 time.sleep(self.delay)
645 645 nextFile = False
646 646 continue
647 647
648 648 if fullfilename is not None:
649 649 break
650 650
651 651 self.nTries = 1
652 652 nextFile = True
653 653
654 654 if nFiles == (self.nFiles - 1):
655 655 log.log('Trying with next day...', self.name)
656 656 nextDay = True
657 657 self.nTries = 3
658 658
659 659 if fullfilename:
660 660 self.fileSize = os.path.getsize(fullfilename)
661 661 self.filename = fullfilename
662 662 self.flagIsNewFile = 1
663 663 if self.fp != None:
664 664 self.fp.close()
665 665 self.fp = self.open_file(fullfilename, self.open_mode)
666 666 self.flagNoMoreFiles = 0
667 667 self.fileIndex += 1
668 668 return 1
669 669 else:
670 670 return 0
671 671
672 672 def setNextFileOffline(self):
673 673 """Open the next file to be readed in offline mode"""
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 677 self.fileIndex +=1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
681 681
682 682 self.filename = filename
683 683 self.fileSize = os.path.getsize(filename)
684 684 self.fp = self.open_file(filename, self.open_mode)
685 685 self.flagIsNewFile = 1
686 686
687 687 return 1
688 688
689 689 @staticmethod
690 690 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 691 """Check if the given datetime is in range"""
692 692 startDateTime= datetime.datetime.combine(startDate,startTime)
693 693 endDateTime = datetime.datetime.combine(endDate,endTime)
694 #print("dt eval: ", dt, startDateTime,endDateTime)
694
695 695 if startDateTime <= dt <= endDateTime:
696 696 return True
697 697 return False
698 698
699 699 def verifyFile(self, filename):
700 700 """Check for a valid file
701 701
702 702 Arguments:
703 703 filename -- full path filename
704 704
705 705 Return:
706 706 boolean
707 707 """
708 708
709 709 return True
710 710
711 711 def checkForRealPath(self, nextFile, nextDay):
712 712 """Check if the next file to be readed exists"""
713 713
714 714 raise NotImplementedError
715 715
716 716 def readFirstHeader(self):
717 717 """Parse the file header"""
718 718
719 719 pass
720 720
721 721 def waitDataBlock(self, pointer_location, blocksize=None):
722 722 """
723 723 """
724 724
725 725 currentPointer = pointer_location
726 726 if blocksize is None:
727 727 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
728 728 else:
729 729 neededSize = blocksize
730 730
731 731 for nTries in range(self.nTries):
732 732 self.fp.close()
733 733 self.fp = open(self.filename, 'rb')
734 734 self.fp.seek(currentPointer)
735 735
736 736 self.fileSize = os.path.getsize(self.filename)
737 737 currentSize = self.fileSize - currentPointer
738 738
739 739 if (currentSize >= neededSize):
740 740 return 1
741 741
742 742 log.warning(
743 743 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
744 744 self.name
745 745 )
746 746 time.sleep(self.delay)
747 747
748 748 return 0
749 749
750 750 class JRODataReader(Reader):
751 751
752 752 utc = 0
753 753 nReadBlocks = 0
754 754 foldercounter = 0
755 755 firstHeaderSize = 0
756 756 basicHeaderSize = 24
757 757 __isFirstTimeOnline = 1
758 758 filefmt = "*%Y%j***"
759 759 folderfmt = "*%Y%j"
760 760 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
761 761
762 762 def getDtypeWidth(self):
763 763
764 764 dtype_index = get_dtype_index(self.dtype)
765 765 dtype_width = get_dtype_width(dtype_index)
766 766
767 767 return dtype_width
768 768
769 769 def checkForRealPath(self, nextFile, nextDay):
770 770 """Check if the next file to be readed exists.
771 771
772 772 Example :
773 773 nombre correcto del file es .../.../D2009307/P2009307367.ext
774 774
775 775 Entonces la funcion prueba con las siguientes combinaciones
776 776 .../.../y2009307367.ext
777 777 .../.../Y2009307367.ext
778 778 .../.../x2009307/y2009307367.ext
779 779 .../.../x2009307/Y2009307367.ext
780 780 .../.../X2009307/y2009307367.ext
781 781 .../.../X2009307/Y2009307367.ext
782 782 siendo para este caso, la ultima combinacion de letras, identica al file buscado
783 783
784 784 Return:
785 785 str -- fullpath of the file
786 786 """
787 787
788 788
789 789 if nextFile:
790 790 self.set += 1
791 791 if nextDay:
792 792 self.set = 0
793 793 self.doy += 1
794 794 foldercounter = 0
795 795 prefixDirList = [None, 'd', 'D']
796 796 if self.ext.lower() == ".r": # voltage
797 797 prefixFileList = ['d', 'D']
798 798 elif self.ext.lower() == ".pdata": # spectra
799 799 prefixFileList = ['p', 'P']
800 800
801 801 # sweep over the possible combinations
802 802 for prefixDir in prefixDirList:
803 803 thispath = self.path
804 804 if prefixDir != None:
805 805 # build the directory name xYYYYDDD (x=d or x=D)
806 806 if foldercounter == 0:
807 807 thispath = os.path.join(self.path, "%s%04d%03d" %
808 808 (prefixDir, self.year, self.doy))
809 809 else:
810 810 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
811 811 prefixDir, self.year, self.doy, foldercounter))
812 812 for prefixFile in prefixFileList: # sweep over the two possible cases of the file prefix letter
813 813 # build the file name xYYYYDDDSSS.ext
814 814 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
815 815 fullfilename = os.path.join(
816 816 thispath, filename)
817 817
818 818 if os.path.exists(fullfilename):
819 819 return fullfilename, filename
820 820
821 821 return None, filename
822 822
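A standalone sketch of the directory/file name candidates the method above enumerates for a voltage ('.r') file; the year, day of year and set number are illustrative:

    import itertools
    import os

    year, doy, set_number, ext = 2009, 307, 367, '.r'
    prefix_dirs = [None, 'd', 'D']   # same candidates as prefixDirList
    prefix_files = ['d', 'D']        # prefixes used for '.r' (voltage) files

    for pdir, pfile in itertools.product(prefix_dirs, prefix_files):
        folder = '' if pdir is None else '%s%04d%03d' % (pdir, year, doy)
        filename = '%s%04d%03d%03d%s' % (pfile, year, doy, set_number, ext)
        print(os.path.join(folder, filename))   # each candidate is tested with os.path.exists()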
823 823 def __waitNewBlock(self):
824 824 """
825 825 Returns 1 if a new data block was found, 0 otherwise.
826 826
827 827 In offline reading mode it always returns 0.
828 828 """
829 829 if not self.online:
830 830 return 0
831 831
832 832 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
833 833 return 0
834 834
835 835 currentPointer = self.fp.tell()
836 836
837 837 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
838 838
839 839 for nTries in range(self.nTries):
840 840
841 841 self.fp.close()
842 842 self.fp = open(self.filename, 'rb')
843 843 self.fp.seek(currentPointer)
844 844
845 845 self.fileSize = os.path.getsize(self.filename)
846 846 currentSize = self.fileSize - currentPointer
847 847
848 848 if (currentSize >= neededSize):
849 849 self.basicHeaderObj.read(self.fp)
850 850 return 1
851 851
852 852 if self.fileSize == self.fileSizeByHeader:
853 853 # self.flagEoF = True
854 854 return 0
855 855
856 856 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
857 857 time.sleep(self.delay)
858 858
859 859 return 0
860 860
861 861 def __setNewBlock(self):
862 862
863 863 if self.fp == None:
864 864 return 0
865 865
866 866 if self.flagIsNewFile:
867 867 self.lastUTTime = self.basicHeaderObj.utc
868 868 return 1
869 869
870 870 if self.realtime:
871 871 self.flagDiscontinuousBlock = 1
872 872 if not(self.setNextFile()):
873 873 return 0
874 874 else:
875 875 return 1
876 876
877 877 currentSize = self.fileSize - self.fp.tell()
878 878 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
879 879
880 880 if (currentSize >= neededSize):
881 881 self.basicHeaderObj.read(self.fp)
882 882 self.lastUTTime = self.basicHeaderObj.utc
883 883 return 1
884 884
885 885 if self.__waitNewBlock():
886 886 self.lastUTTime = self.basicHeaderObj.utc
887 887 return 1
888 888
889 889 if not(self.setNextFile()):
890 890 return 0
891 891
892 892 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
893 893 self.lastUTTime = self.basicHeaderObj.utc
894 894
895 895 self.flagDiscontinuousBlock = 0
896 896
897 897 if deltaTime > self.maxTimeStep:
898 898 self.flagDiscontinuousBlock = 1
899 899
900 900 return 1
901 901
902 902 def readNextBlock(self):
903 903
904 904 while True:
905 905 if not(self.__setNewBlock()):
906 906 continue
907 907
908 908 if not(self.readBlock()):
909 909 return 0
910 910
911 911 self.getBasicHeader()
912 912
913 913 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
914 914 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
915 915 self.processingHeaderObj.dataBlocksPerFile,
916 916 self.dataOut.datatime.ctime()))
917 917 continue
918 918
919 919 break
920 920
921 921 if self.verbose:
922 922 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
923 923 self.processingHeaderObj.dataBlocksPerFile,
924 924 self.dataOut.datatime.ctime()))
925 925 return 1
926 926
927 927 def readFirstHeader(self):
928 928
929 929 self.basicHeaderObj.read(self.fp)
930 930 self.systemHeaderObj.read(self.fp)
931 931 self.radarControllerHeaderObj.read(self.fp)
932 932 self.processingHeaderObj.read(self.fp)
933 933 self.firstHeaderSize = self.basicHeaderObj.size
934 934
935 935 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
936 936 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
937 937 if datatype == 0:
938 938 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
939 939 elif datatype == 1:
940 940 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
941 941 elif datatype == 2:
942 942 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
943 943 elif datatype == 3:
944 944 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
945 945 elif datatype == 4:
946 946 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
947 947 elif datatype == 5:
948 948 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
949 949 else:
950 950 raise ValueError('Data type was not defined')
951 951
952 952 self.dtype = datatype_str
953 953 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
954 954 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
955 955 self.firstHeaderSize + self.basicHeaderSize * \
956 956 (self.processingHeaderObj.dataBlocksPerFile - 1)
957 957 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
958 958 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
959 959 self.getBlockDimension()
960 960
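    # Sketch of the datatype decoding in readFirstHeader (this assumes the
    # PROCFLAG.DATATYPE_* bits are consecutive powers of two starting at
    # DATATYPE_CHAR, which is what the log2 arithmetic relies on):
    #   datatype = log2(processFlags & DATATYPE_MASK) - log2(DATATYPE_CHAR)
    # e.g. a file whose datatype bit equals DATATYPE_CHAR << 4 decodes to 4,
    # i.e. complex samples stored as float32 real/imag pairs.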
961 961 def verifyFile(self, filename):
962 962
963 963 flag = True
964 964
965 965 try:
966 966 fp = open(filename, 'rb')
967 967 except IOError:
968 968 log.error("File {} can't be opened".format(filename), self.name)
969 969 return False
970 970
971 971 if self.online and self.waitDataBlock(0):
972 972 pass
973 973
974 974 basicHeaderObj = BasicHeader(LOCALTIME)
975 975 systemHeaderObj = SystemHeader()
976 976 radarControllerHeaderObj = RadarControllerHeader()
977 977 processingHeaderObj = ProcessingHeader()
978 978
979 979 if not(basicHeaderObj.read(fp)):
980 980 flag = False
981 981 if not(systemHeaderObj.read(fp)):
982 982 flag = False
983 983 if not(radarControllerHeaderObj.read(fp)):
984 984 flag = False
985 985 if not(processingHeaderObj.read(fp)):
986 986 flag = False
987 987 if not self.online:
988 988 dt1 = basicHeaderObj.datatime
989 989 pos = self.fileSize-processingHeaderObj.blockSize-24
990 990 if pos<0:
991 991 flag = False
992 992 log.error('Invalid size for file: {}'.format(self.filename), self.name)
993 993 else:
994 994 fp.seek(pos)
995 995 if not(basicHeaderObj.read(fp)):
996 996 flag = False
997 997 dt2 = basicHeaderObj.datatime
998 998 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
999 999 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
1000 1000 flag = False
1001 1001
1002 1002 fp.close()
1003 1003 return flag
1004 1004
1005 1005 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1006 1006
1007 1007 path_empty = True
1008 1008
1009 1009 dateList = []
1010 1010 pathList = []
1011 1011
1012 1012 multi_path = path.split(',')
1013 1013
1014 1014 if not walk:
1015 1015
1016 1016 for single_path in multi_path:
1017 1017
1018 1018 if not os.path.isdir(single_path):
1019 1019 continue
1020 1020
1021 1021 fileList = glob.glob1(single_path, "*" + ext)
1022 1022
1023 1023 if not fileList:
1024 1024 continue
1025 1025
1026 1026 path_empty = False
1027 1027
1028 1028 fileList.sort()
1029 1029
1030 1030 for thisFile in fileList:
1031 1031
1032 1032 if not os.path.isfile(os.path.join(single_path, thisFile)):
1033 1033 continue
1034 1034
1035 1035 if not isRadarFile(thisFile):
1036 1036 continue
1037 1037
1038 1038 if not isFileInDateRange(thisFile, startDate, endDate):
1039 1039 continue
1040 1040
1041 1041 thisDate = getDateFromRadarFile(thisFile)
1042 1042
1043 1043 if thisDate in dateList or single_path in pathList:
1044 1044 continue
1045 1045
1046 1046 dateList.append(thisDate)
1047 1047 pathList.append(single_path)
1048 1048
1049 1049 else:
1050 1050 for single_path in multi_path:
1051 1051
1052 1052 if not os.path.isdir(single_path):
1053 1053 continue
1054 1054
1055 1055 dirList = []
1056 1056
1057 1057 for thisPath in os.listdir(single_path):
1058 1058
1059 1059 if not os.path.isdir(os.path.join(single_path, thisPath)):
1060 1060 continue
1061 1061
1062 1062 if not isRadarFolder(thisPath):
1063 1063 continue
1064 1064
1065 1065 if not isFolderInDateRange(thisPath, startDate, endDate):
1066 1066 continue
1067 1067
1068 1068 dirList.append(thisPath)
1069 1069
1070 1070 if not dirList:
1071 1071 continue
1072 1072
1073 1073 dirList.sort()
1074 1074
1075 1075 for thisDir in dirList:
1076 1076
1077 1077 datapath = os.path.join(single_path, thisDir, expLabel)
1078 1078 fileList = glob.glob1(datapath, "*" + ext)
1079 1079
1080 1080 if not fileList:
1081 1081 continue
1082 1082
1083 1083 path_empty = False
1084 1084
1085 1085 thisDate = getDateFromRadarFolder(thisDir)
1086 1086
1087 1087 pathList.append(datapath)
1088 1088 dateList.append(thisDate)
1089 1089
1090 1090 dateList.sort()
1091 1091
1092 1092 if walk:
1093 1093 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1094 1094 else:
1095 1095 pattern_path = multi_path[0]
1096 1096
1097 1097 if path_empty:
1098 1098 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1099 1099 else:
1100 1100 if not dateList:
1101 1101 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1102 1102
1103 1103 if include_path:
1104 1104 return dateList, pathList
1105 1105
1106 1106 return dateList
1107 1107
1108 1108 def setup(self, **kwargs):
1109 1109
1110 1110 self.set_kwargs(**kwargs)
1111 1111 if not self.ext.startswith('.'):
1112 1112 self.ext = '.{}'.format(self.ext)
1113 1113
1114 1114 if self.server is not None:
1115 1115 if 'tcp://' in self.server:
1116 1116 address = self.server
1117 1117 else:
1118 1118 address = 'ipc:///tmp/%s' % self.server
1119 1119 self.server = address
1120 1120 self.context = zmq.Context()
1121 1121 self.receiver = self.context.socket(zmq.PULL)
1122 1122 self.receiver.connect(self.server)
1123 1123 time.sleep(0.5)
1124 1124 print('[Starting] ReceiverData from {}'.format(self.server))
1125 1125 else:
1126 1126 self.server = None
1127 1127 if self.path == None:
1128 1128 raise ValueError("[Reading] The path is not valid")
1129 1129
1130 1130 if self.online:
1131 1131 log.log("[Reading] Searching files in online mode...", self.name)
1132 1132
1133 1133 for nTries in range(self.nTries):
1134 1134 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1135 1135 self.endDate, self.expLabel, self.ext, self.walk,
1136 1136 self.filefmt, self.folderfmt)
1137 1137
1138 1138 try:
1139 1139 fullpath = next(fullpath)
1140 1140 except:
1141 1141 fullpath = None
1142 1142
1143 1143 if fullpath:
1144 1144 break
1145 1145
1146 1146 log.warning(
1147 1147 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1148 1148 self.delay, self.path, nTries + 1),
1149 1149 self.name)
1150 1150 time.sleep(self.delay)
1151 1151
1152 1152 if not(fullpath):
1153 1153 raise schainpy.admin.SchainError(
1154 1154 'There isn\'t any valid file in {}'.format(self.path))
1155 1155
1156 1156 pathname, filename = os.path.split(fullpath)
1157 1157 self.year = int(filename[1:5])
1158 1158 self.doy = int(filename[5:8])
1159 1159 self.set = int(filename[8:11]) - 1
1160 1160 else:
1161 1161 log.log("Searching files in {}".format(self.path), self.name)
1162 1162 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1163 1163 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1164 1164
1165 1165 self.setNextFile()
1166 1166
1167 1167 return
1168 1168
1169 1169 def getBasicHeader(self):
1170 1170
1171 1171 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1172 1172 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1173 1173
1174 1174 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1175 1175
1176 1176 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1177 1177
1178 1178 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1179 1179
1180 1180 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1181 1181
1182 1182 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1183 1183
1184 1184 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1185 1185
1186 1186 def getFirstHeader(self):
1187 1187
1188 1188 raise NotImplementedError
1189 1189
1190 1190 def getData(self):
1191 1191
1192 1192 raise NotImplementedError
1193 1193
1194 1194 def hasNotDataInBuffer(self):
1195 1195
1196 1196 raise NotImplementedError
1197 1197
1198 1198 def readBlock(self):
1199 1199
1200 1200 raise NotImplementedError
1201 1201
1202 1202 def isEndProcess(self):
1203 1203
1204 1204 return self.flagNoMoreFiles
1205 1205
1206 1206 def printReadBlocks(self):
1207 1207
1208 1208 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1209 1209
1210 1210 def printTotalBlocks(self):
1211 1211
1212 1212 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1213 1213
1214 1214 def run(self, **kwargs):
1215 1215 """
1216 1216
1217 1217 Arguments:
1218 1218 path :
1219 1219 startDate :
1220 1220 endDate :
1221 1221 startTime :
1222 1222 endTime :
1223 1223 set :
1224 1224 expLabel :
1225 1225 ext :
1226 1226 online :
1227 1227 delay :
1228 1228 walk :
1229 1229 getblock :
1230 1230 nTxs :
1231 1231 realtime :
1232 1232 blocksize :
1233 1233 blocktime :
1234 1234 skip :
1235 1235 cursor :
1236 1236 warnings :
1237 1237 server :
1238 1238 verbose :
1239 1239 format :
1240 1240 oneDDict :
1241 1241 twoDDict :
1242 1242 independentParam :
1243 1243 """
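        # Minimal usage sketch (illustrative only; in practice a concrete
        # subclass of this reader -- e.g. a voltage or spectra reader, which
        # is an assumption here -- is added to a project):
        #
        #   reader = project.addReadUnit(
        #       name='VoltageReader',          # hypothetical concrete reader
        #       path='/path/to/files',
        #       startDate='2019/01/01', endDate='2019/01/31',
        #       startTime='00:00:00', endTime='23:59:59',
        #       online=0, walk=1)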
1244 1244
1245 1245 if not(self.isConfig):
1246 1246 self.setup(**kwargs)
1247 1247 self.isConfig = True
1248 1248 if self.server is None:
1249 1249 self.getData()
1250 1250 else:
1251 1251 self.getFromServer()
1252 1252
1253 1253
1254 1254 class JRODataWriter(Reader):
1255 1255
1256 1256 """
1257 1257 This class writes data to processed files (.r or .pdata). Data is always
1258 1258 written in blocks.
1259 1259 """
1260 1260
1261 1261 setFile = None
1262 1262 profilesPerBlock = None
1263 1263 blocksPerFile = None
1264 1264 nWriteBlocks = 0
1265 1265 fileDate = None
1266 1266
1267 1267 def __init__(self, dataOut=None):
1268 1268 raise NotImplementedError
1269 1269
1270 1270 def hasAllDataInBuffer(self):
1271 1271 raise NotImplementedError
1272 1272
1273 1273 def setBlockDimension(self):
1274 1274 raise NotImplementedError
1275 1275
1276 1276 def writeBlock(self):
1277 1277 raise NotImplementedError
1278 1278
1279 1279 def putData(self):
1280 1280 raise NotImplementedError
1281 1281
1282 1282 def getDtypeWidth(self):
1283 1283
1284 1284 dtype_index = get_dtype_index(self.dtype)
1285 1285 dtype_width = get_dtype_width(dtype_index)
1286 1286
1287 1287 return dtype_width
1288 1288
1289 1289 def getProcessFlags(self):
1290 1290
1291 1291 processFlags = 0
1292 1292
1293 1293 dtype_index = get_dtype_index(self.dtype)
1294 1294 procflag_dtype = get_procflag_dtype(dtype_index)
1295 1295
1296 1296 processFlags += procflag_dtype
1297 1297
1298 1298 if self.dataOut.flagDecodeData:
1299 1299 processFlags += PROCFLAG.DECODE_DATA
1300 1300
1301 1301 if self.dataOut.flagDeflipData:
1302 1302 processFlags += PROCFLAG.DEFLIP_DATA
1303 1303
1304 1304 if self.dataOut.code is not None:
1305 1305 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1306 1306
1307 1307 if self.dataOut.nCohInt > 1:
1308 1308 processFlags += PROCFLAG.COHERENT_INTEGRATION
1309 1309
1310 1310 if self.dataOut.type == "Spectra":
1311 1311 if self.dataOut.nIncohInt > 1:
1312 1312 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1313 1313
1314 1314 if self.dataOut.data_dc is not None:
1315 1315 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1316 1316
1317 1317 if self.dataOut.flagShiftFFT:
1318 1318 processFlags += PROCFLAG.SHIFT_FFT_DATA
1319 1319
1320 1320 return processFlags
1321 1321
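    # Sketch of how getProcessFlags composes its result: the PROCFLAG
    # constants are disjoint bit masks, so the additions above behave like
    # bitwise ORs. For example (hypothetical case), decoded float32 voltage
    # data with nCohInt > 1 would yield
    #   processFlags = procflag_dtype + PROCFLAG.DECODE_DATA
    #                  + PROCFLAG.COHERENT_INTEGRATION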
1322 1322 def setBasicHeader(self):
1323 1323
1324 1324 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1325 1325 self.basicHeaderObj.version = self.versionFile
1326 1326 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1327 1327 utc = numpy.floor(self.dataOut.utctime)
1328 1328 milisecond = (self.dataOut.utctime - utc) * 1000.0
1329 1329 self.basicHeaderObj.utc = utc
1330 1330 self.basicHeaderObj.miliSecond = milisecond
1331 1331 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1332 1332 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1333 1333 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1334 1334
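    # Worked example of the split above (hypothetical timestamp):
    # dataOut.utctime = 1577836800.25 gives utc = 1577836800 and
    # miliSecond = 250, which is how sub-second time travels in the
    # basic header.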
1335 1335 def setFirstHeader(self):
1336 1336 """
1337 1337 Gets a copy of the First Header
1338 1338
1339 1339 Affected:
1340 1340
1341 1341 self.basicHeaderObj
1342 1342 self.systemHeaderObj
1343 1343 self.radarControllerHeaderObj
1344 1344 self.processingHeaderObj
1345 1345
1346 1346 Return:
1347 1347 None
1348 1348 """
1349 1349
1350 1350 raise NotImplementedError
1351 1351
1352 1352 def __writeFirstHeader(self):
1353 1353 """
1354 1354 Writes the first header of the file, i.e. the Basic header plus the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1355 1355
1356 1356 Affected:
1357 1357 __dataType
1358 1358
1359 1359 Return:
1360 1360 None
1361 1361 """
1362 1362
1363 1363 # COMPUTE PARAMETERS
1364 1364
1365 1365 sizeLongHeader = self.systemHeaderObj.size + \
1366 1366 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1367 1367 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1368 1368
1369 1369 self.basicHeaderObj.write(self.fp)
1370 1370 self.systemHeaderObj.write(self.fp)
1371 1371 self.radarControllerHeaderObj.write(self.fp)
1372 1372 self.processingHeaderObj.write(self.fp)
1373 1373
1374 1374 def __setNewBlock(self):
1375 1375 """
1376 1376 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1377 1377
1378 1378 Return:
1379 1379 0 : if nothing could be written
1380 1380 1 : if the Basic or the First Header was written
1381 1381 """
1382 1382 if self.fp == None:
1383 1383 self.setNextFile()
1384 1384
1385 1385 if self.flagIsNewFile:
1386 1386 return 1
1387 1387
1388 1388 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1389 1389 self.basicHeaderObj.write(self.fp)
1390 1390 return 1
1391 1391
1392 1392 if not(self.setNextFile()):
1393 1393 return 0
1394 1394
1395 1395 return 1
1396 1396
1397 1397 def writeNextBlock(self):
1398 1398 """
1399 1399 Selects the next block of data and writes it to a file
1400 1400
1401 1401 Return:
1402 1402 0 : if the data block could not be written
1403 1403 1 : if the data block was written
1404 1404 """
1405 1405 if not(self.__setNewBlock()):
1406 1406 return 0
1407 1407
1408 1408 self.writeBlock()
1409 1409
1410 1410 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1411 1411 self.processingHeaderObj.dataBlocksPerFile))
1412 1412
1413 1413 return 1
1414 1414
1415 1415 def setNextFile(self):
1416 1416 """Determina el siguiente file que sera escrito
1417 1417
1418 1418 Affected:
1419 1419 self.filename
1420 1420 self.subfolder
1421 1421 self.fp
1422 1422 self.setFile
1423 1423 self.flagIsNewFile
1424 1424
1425 1425 Return:
1426 1426 0 : if the file cannot be written
1427 1427 1 : if the file is ready to be written
1428 1428 """
1429 1429 ext = self.ext
1430 1430 path = self.path
1431 1431
1432 1432 if self.fp != None:
1433 1433 self.fp.close()
1434 1434
1435 1435
1436 1436 if not os.path.exists(path):
1437 1437 os.mkdir(path)
1438 1438
1439 1439 timeTuple = time.localtime(self.dataOut.utctime)
1440 1440 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1441 1441
1442 1442 fullpath = os.path.join(path, subfolder)
1443 1443 setFile = self.setFile
1444 1444
1445 1445 if not(os.path.exists(fullpath)):
1446 1446 os.mkdir(fullpath)
1447 1447 setFile = -1 # initialize the set counter
1448 1448 else:
1449 1449 filesList = os.listdir(fullpath)
1450 1450 if len(filesList) > 0:
1451 1451 filesList = sorted(filesList, key=str.lower)
1452 1452 filen = filesList[-1]
1453 1453 # the filename must have the following format
1454 1454 # 0 1234 567 89A BCDE (hex)
1455 1455 # x YYYY DDD SSS .ext
1456 1456 if isNumber(filen[8:11]):
1457 1457 # initialize the set counter to the set of the last file
1458 1458 setFile = int(filen[8:11])
1459 1459 else:
1460 1460 setFile = -1
1461 1461 else:
1462 1462 setFile = -1 # initialize the set counter
1463 1463
1464 1464 setFile += 1
1465 1465
1466 1466 # If this is a new day it resets some values
1467 1467 if self.dataOut.datatime.date() > self.fileDate:
1468 1468 setFile = 0
1469 1469 self.nTotalBlocks = 0
1470 1470
1471 1471 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1472 1472 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1473 1473
1474 1474 filename = os.path.join(path, subfolder, filen)
1475 1475
1476 1476 fp = open(filename, 'wb')
1477 1477
1478 1478 self.blockIndex = 0
1479 1479 self.filename = filename
1480 1480 self.subfolder = subfolder
1481 1481 self.fp = fp
1482 1482 self.setFile = setFile
1483 1483 self.flagIsNewFile = 1
1484 1484 self.fileDate = self.dataOut.datatime.date()
1485 1485 self.setFirstHeader()
1486 1486
1487 1487 print('[Writing] Opening file: %s' % self.filename)
1488 1488
1489 1489 self.__writeFirstHeader()
1490 1490
1491 1491 return 1
1492 1492
1493 1493 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1494 1494 """
1495 1495 Sets the format in which the data will be saved and writes the First Header
1496 1496
1497 1497 Inputs:
1498 1498 path : directory where data will be saved
1499 1499 profilesPerBlock : number of profiles per block
1500 1500 set : initial file set
1501 1501 datatype : An integer number that defines data type:
1502 1502 0 : int8 (1 byte)
1503 1503 1 : int16 (2 bytes)
1504 1504 2 : int32 (4 bytes)
1505 1505 3 : int64 (8 bytes)
1506 1506 4 : float32 (4 bytes)
1507 1507 5 : double64 (8 bytes)
1508 1508
1509 1509 Return:
1510 1510 0 : if the setup failed
1511 1511 1 : if the setup succeeded
1512 1512 """
1513 1513
1514 1514 if ext == None:
1515 1515 ext = self.ext
1516 1516
1517 1517 self.ext = ext.lower()
1518 1518
1519 1519 self.path = path
1520 1520
1521 1521 if set is None:
1522 1522 self.setFile = -1
1523 1523 else:
1524 1524 self.setFile = set - 1
1525 1525
1526 1526 self.blocksPerFile = blocksPerFile
1527 1527 self.profilesPerBlock = profilesPerBlock
1528 1528 self.dataOut = dataOut
1529 1529 self.fileDate = self.dataOut.datatime.date()
1530 1530 self.dtype = self.dataOut.dtype
1531 1531
1532 1532 if datatype is not None:
1533 1533 self.dtype = get_numpy_dtype(datatype)
1534 1534
1535 1535 if not(self.setNextFile()):
1536 1536 print("[Writing] There isn't a next file")
1537 1537 return 0
1538 1538
1539 1539 self.setBlockDimension()
1540 1540
1541 1541 return 1
1542 1542
1543 1543 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1544 1544
1545 1545 if not(self.isConfig):
1546 1546
1547 1547 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1548 1548 set=set, ext=ext, datatype=datatype, **kwargs)
1549 1549 self.isConfig = True
1550 1550
1551 1551 self.dataOut = dataOut
1552 1552 self.putData()
1553 1553 return self.dataOut
1554 1554
1555 1555 @MPDecorator
1556 1556 class printInfo(Operation):
1557 1557
1558 1558 def __init__(self):
1559 1559
1560 1560 Operation.__init__(self)
1561 1561 self.__printInfo = True
1562 1562
1563 1563 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1564 1564 if self.__printInfo == False:
1565 1565 return
1566 1566
1567 1567 for header in headers:
1568 1568 if hasattr(dataOut, header):
1569 1569 obj = getattr(dataOut, header)
1570 1570 if hasattr(obj, 'printInfo'):
1571 1571 obj.printInfo()
1572 1572 else:
1573 1573 print(obj)
1574 1574 else:
1575 1575 log.warning('Header {} not found in object'.format(header))
1576 1576
1577 1577 self.__printInfo = False
@@ -1,659 +1,659
1 1 '''
2 2 Created on Sep 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18 import schainpy.admin
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28
29 29
30 30 class AMISRReader(ProcessingUnit):
31 31 '''
32 32 classdocs
33 33 '''
34 34
35 35 def __init__(self):
36 36 '''
37 37 Constructor
38 38 '''
39 39
40 40 ProcessingUnit.__init__(self)
41 41
42 42 self.set = None
43 43 self.subset = None
44 44 self.extension_file = '.h5'
45 45 self.dtc_str = 'dtc'
46 46 self.dtc_id = 0
47 47 self.status = True
48 48 self.isConfig = False
49 49 self.dirnameList = []
50 50 self.filenameList = []
51 51 self.fileIndex = None
52 52 self.flagNoMoreFiles = False
53 53 self.flagIsNewFile = 0
54 54 self.filename = ''
55 55 self.amisrFilePointer = None
56 56 self.realBeamCode = []
57 57 self.beamCodeMap = None
58 58 self.azimuthList = []
59 59 self.elevationList = []
60 60 self.dataShape = None
61 61
62 62
63 63
64 64 self.profileIndex = 0
65 65
66 66
67 67 self.beamCodeByFrame = None
68 68 self.radacTimeByFrame = None
69 69
70 70 self.dataset = None
71 71
72 72 self.__firstFile = True
73 73
74 74 self.buffer = None
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 # Is it really necessary to create the output object in the initializer?
81 81 self.dataOut = Voltage()
82 82 self.dataOut.error=False
83 83
84 84
85 85 def setup(self,path=None,
86 86 startDate=None,
87 87 endDate=None,
88 88 startTime=None,
89 89 endTime=None,
90 90 walk=True,
91 91 timezone='ut',
92 92 all=0,
93 93 code = None,
94 94 nCode = 0,
95 95 nBaud = 0,
96 96 online=False):
97 97
98 98
99 99
100 100 self.timezone = timezone
101 101 self.all = all
102 102 self.online = online
103 103
104 104 self.code = code
105 105 self.nCode = int(nCode)
106 106 self.nBaud = int(nBaud)
107 107
108 108
109 109
110 110 #self.findFiles()
111 111 if not(online):
112 112 # Offline file search
113 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
114 114 else:
115 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
116 116
117 117 if not(self.filenameList):
118 118 raise schainpy.admin.SchainWarning("There are no files in the folder: %s" % (path))
119 119 sys.exit()
120 120
121 121 self.fileIndex = 0
122 122
123 123 self.readNextFile(online)
124 124
125 125 '''
126 126 Add code
127 127 '''
128 128 self.isConfig = True
129 129 # print("Setup Done")
130 130 pass
131 131
132 132
133 133 def readAMISRHeader(self,fp):
134 134
135 135 if self.isConfig and (not self.flagNoMoreFiles):
136 136 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
137 137 if self.dataShape != newShape and newShape != None:
138 138 print(self.dataShape, newShape, "\n")
139 139 raise schainpy.admin.SchainError("NEW FILE HAS A DIFFERENT SHAPE")
140 140 return 0
141 141 else:
142 142 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
143 143
144 144
145 145 header = 'Raw11/Data/RadacHeader'
146 146 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
147 147 if (self.startDate > datetime.date(2021, 7, 15)): # the way the beam pointings (Apuntes) are extracted changed on the 17th
148 148 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
149 149 self.trueBeams = self.beamcodeFile.split("\n")
150 150 self.trueBeams.pop()#remove last
151 151 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
152 152 self.beamCode = [int(x, 16) for x in self.realBeamCode]
153 153 else:
154 154 _beamCode = fp.get('Raw11/Data/Beamcodes') # use the method prior to the pointing change
155 155 self.beamCode = _beamCode[0,:]
156 156
157 157 if self.beamCodeMap == None:
158 158 self.beamCodeMap = fp['Setup/BeamcodeMap']
159 159 for beam in self.beamCode:
160 160 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
161 161 beamAziElev = beamAziElev[0].squeeze()
162 162 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
163 163 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
164 164 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
165 165 #print(self.beamCode)
166 166 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
167 167 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
168 168 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
169 169 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
170 170 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
171 171 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
172 172 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
173 173 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
174 174 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
175 175 self.frequency = fp.get('Rx/Frequency')
176 176 txAus = fp.get('Raw11/Data/Pulsewidth')
177 177
178 178
179 179 self.nblocks = self.pulseCount.shape[0] #nblocks
180 180
181 181 self.nprofiles = self.pulseCount.shape[1] #nprofile
182 182 self.nsa = self.nsamplesPulse[0,0] #ngates
183 183 self.nchannels = len(self.beamCode)
184 184 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
185 185 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
186 186 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
187 187
188 188 #filling radar controller header parameters
189 189 self.__ippKm = self.ippSeconds *.15*1e6 # in km
190 190 self.__txA = (txAus.value)*.15 # (pulse width [us] * 0.15 km/us) in km
191 191 self.__txB = 0
192 192 nWindows=1
193 193 self.__nSamples = self.nsa
194 194 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
195 195 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
196 196
197 197 #for now, until we understand why the saved code is different (code included even though it is not in the tuf file)
198 198 #self.__codeType = 0
199 199 # self.__nCode = None
200 200 # self.__nBaud = None
201 201 self.__code = self.code
202 202 self.__codeType = 0
203 203 if self.code != None:
204 204 self.__codeType = 1
205 205 self.__nCode = self.nCode
206 206 self.__nBaud = self.nBaud
207 207 #self.__code = 0
208 208
209 209 #filling system header parameters
210 210 self.__nSamples = self.nsa
211 211 self.newProfiles = self.nprofiles/self.nchannels
212 212 self.__channelList = list(range(self.nchannels))
213 213
214 214 self.__frequency = self.frequency[0][0]
215 215
216 216
217 217 return 1
218 218
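    # Worked sketch of the IPP-derived values above (hypothetical numbers):
    # if consecutive RadacTime samples differ by 0.002 s, then
    #   ippSeconds = 0.002
    #   __ippKm    = 0.002 * 0.15e6 = 300 km
    # and __waitForNewFile = nblocks * nprofiles * ippSeconds, i.e. roughly
    # the time span covered by one file.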
219 219
220 220 def createBuffers(self):
221 221
222 222 pass
223 223
224 224 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
225 225 self.path = path
226 226 self.startDate = startDate
227 227 self.endDate = endDate
228 228 self.startTime = startTime
229 229 self.endTime = endTime
230 230 self.walk = walk
231 231
232 232 def __checkPath(self):
233 233 if os.path.exists(self.path):
234 234 self.status = 1
235 235 else:
236 236 self.status = 0
237 237 print('Path:%s does not exist'%self.path)
238 238
239 239 return
240 240
241 241
242 242 def __selDates(self, amisr_dirname_format):
243 243 try:
244 244 year = int(amisr_dirname_format[0:4])
245 245 month = int(amisr_dirname_format[4:6])
246 246 dom = int(amisr_dirname_format[6:8])
247 247 thisDate = datetime.date(year,month,dom)
248 248 # one extra day of margin; the data is filtered by date and time later anyway
249 249 if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
250 250 return amisr_dirname_format
251 251 except:
252 252 return None
253 253
254 254
255 255 def __findDataForDates(self,online=False):
256 256
257 257 if not(self.status):
258 258 return None
259 259
260 260 pat = r'\d+.\d+'
261 261 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
262 262 dirnameList = [x for x in dirnameList if x!=None]
263 263 dirnameList = [x.string for x in dirnameList]
264 264 if not(online):
265 265 dirnameList = [self.__selDates(x) for x in dirnameList]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 if len(dirnameList)>0:
268 268 self.status = 1
269 269 self.dirnameList = dirnameList
270 270 self.dirnameList.sort()
271 271 else:
272 272 self.status = 0
273 273 return None
274 274
275 275 def __getTimeFromData(self):
276 276 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
277 277 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
278 278
279 279 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
280 280 print('........................................')
281 281 filter_filenameList = []
282 282 self.filenameList.sort()
283 283 #for i in range(len(self.filenameList)-1):
284 284 for i in range(len(self.filenameList)):
285 285 filename = self.filenameList[i]
286 286 fp = h5py.File(filename,'r')
287 287 time_str = fp.get('Time/RadacTimeString')
288 288
289 289 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
290 290 #startDateTimeStr_File = "2019-12-16 09:21:11"
291 291 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
292 292 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
293 293
294 294 #endDateTimeStr_File = "2019-12-16 11:10:11"
295 295 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
296 296 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 297 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 298
299 299 fp.close()
300 300
301 301 #print("check time", startDateTime_File)
302 302 if self.timezone == 'lt':
303 303 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
304 304 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
305 305 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
306 306 filter_filenameList.append(filename)
307 307
308 308 if (startDateTime_File>endDateTime_Reader):
309 309 break
310 310
311 311
312 312 filter_filenameList.sort()
313 313 self.filenameList = filter_filenameList
314 314
315 315 return 1
316 316
317 317 def __filterByGlob1(self, dirName):
318 318 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
319 319 filter_files.sort()
320 320 filterDict = {}
321 321 filterDict.setdefault(dirName)
322 322 filterDict[dirName] = filter_files
323 323 return filterDict
324 324
325 325 def __getFilenameList(self, fileListInKeys, dirList):
326 326 for value in fileListInKeys:
327 327 dirName = list(value.keys())[0]
328 328 for file in value[dirName]:
329 329 filename = os.path.join(dirName, file)
330 330 self.filenameList.append(filename)
331 331
332 332
333 333 def __selectDataForTimes(self, online=False):
334 334 # the time filter is not implemented here yet
335 335 if not(self.status):
336 336 return None
337 337
338 338 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
339 339 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
340 340 self.__getFilenameList(fileListInKeys, dirList)
341 341 if not(online):
342 342 # filter by time
343 343 if not(self.all):
344 344 self.__getTimeFromData()
345 345
346 346 if len(self.filenameList)>0:
347 347 self.status = 1
348 348 self.filenameList.sort()
349 349 else:
350 350 self.status = 0
351 351 return None
352 352
353 353 else:
354 354 #get the last file - 1
355 355 self.filenameList = [self.filenameList[-2]]
356 356 new_dirnameList = []
357 357 for dirname in self.dirnameList:
358 358 junk = numpy.array([dirname in x for x in self.filenameList])
359 359 junk_sum = junk.sum()
360 360 if junk_sum > 0:
361 361 new_dirnameList.append(dirname)
362 362 self.dirnameList = new_dirnameList
363 363 return 1
364 364
365 365 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
366 366 endTime=datetime.time(23,59,59),walk=True):
367 367
368 368 if endDate ==None:
369 369 startDate = datetime.datetime.utcnow().date()
370 370 endDate = datetime.datetime.utcnow().date()
371 371
372 372 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
373 373
374 374 self.__checkPath()
375 375
376 376 self.__findDataForDates(online=True)
377 377
378 378 self.dirnameList = [self.dirnameList[-1]]
379 379
380 380 self.__selectDataForTimes(online=True)
381 381
382 382 return
383 383
384 384
385 385 def searchFilesOffLine(self,
386 386 path,
387 387 startDate,
388 388 endDate,
389 389 startTime=datetime.time(0,0,0),
390 390 endTime=datetime.time(23,59,59),
391 391 walk=True):
392 392
393 393 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
394 394
395 395 self.__checkPath()
396 396
397 397 self.__findDataForDates()
398 398
399 399 self.__selectDataForTimes()
400 400
401 401 for i in range(len(self.filenameList)):
402 402 print("%s" %(self.filenameList[i]))
403 403
404 404 return
405 405
406 406 def __setNextFileOffline(self):
407 407
408 408 try:
409 409 self.filename = self.filenameList[self.fileIndex]
410 410 self.amisrFilePointer = h5py.File(self.filename,'r')
411 411 self.fileIndex += 1
412 412 except:
413 413 self.flagNoMoreFiles = 1
414 414 raise schainpy.admin.SchainError('No more files to read')
415 415 return 0
416 416
417 417 self.flagIsNewFile = 1
418 418 print("Setting the file: %s"%self.filename)
419 419
420 420 return 1
421 421
422 422
423 423 def __setNextFileOnline(self):
424 424 filename = self.filenameList[0]
425 425 if self.__filename_online != None:
426 426 self.__selectDataForTimes(online=True)
427 427 filename = self.filenameList[0]
428 428 wait = 0
429 429 self.__waitForNewFile=300 ## DEBUG:
430 430 while self.__filename_online == filename:
431 431 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
432 432 if wait == 5:
433 433 self.flagNoMoreFiles = 1
434 434 return 0
435 435 sleep(self.__waitForNewFile)
436 436 self.__selectDataForTimes(online=True)
437 437 filename = self.filenameList[0]
438 438 wait += 1
439 439
440 440 self.__filename_online = filename
441 441
442 442 self.amisrFilePointer = h5py.File(filename,'r')
443 443 self.flagIsNewFile = 1
444 444 self.filename = filename
445 445 print("Setting the file: %s"%self.filename)
446 446 return 1
447 447
448 448
449 449 def readData(self):
450 450 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
451 451 re = buffer[:,:,:,0]
452 452 im = buffer[:,:,:,1]
453 453 dataset = re + im*1j
454 454
455 455 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
456 456 timeset = self.radacTime[:,0]
457 457
458 458 return dataset,timeset
459 459
460 460 def reshapeData(self):
461 461 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
462 462 channels = self.beamCodeByPulse[0,:]
463 463 nchan = self.nchannels
464 464 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
465 465 nblocks = self.nblocks
466 466 nsamples = self.nsa
467 467
468 468 #Dimensions : nChannels, nProfiles, nSamples
469 469 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
470 470 ############################################
471 471
472 472 for thisChannel in range(nchan):
473 473 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
474 474
475 475
476 476 new_block = numpy.transpose(new_block, (1,0,2,3))
477 477 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
478 478
479 479 return new_block
480 480
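    # Shape flow of reshapeData (sketch with hypothetical sizes): the raw
    # dataset is (nblocks, nprofiles, nsa); gathering each beam's profiles
    # gives (nblocks, nchan, newProfiles, nsa), the transpose yields
    # (nchan, nblocks, newProfiles, nsa) and the final reshape flattens it to
    # (nchan, nblocks*newProfiles, nsa), e.g. 4 beams, 10 blocks, 100
    # profiles and 512 gates -> (4, 1000, 512).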
481 481 def updateIndexes(self):
482 482
483 483 pass
484 484
485 485 def fillJROHeader(self):
486 486
487 487 #fill radar controller header
488 488 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
489 489 txA=self.__txA,
490 490 txB=0,
491 491 nWindows=1,
492 492 nHeights=self.__nSamples,
493 493 firstHeight=self.__firstHeight,
494 494 deltaHeight=self.__deltaHeight,
495 495 codeType=self.__codeType,
496 496 nCode=self.__nCode, nBaud=self.__nBaud,
497 497 code = self.__code,
498 498 fClock=1)
499 499
500 500 #fill system header
501 501 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
502 502 nProfiles=self.newProfiles,
503 503 nChannels=len(self.__channelList),
504 504 adcResolution=14,
505 505 pciDioBusWidth=32)
506 506
507 507 self.dataOut.type = "Voltage"
508 508 self.dataOut.data = None
509 509 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
510 510 # self.dataOut.nChannels = 0
511 511
512 512 # self.dataOut.nHeights = 0
513 513
514 514 self.dataOut.nProfiles = self.newProfiles*self.nblocks
515 515 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
516 516 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
517 517 self.dataOut.heightList = ranges/1000.0 #km
518 518 self.dataOut.channelList = self.__channelList
519 519 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
520 520
521 521 # self.dataOut.channelIndexList = None
522 522
523 523
524 524 self.dataOut.azimuthList = numpy.array(self.azimuthList)
525 525 self.dataOut.elevationList = numpy.array(self.elevationList)
526 526 self.dataOut.codeList = numpy.array(self.beamCode)
527 527 #print(self.dataOut.elevationList)
528 528 self.dataOut.flagNoData = True
529 529
530 530 #Set to TRUE if the data is discontinuous
531 531 self.dataOut.flagDiscontinuousBlock = False
532 532
533 533 self.dataOut.utctime = None
534 534
535 535 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
536 536 if self.timezone == 'lt':
537 537 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
538 538 else:
539 539 self.dataOut.timeZone = 0 #by default time is UTC
540 540
541 541 self.dataOut.dstFlag = 0
542 542 self.dataOut.errorCount = 0
543 543 self.dataOut.nCohInt = 1
544 544 self.dataOut.flagDecodeData = False # assume the data is already decoded
545 545 self.dataOut.flagDeflipData = False # assume the data is not flipped
546 546 self.dataOut.flagShiftFFT = False
547 547 self.dataOut.ippSeconds = self.ippSeconds
548 548
549 549 #Time interval between profiles
550 550 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
551 551
552 552 self.dataOut.frequency = self.__frequency
553 553 self.dataOut.realtime = self.online
554 554 pass
555 555
556 556 def readNextFile(self,online=False):
557 557
558 558 if not(online):
559 559 newFile = self.__setNextFileOffline()
560 560 else:
561 561 newFile = self.__setNextFileOnline()
562 562
563 563 if not(newFile):
564 564 self.dataOut.error = True
565 565 return 0
566 566
567 567 if not self.readAMISRHeader(self.amisrFilePointer):
568 568 self.dataOut.error = True
569 569 return 0
570 570
571 571 self.createBuffers()
572 572 self.fillJROHeader()
573 573
574 574 #self.__firstFile = False
575 575
576 576
577 577
578 578 self.dataset,self.timeset = self.readData()
579 579
580 580 if self.endDate!=None:
581 581 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
582 582 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
583 583 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
584 584 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
585 585 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
586 586 if self.timezone == 'lt':
587 587 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
588 588 if (startDateTime_File>endDateTime_Reader):
589 589 return 0
590 590
591 591 self.jrodataset = self.reshapeData()
592 592 #----self.updateIndexes()
593 593 self.profileIndex = 0
594 594
595 595 return 1
596 596
597 597
598 598 def __hasNotDataInBuffer(self):
599 599 if self.profileIndex >= (self.newProfiles*self.nblocks):
600 600 return 1
601 601 return 0
602 602
603 603
604 604 def getData(self):
605 605
606 606 if self.flagNoMoreFiles:
607 607 self.dataOut.flagNoData = True
608 608 return 0
609 609
610 610 if self.__hasNotDataInBuffer():
611 611 if not (self.readNextFile(self.online)):
612 612 return 0
613 613
614 614
615 615 if self.dataset is None: # set this condition when there is no data left to read
616 616 self.dataOut.flagNoData = True
617 617 return 0
618 618
619 619 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
620 620
621 621 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
622 622
623 623 #print("R_t",self.timeset)
624 624
625 625 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
626 626 # check the jro data basic header to see whether it is compatible with this value
627 627 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
628 628 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
629 629 indexblock = self.profileIndex/self.newProfiles
630 630 #print (indexblock, indexprof)
631 631 diffUTC = 0
632 632 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
633 633
634 634 #print("utc :",indexblock," __ ",t_comp)
635 635 #print(numpy.shape(self.timeset))
636 636 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
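        # e.g. (hypothetical values) newProfiles=100, ippSeconds=0.01 and
        # nchannels=4: profileIndex=250 -> indexprof=50, int(indexblock)=2,
        # so utctime = timeset[2] + 50*0.01*4 = timeset[2] + 2.0 s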
637 637 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
638 638
639 639 self.dataOut.profileIndex = self.profileIndex
640 640 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
641 641 self.dataOut.flagNoData = False
642 642 # if indexprof == 0:
643 643 # print("kamisr: ",self.dataOut.utctime)
644 644
645 645 self.profileIndex += 1
646 646
647 return self.dataOut.data
647 return self.dataOut.data # is this return necessary?
648 648
649 649
650 650 def run(self, **kwargs):
651 651 '''
652 652 This method will be called many times so here you should put all your code
653 653 '''
654 654 #print("running kamisr")
655 655 if not self.isConfig:
656 656 self.setup(**kwargs)
657 657 self.isConfig = True
658 658
659 659 self.getData()
@@ -1,677 +1,673
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default
19 19 the files contain two groups, Data and Metadata, and every variable is loaded
20 20 as a `dataOut` attribute.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 self.dataOut = Parameters()
85
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 self.dataOut = Parameters()
99 self.dataOut.error = False ## NOTE: important to define this before starting
100 self.dataOut.flagNoData = True
101
98 102 def setup(self, **kwargs):
99 103
100 104 self.set_kwargs(**kwargs)
101 105 if not self.ext.startswith('.'):
102 106 self.ext = '.{}'.format(self.ext)
103 107
104 108 if self.online:
105 109 log.log("Searching files in online mode...", self.name)
106 110
107 111 for nTries in range(self.nTries):
108 112 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 113 self.endDate, self.expLabel, self.ext, self.walk,
110 114 self.filefmt, self.folderfmt)
111 115 pathname, filename = os.path.split(fullpath)
112 #print(pathname,filename)
116
113 117 try:
114 118 fullpath = next(fullpath)
115 119
116 120 except:
117 121 fullpath = None
118 122
119 123 if fullpath:
120 124 break
121 125
122 126 log.warning(
123 127 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 128 self.delay, self.path, nTries + 1),
125 129 self.name)
126 130 time.sleep(self.delay)
127 131
128 132 if not(fullpath):
129 133 raise schainpy.admin.SchainError(
130 134 'There isn\'t any valid file in {}'.format(self.path))
131 135
132 136 pathname, filename = os.path.split(fullpath)
133 137 self.year = int(filename[1:5])
134 138 self.doy = int(filename[5:8])
135 139 self.set = int(filename[8:11]) - 1
136 140 else:
137 141 log.log("Searching files in {}".format(self.path), self.name)
138 142 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 143 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
140 144
141 145 self.setNextFile()
142 146
143 return
147
144 148
145 149
146 150 def readFirstHeader(self):
147 151 '''Read metadata and data'''
148 152
149 153 self.__readMetadata()
150 154 self.__readData()
151 155 self.__setBlockList()
152 156
153 if 'type' in self.meta:
154 ##print("Creting dataOut...")
155 self.dataOut = eval(self.meta['type'])()
156 ##print(vars(self.dataOut))
157 157
158 158 for attr in self.meta:
159 ##print("attr: ", attr)
160 ##print(type(self.dataOut).__name__)
161 setattr(self.dataOut, attr, self.meta[attr])
162 159
160 setattr(self.dataOut, attr, self.meta[attr])
161 self.dataOut.error=False
162 self.dataOut.flagNoData = False
163 163 self.blockIndex = 0
164 164
165 165 return
166 166
167 167 def __setBlockList(self):
168 168 '''
169 169 Selects the data within the times defined
170 170
171 171 self.fp
172 172 self.startTime
173 173 self.endTime
174 174 self.blockList
175 175 self.blocksPerFile
176 176
177 177 '''
178 178
179 179 startTime = self.startTime
180 180 endTime = self.endTime
181 181 thisUtcTime = self.data['utctime'] + self.utcoffset
182 182 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
183 183 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
184 184 self.startFileDatetime = thisDatetime
185 185 thisDate = thisDatetime.date()
186 186 thisTime = thisDatetime.time()
187 187
188 188 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
189 189 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
190 190
191 191 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
192 192
193 193 self.blockList = ind
194 194 self.blocksPerFile = len(ind)
195 195 self.blocksPerFile = len(thisUtcTime)
196 196 return
197 197
198 198 def __readMetadata(self):
199 199 '''
200 200 Reads Metadata
201 201 '''
202 202
203 203 meta = {}
204 204
205 205 if self.description:
206 206 for key, value in self.description['Metadata'].items():
207 207 meta[key] = self.fp[value][()]
208 208 else:
209 209 grp = self.fp['Metadata']
210 210 for name in grp:
211 211 meta[name] = grp[name][()]
212 212
213 213 if self.extras:
214 214 for key, value in self.extras.items():
215 215 meta[key] = value
216 216 self.meta = meta
217 217
218 218 return
219 219
220 220
221 221
222 222 def checkForRealPath(self, nextFile, nextDay):
223 223
224 224 # print("check FRP")
225 225 # dt = self.startFileDatetime + datetime.timedelta(1)
226 226 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
227 227 # fullfilename = os.path.join(self.path, filename)
228 228 # print("check Path ",fullfilename,filename)
229 229 # if os.path.exists(fullfilename):
230 230 # return fullfilename, filename
231 231 # return None, filename
232 232 return None,None
233 233
234 234 def __readData(self):
235 235
236 236 data = {}
237 237
238 238 if self.description:
239 239 for key, value in self.description['Data'].items():
240 240 if isinstance(value, str):
241 241 if isinstance(self.fp[value], h5py.Dataset):
242 242 data[key] = self.fp[value][()]
243 243 elif isinstance(self.fp[value], h5py.Group):
244 244 array = []
245 245 for ch in self.fp[value]:
246 246 array.append(self.fp[value][ch][()])
247 247 data[key] = numpy.array(array)
248 248 elif isinstance(value, list):
249 249 array = []
250 250 for ch in value:
251 251 array.append(self.fp[ch][()])
252 252 data[key] = numpy.array(array)
253 253 else:
254 254 grp = self.fp['Data']
255 255 for name in grp:
256 256 if isinstance(grp[name], h5py.Dataset):
257 257 array = grp[name][()]
258 258 elif isinstance(grp[name], h5py.Group):
259 259 array = []
260 260 for ch in grp[name]:
261 261 array.append(grp[name][ch][()])
262 262 array = numpy.array(array)
263 263 else:
264 264 log.warning('Unknown type: {}'.format(name))
265 265
266 266 if name in self.description:
267 267 key = self.description[name]
268 268 else:
269 269 key = name
270 270 data[key] = array
271 271
272 272 self.data = data
273 273 return
274 274
275 275 def getData(self):
276 276 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
277 277 self.dataOut.flagNoData = True
278 278 self.blockIndex = self.blocksPerFile
279 #self.dataOut.error = True TERMINATES THE PROGRAM, removed
279 self.dataOut.error = True # TERMINATES THE PROGRAM
280 280 return
281 281 for attr in self.data:
282 #print("attr ",attr)
282
283 283 if self.data[attr].ndim == 1:
284 284 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
285 285 else:
286 286 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
287 287
288 288
289 289 self.blockIndex += 1
290 290
291 291 if self.blockIndex == 1:
292 292 log.log("Block No. {}/{} -> {}".format(
293 293 self.blockIndex,
294 294 self.blocksPerFile,
295 295 self.dataOut.datatime.ctime()), self.name)
296 296 else:
297 297 log.log("Block No. {}/{} ".format(
298 298 self.blockIndex,
299 299 self.blocksPerFile),self.name)
300 300
301 if self.blockIndex == self.blocksPerFile:
302 self.setNextFile()
303
301 304 self.dataOut.flagNoData = False
302 self.dataOut.error = False
303 return
305
304 306
305 307 def run(self, **kwargs):
306 308
307 309 if not(self.isConfig):
308 310 self.setup(**kwargs)
309 311 self.isConfig = True
310 312
311 if self.blockIndex == self.blocksPerFile:
312 self.setNextFile()
313
314 313 self.getData()
315 314
316 return
317
318 315 @MPDecorator
319 316 class HDFWriter(Operation):
320 317 """Operation to write HDF5 files.
321 318
322 319 By default the HDF5 file contains two groups, Data and Metadata, where
323 320 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
324 321 parameters; data attributes are normally time dependent whereas the metadata
325 322 are not.
326 323 It is possible to customize the structure of the HDF5 file with the
327 324 optional description parameter see the examples.
328 325
329 326 Parameters:
330 327 -----------
331 328 path : str
332 329 Path where files will be saved.
333 330 blocksPerFile : int
334 331 Number of blocks per file
335 332 metadataList : list
336 333 List of the dataOut attributes that will be saved as metadata
337 334 dataList : list
338 335 List of the dataOut attributes that will be saved as data
339 336 setType : bool
340 337 If True the name of the files corresponds to the timestamp of the data
341 338 description : dict, optional
342 339 Dictionary with the desired description of the HDF5 file
343 340
344 341 Examples
345 342 --------
346 343
347 344 desc = {
348 345 'data_output': {'winds': ['z', 'w', 'v']},
349 346 'utctime': 'timestamps',
350 347 'heightList': 'heights'
351 348 }
352 349 desc = {
353 350 'data_output': ['z', 'w', 'v'],
354 351 'utctime': 'timestamps',
355 352 'heightList': 'heights'
356 353 }
357 354 desc = {
358 355 'Data': {
359 356 'data_output': 'winds',
360 357 'utctime': 'timestamps'
361 358 },
362 359 'Metadata': {
363 360 'heightList': 'heights'
364 361 }
365 362 }
366 363
367 364 writer = proc_unit.addOperation(name='HDFWriter')
368 365 writer.addParameter(name='path', value='/path/to/file')
369 366 writer.addParameter(name='blocksPerFile', value='32')
370 367 writer.addParameter(name='metadataList', value='heightList,timeZone')
371 368 writer.addParameter(name='dataList',value='data_output,utctime')
372 369 # writer.addParameter(name='description',value=json.dumps(desc))
373 370
374 371 """
375 372
376 373 ext = ".hdf5"
377 374 optchar = "D"
378 375 filename = None
379 376 path = None
380 377 setFile = None
381 378 fp = None
382 379 firsttime = True
383 380 #Configurations
384 381 blocksPerFile = None
385 382 blockIndex = None
386 383 dataOut = None
387 384 #Data Arrays
388 385 dataList = None
389 386 metadataList = None
390 387 currentDay = None
391 388 lastTime = None
392 389 timeZone = "ut"
393 390 hourLimit = 3
394 391 breakDays = True
395 392
396 393 def __init__(self):
397 394
398 395 Operation.__init__(self)
399 396 return
400 397
401 398 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None,
402 399 description={},timeZone = "ut",hourLimit = 3, breakDays=True):
403 400 self.path = path
404 401 self.blocksPerFile = blocksPerFile
405 402 self.metadataList = metadataList
406 403 self.dataList = [s.strip() for s in dataList]
407 404 self.setType = setType
408 405 self.description = description
409 406 self.timeZone = timeZone
410 407 self.hourLimit = hourLimit
411 408 self.breakDays = breakDays
412 409
413 410 if self.metadataList is None:
414 411 self.metadataList = self.dataOut.metadata_list
415 412
416 413 tableList = []
417 414 dsList = []
418 415
419 416 for i in range(len(self.dataList)):
420 417 dsDict = {}
421 418 if hasattr(self.dataOut, self.dataList[i]):
422 419 dataAux = getattr(self.dataOut, self.dataList[i])
423 420 dsDict['variable'] = self.dataList[i]
424 421 else:
425 422 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
426 423 continue
427 424
428 425 if dataAux is None:
429 426 continue
430 427 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
431 428 dsDict['nDim'] = 0
432 429 else:
433 430 dsDict['nDim'] = len(dataAux.shape)
434 431 dsDict['shape'] = dataAux.shape
435 432 dsDict['dsNumber'] = dataAux.shape[0]
436 433 dsDict['dtype'] = dataAux.dtype
437 434
438 435 dsList.append(dsDict)
439 436
440 437 self.dsList = dsList
441 438 self.currentDay = self.dataOut.datatime.date()
442 439
443 440 def timeFlag(self):
444 441 currentTime = self.dataOut.utctime
445 442 timeTuple = None
446 443 if self.timeZone == "lt":
447 444 timeTuple = time.localtime(currentTime)
448 445 else :
449 446 timeTuple = time.gmtime(currentTime)
450 447
451 448 dataDay = timeTuple.tm_yday
452 449 #print("time UTC: ",currentTime, self.dataOut.datatime)
453 450 if self.lastTime is None:
454 451 self.lastTime = currentTime
455 452 self.currentDay = dataDay
456 453 return False
457 454
458 455 timeDiff = currentTime - self.lastTime
459 456
460 457 #If the day changed or the gap between one block and the next exceeds self.hourLimit
461 458 if (dataDay != self.currentDay) and self.breakDays:
462 459 self.currentDay = dataDay
463 460 return True
464 461 elif timeDiff > self.hourLimit*60*60:
465 462 self.lastTime = currentTime
466 463 return True
467 464 else:
468 465 self.lastTime = currentTime
469 466 return False
470 467
471 468 def run(self, dataOut,**kwargs):
472 469
473 470 self.dataOut = dataOut
474 471 if not(self.isConfig):
475 472 self.setup(**kwargs)
476 473
477 474 self.isConfig = True
478 475 self.setNextFile()
479 476
480 477 self.putData()
481 478 return
482 479
483 480 def setNextFile(self):
484 481
485 482 ext = self.ext
486 483 path = self.path
487 484 setFile = self.setFile
488 485 timeTuple = None
489 486 if self.timeZone == "lt":
490 487 timeTuple = time.localtime(self.dataOut.utctime)
491 488 elif self.timeZone == "ut":
492 489 timeTuple = time.gmtime(self.dataOut.utctime)
493 490 #print("path: ",timeTuple)
494 491 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
495 492 fullpath = os.path.join(path, subfolder)
496 493
497 494 if os.path.exists(fullpath):
498 495 filesList = os.listdir(fullpath)
499 496 filesList = [k for k in filesList if k.startswith(self.optchar)]
500 497 if len( filesList ) > 0:
501 498 filesList = sorted(filesList, key=str.lower)
502 499 filen = filesList[-1]
503 500 # the filename must have the following format
504 501 # 0 1234 567 89A BCDE (hex)
505 502 # x YYYY DDD SSS .ext
506 503 if isNumber(filen[8:11]):
507 504 setFile = int(filen[8:11]) # initialize the set counter from the set number of the last file
508 505 else:
509 506 setFile = -1
510 507 else:
511 508 setFile = -1 # initialize the set counter
512 509 else:
513 510 os.makedirs(fullpath)
514 511 setFile = -1 # initialize the set counter
515 512
516 513 if self.setType is None:
517 514 setFile += 1
518 515 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
519 516 timeTuple.tm_year,
520 517 timeTuple.tm_yday,
521 518 setFile,
522 519 ext )
523 520 else:
524 521 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
525 522 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
526 523 timeTuple.tm_year,
527 524 timeTuple.tm_yday,
528 525 setFile,
529 526 ext )
530 527
531 528 self.filename = os.path.join( path, subfolder, file )
532 529
533 530 #Setting HDF5 File
534 531 self.fp = h5py.File(self.filename, 'w')
535 532 #write metadata
536 533 self.writeMetadata(self.fp)
537 534 #Write data
538 535 self.writeData(self.fp)
539 536
540 537 def getLabel(self, name, x=None):
541 538
542 539 if x is None:
543 540 if 'Data' in self.description:
544 541 data = self.description['Data']
545 542 if 'Metadata' in self.description:
546 543 data.update(self.description['Metadata'])
547 544 else:
548 545 data = self.description
549 546 if name in data:
550 547 if isinstance(data[name], str):
551 548 return data[name]
552 549 elif isinstance(data[name], list):
553 550 return None
554 551 elif isinstance(data[name], dict):
555 552 for key, value in data[name].items():
556 553 return key
557 554 return name
558 555 else:
559 556 if 'Metadata' in self.description:
560 557 meta = self.description['Metadata']
561 558 else:
562 559 meta = self.description
563 560 if name in meta:
564 561 if isinstance(meta[name], list):
565 562 return meta[name][x]
566 563 elif isinstance(meta[name], dict):
567 564 for key, value in meta[name].items():
568 565 return value[x]
569 566 if 'cspc' in name:
570 567 return 'pair{:02d}'.format(x)
571 568 else:
572 569 return 'channel{:02d}'.format(x)
573 570
574 571 def writeMetadata(self, fp):
575 572
576 573 if self.description:
577 574 if 'Metadata' in self.description:
578 575 grp = fp.create_group('Metadata')
579 576 else:
580 577 grp = fp
581 578 else:
582 579 grp = fp.create_group('Metadata')
583 580
584 581 for i in range(len(self.metadataList)):
585 582 if not hasattr(self.dataOut, self.metadataList[i]):
586 583 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
587 584 continue
588 585 value = getattr(self.dataOut, self.metadataList[i])
589 586 if isinstance(value, bool):
590 587 if value is True:
591 588 value = 1
592 589 else:
593 590 value = 0
594 591 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
595 592 return
596 593
597 594 def writeData(self, fp):
598 595
599 596 if self.description:
600 597 if 'Data' in self.description:
601 598 grp = fp.create_group('Data')
602 599 else:
603 600 grp = fp
604 601 else:
605 602 grp = fp.create_group('Data')
606 603
607 604 dtsets = []
608 605 data = []
609 606
610 607 for dsInfo in self.dsList:
611 608 if dsInfo['nDim'] == 0:
612 609 ds = grp.create_dataset(
613 610 self.getLabel(dsInfo['variable']),
614 611 (self.blocksPerFile, ),
615 612 chunks=True,
616 613 dtype=numpy.float64)
617 614 dtsets.append(ds)
618 615 data.append((dsInfo['variable'], -1))
619 616 else:
620 617 label = self.getLabel(dsInfo['variable'])
621 618 if label is not None:
622 619 sgrp = grp.create_group(label)
623 620 else:
624 621 sgrp = grp
625 622 for i in range(dsInfo['dsNumber']):
626 623 ds = sgrp.create_dataset(
627 624 self.getLabel(dsInfo['variable'], i),
628 625 (self.blocksPerFile, ) + dsInfo['shape'][1:],
629 626 chunks=True,
630 627 dtype=dsInfo['dtype'])
631 628 dtsets.append(ds)
632 629 data.append((dsInfo['variable'], i))
633 630 fp.flush()
634 631
635 632 log.log('Creating file: {}'.format(fp.filename), self.name)
636 633
637 634 self.ds = dtsets
638 635 self.data = data
639 636 self.firsttime = True
640 637 self.blockIndex = 0
641 638 return
642 639
643 640 def putData(self):
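        # Write one block of each selected attribute at position blockIndex; when
        # the file is full (blocksPerFile) or timeFlag() fires, the current file
        # is closed and a new one is started.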
644 641
645 642 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
646 643 self.closeFile()
647 644 self.setNextFile()
648 print("breaking file")
649 645
650 646 for i, ds in enumerate(self.ds):
651 647 attr, ch = self.data[i]
652 648 if ch == -1:
653 649 ds[self.blockIndex] = getattr(self.dataOut, attr)
654 650 else:
655 651 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
656 652
657 653 self.fp.flush()
658 654 self.blockIndex += 1
659 655 if self.blockIndex == 1:
660 656 log.log('Block No. {}/{} --> {}'.format(self.blockIndex, self.blocksPerFile,self.dataOut.datatime.ctime()), self.name)
661 657 else:
662 658 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
663 659 return
664 660
665 661 def closeFile(self):
666 662
667 663 if self.blockIndex != self.blocksPerFile:
668 664 for ds in self.ds:
669 665 ds.resize(self.blockIndex, axis=0)
670 666
671 667 if self.fp:
672 668 self.fp.flush()
673 669 self.fp.close()
674 670
675 671 def close(self):
676 672
677 673 self.closeFile()
@@ -1,203 +1,205
1 1 '''
2 2 Base classes to create Processing units and operations; the MPDecorator
3 3 must be used in plotting and writing operations to allow them to run as an
4 4 external process.
5 5 '''
6 6
7 7 import inspect
8 8 import zmq
9 9 import time
10 10 import pickle
11 11 import traceback
12 12 from threading import Thread
13 13 from multiprocessing import Process, Queue
14 14 from schainpy.utils import log
15 15
16 16
17 17 class ProcessingUnit(object):
18 18 '''
19 19 Base class to create Signal Chain Units
20 20 '''
21 21
22 22 proc_type = 'processing'
23 23
24 24 def __init__(self):
25 25
26 26 self.dataIn = None
27 27 self.dataOut = None
28 28 self.isConfig = False
29 29 self.operations = []
30 30
31 31 def setInput(self, unit):
32 32
33 33 self.dataIn = unit.dataOut
34 34
35
35 36 def getAllowedArgs(self):
36 37 if hasattr(self, '__attrs__'):
37 38 return self.__attrs__
38 39 else:
39 40 return inspect.getargspec(self.run).args
40 41
41 42 def addOperation(self, conf, operation):
42 43 '''
43 44 '''
44 45
45 46 self.operations.append((operation, conf.type, conf.getKwargs()))
46 47
47 48 def getOperationObj(self, objId):
48 49
49 50 if objId not in list(self.operations.keys()):
50 51 return None
51 52
52 53 return self.operations[objId]
53 54
54 55 def call(self, **kwargs):
55 56 '''
56 57 '''
57 58
58 59 try:
59 60 if self.dataIn is not None and self.dataIn.flagNoData and not self.dataIn.error:
60 61 return self.dataIn.isReady()
61 62 elif self.dataIn is None or not self.dataIn.error:
62 63 self.run(**kwargs)
63 64 elif self.dataIn.error:
64 65 self.dataOut.error = self.dataIn.error
65 66 self.dataOut.flagNoData = True
66 67 except:
68
67 69 err = traceback.format_exc()
68 70 if 'SchainWarning' in err:
69 71 log.warning(err.split('SchainWarning:')[-1].split('\n')[0].strip(), self.name)
70 72 elif 'SchainError' in err:
71 73 log.error(err.split('SchainError:')[-1].split('\n')[0].strip(), self.name)
72 74 else:
73 75 log.error(err, self.name)
74 76 self.dataOut.error = True
75 77
76 78 for op, optype, opkwargs in self.operations:
77 79 if optype == 'other' and not self.dataOut.flagNoData:
78 80 self.dataOut = op.run(self.dataOut, **opkwargs)
79 81 elif optype == 'external' and not self.dataOut.flagNoData:
80 82 op.queue.put(self.dataOut)
81 83 elif optype == 'external' and self.dataOut.error:
82 84 op.queue.put(self.dataOut)
83 85
84 86 return 'Error' if self.dataOut.error else self.dataOut.isReady()
85 87
86 88 def setup(self):
87 89
88 90 raise NotImplementedError
89 91
90 92 def run(self):
91 93
92 94 raise NotImplementedError
93 95
94 96 def close(self):
95 97
96 98 return
97 99
98 100
99 101 class Operation(object):
100 102
101 103 '''
102 104 '''
103 105
104 106 proc_type = 'operation'
105 107
106 108 def __init__(self):
107 109
108 110 self.id = None
109 111 self.isConfig = False
110 112
111 113 if not hasattr(self, 'name'):
112 114 self.name = self.__class__.__name__
113 115
114 116 def getAllowedArgs(self):
115 117 if hasattr(self, '__attrs__'):
116 118 return self.__attrs__
117 119 else:
118 120 return inspect.getargspec(self.run).args
119 121
120 122 def setup(self):
121 123
122 124 self.isConfig = True
123 125
124 126 raise NotImplementedError
125 127
126 128 def run(self, dataIn, **kwargs):
127 129 """
128 130 Performs the required operations on dataIn.data and updates the
129 131 attributes of the dataIn object.
130 132
131 133 Input:
132 134
133 135 dataIn : object of type JROData
134 136
135 137 Return:
136 138
137 139 None
138 140
139 141 Affected:
140 142 __buffer : data reception buffer.
141 143
142 144 """
143 145 if not self.isConfig:
144 146 self.setup(**kwargs)
145 147
146 148 raise NotImplementedError
147 149
148 150 def close(self):
149 151
150 152 return
151 153
152 154
153 155 def MPDecorator(BaseClass):
154 156 """
155 157 Multiprocessing class decorator
156 158
157 159 This function adds multiprocessing features to a BaseClass.
158 160 """
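    # Typical use in this codebase: place @MPDecorator above a writing or plotting
    # Operation (e.g. HDFWriter above), so that its run() executes in a separate
    # Process fed with dataOut objects through a Queue.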
159 161
160 162 class MPClass(BaseClass, Process):
161 163
162 164 def __init__(self, *args, **kwargs):
163 165 super(MPClass, self).__init__()
164 166 Process.__init__(self)
165 167
166 168 self.args = args
167 169 self.kwargs = kwargs
168 170 self.t = time.time()
169 171 self.op_type = 'external'
170 172 self.name = BaseClass.__name__
171 173 self.__doc__ = BaseClass.__doc__
172 174
173 175 if 'plot' in self.name.lower() and not self.name.endswith('_'):
174 176 self.name = '{}{}'.format(self.CODE.upper(), 'Plot')
175 177
176 178 self.start_time = time.time()
177 179 #self.err_queue = args[2]
178 180 self.queue = Queue(maxsize=1)
179 181 self.myrun = BaseClass.run
180 182
181 183 def run(self):
182 184
183 185 while True:
184 186
185 187 dataOut = self.queue.get()
186 188
187 189 if not dataOut.error:
188 190 try:
189 191 BaseClass.run(self, dataOut, **self.kwargs)
190 192 except:
191 193 err = traceback.format_exc()
192 194 log.error(err, self.name)
193 195 else:
194 196 break
195 197
196 198 self.close()
197 199
198 200 def close(self):
199 201
200 202 BaseClass.close(self)
201 203 log.success('Done...(Time:{:4.2f} secs)'.format(time.time()-self.start_time), self.name)
202 204
203 205 return MPClass
@@ -1,1683 +1,1684
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23
23 SPEED_OF_LIGHT = 299792458
24 24
25 25 class SpectraProc(ProcessingUnit):
26 26
27 27 def __init__(self):
28 28
29 29 ProcessingUnit.__init__(self)
30 30
31 31 self.buffer = None
32 32 self.firstdatatime = None
33 33 self.profIndex = 0
34 34 self.dataOut = Spectra()
35 35 self.id_min = None
36 36 self.id_max = None
37 37 self.setupReq = False # Add to all processing units
38 38
39 39 def __updateSpecFromVoltage(self):
40 40
41 41 self.dataOut.timeZone = self.dataIn.timeZone
42 42 self.dataOut.dstFlag = self.dataIn.dstFlag
43 43 self.dataOut.errorCount = self.dataIn.errorCount
44 44 self.dataOut.useLocalTime = self.dataIn.useLocalTime
45 45 try:
46 46 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
47 47 except:
48 48 pass
49 49 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
50 50 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
51 51 self.dataOut.channelList = self.dataIn.channelList
52 52 self.dataOut.heightList = self.dataIn.heightList
53 53 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
54 54 self.dataOut.nProfiles = self.dataOut.nFFTPoints
55 55 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
56 56 self.dataOut.utctime = self.firstdatatime
57 57 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
58 58 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
59 59 self.dataOut.flagShiftFFT = False
60 60 self.dataOut.nCohInt = self.dataIn.nCohInt
61 61 self.dataOut.nIncohInt = 1
62 62 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
63 63 self.dataOut.frequency = self.dataIn.frequency
64 64 self.dataOut.realtime = self.dataIn.realtime
65 65 self.dataOut.azimuth = self.dataIn.azimuth
66 66 self.dataOut.zenith = self.dataIn.zenith
67 67 self.dataOut.codeList = self.dataIn.codeList
68 68 self.dataOut.azimuthList = self.dataIn.azimuthList
69 69 self.dataOut.elevationList = self.dataIn.elevationList
70 70
71
72
71 73 def __getFft(self):
72 74 """
73 75 Converts Voltage values into Spectra
74 76
75 77 Affected:
76 78 self.dataOut.data_spc
77 79 self.dataOut.data_cspc
78 80 self.dataOut.data_dc
79 81 self.dataOut.heightList
80 82 self.profIndex
81 83 self.buffer
82 84 self.dataOut.flagNoData
83 85 """
84 86 fft_volt = numpy.fft.fft(
85 87 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
86 88 fft_volt = fft_volt.astype(numpy.dtype('complex'))
87 89 dc = fft_volt[:, 0, :]
88 90
89 91 # self-spectra computation
90 92 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
91 93 spc = fft_volt * numpy.conjugate(fft_volt)
92 94 spc = spc.real
93 95
94 96 blocksize = 0
95 97 blocksize += dc.size
96 98 blocksize += spc.size
97 99
98 100 cspc = None
99 101 pairIndex = 0
100 102 if self.dataOut.pairsList != None:
101 103 # cross-spectra computation
102 104 cspc = numpy.zeros(
103 105 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
104 106 for pair in self.dataOut.pairsList:
105 107 if pair[0] not in self.dataOut.channelList:
106 108 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
107 109 str(pair), str(self.dataOut.channelList)))
108 110 if pair[1] not in self.dataOut.channelList:
109 111 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
110 112 str(pair), str(self.dataOut.channelList)))
111 113
112 114 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
113 115 numpy.conjugate(fft_volt[pair[1], :, :])
114 116 pairIndex += 1
115 117 blocksize += cspc.size
116 118
117 119 self.dataOut.data_spc = spc
118 120 self.dataOut.data_cspc = cspc
119 121 self.dataOut.data_dc = dc
120 122 self.dataOut.blockSize = blocksize
121 123 self.dataOut.flagShiftFFT = False
122 124
123 125 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
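        # If the input is already Spectra it is just copied (optionally FFT-shifted
        # and pair-selected); if it is Voltage, profiles are buffered until nProfiles
        # have been collected and then transformed with __getFft().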
124 #print("spectra run")
126
125 127 if self.dataIn.type == "Spectra":
126 128 self.dataOut.copy(self.dataIn)
127 129 if shift_fft:
128 130 #shift the spectra to the right along the frequency axis by half the FFT points
129 131 shift = int(self.dataOut.nFFTPoints/2)
130 132 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
131 133
132 134 if self.dataOut.data_cspc is not None:
133 135 #shift the cross-spectra to the right along the frequency axis by half the FFT points
134 136 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
135 137 if pairsList:
136 138 self.__selectPairs(pairsList)
137 139
140
138 141 elif self.dataIn.type == "Voltage":
139 142
140 143 self.dataOut.flagNoData = True
141 144
142 145 if nFFTPoints == None:
143 146 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144 147
145 148 if nProfiles == None:
146 149 nProfiles = nFFTPoints
147 150
148 151 if ippFactor == None:
149 152 self.dataOut.ippFactor = 1
150 153
151 154 self.dataOut.nFFTPoints = nFFTPoints
152 155
153 156 if self.buffer is None:
154 157 self.buffer = numpy.zeros((self.dataIn.nChannels,
155 158 nProfiles,
156 159 self.dataIn.nHeights),
157 160 dtype='complex')
158 161
159 162 if self.dataIn.flagDataAsBlock:
160 163 nVoltProfiles = self.dataIn.data.shape[1]
161 164
162 165 if nVoltProfiles == nProfiles:
163 166 self.buffer = self.dataIn.data.copy()
164 167 self.profIndex = nVoltProfiles
165 168
166 169 elif nVoltProfiles < nProfiles:
167 170
168 171 if self.profIndex == 0:
169 172 self.id_min = 0
170 173 self.id_max = nVoltProfiles
171 174
172 175 self.buffer[:, self.id_min:self.id_max,
173 176 :] = self.dataIn.data
174 177 self.profIndex += nVoltProfiles
175 178 self.id_min += nVoltProfiles
176 179 self.id_max += nVoltProfiles
177 180 else:
178 181 raise ValueError("The object of type %s has %d profiles, it should have only %d profiles" % (
179 182 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
180 183 self.dataOut.flagNoData = True
181 184 else:
182 185 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
183 186 self.profIndex += 1
184 187
185 188 if self.firstdatatime == None:
186 189 self.firstdatatime = self.dataIn.utctime
187 190
188 191 if self.profIndex == nProfiles:
189 192 self.__updateSpecFromVoltage()
190 193 if pairsList == None:
191 194 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
192 195 else:
193 196 self.dataOut.pairsList = pairsList
194 197 self.__getFft()
195 198 self.dataOut.flagNoData = False
196 199 self.firstdatatime = None
197 200 self.profIndex = 0
198 201 else:
199 202 raise ValueError("The type of input object '{}' is not valid".format(
200 203 self.dataIn.type))
201 204
202 205 def __selectPairs(self, pairsList):
203 206
204 207 if not pairsList:
205 208 return
206 209
207 210 pairs = []
208 211 pairsIndex = []
209 212
210 213 for pair in pairsList:
211 214 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
212 215 continue
213 216 pairs.append(pair)
214 217 pairsIndex.append(pairs.index(pair))
215 218
216 219 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
217 220 self.dataOut.pairsList = pairs
218 221
219 222 return
220 223
221 224 def selectFFTs(self, minFFT, maxFFT ):
222 225 """
223 226 Selects a block of data based on a set of FFT-point values within the range
224 227 minFFT <= FFT <= maxFFT
225 228 """
226 229
227 230 if (minFFT > maxFFT):
228 231 raise ValueError("Error selecting FFTs: range (%d,%d) is not valid" % (minFFT, maxFFT))
229 232
230 233 if (minFFT < self.dataOut.getFreqRange()[0]):
231 234 minFFT = self.dataOut.getFreqRange()[0]
232 235
233 236 if (maxFFT > self.dataOut.getFreqRange()[-1]):
234 237 maxFFT = self.dataOut.getFreqRange()[-1]
235 238
236 239 minIndex = 0
237 240 maxIndex = 0
238 241 FFTs = self.dataOut.getFreqRange()
239 242
240 243 inda = numpy.where(FFTs >= minFFT)
241 244 indb = numpy.where(FFTs <= maxFFT)
242 245
243 246 try:
244 247 minIndex = inda[0][0]
245 248 except:
246 249 minIndex = 0
247 250
248 251 try:
249 252 maxIndex = indb[0][-1]
250 253 except:
251 254 maxIndex = len(FFTs)
252 255
253 256 self.selectFFTsByIndex(minIndex, maxIndex)
254 257
255 258 return 1
256 259
257 260 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
258 261 newheis = numpy.where(
259 262 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
260 263
261 264 if hei_ref != None:
262 265 newheis = numpy.where(self.dataOut.heightList > hei_ref)
263 266
264 267 minIndex = min(newheis[0])
265 268 maxIndex = max(newheis[0])
266 269 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
267 270 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
268 271
269 272 # determine indices
270 273 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
271 274 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
272 275 avg_dB = 10 * \
273 276 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
274 277 beacon_dB = numpy.sort(avg_dB)[-nheis:]
275 278 beacon_heiIndexList = []
276 279 for val in avg_dB.tolist():
277 280 if val >= beacon_dB[0]:
278 281 beacon_heiIndexList.append(avg_dB.tolist().index(val))
279 282
280 283 #data_spc = data_spc[:,:,beacon_heiIndexList]
281 284 data_cspc = None
282 285 if self.dataOut.data_cspc is not None:
283 286 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
284 287 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
285 288
286 289 data_dc = None
287 290 if self.dataOut.data_dc is not None:
288 291 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
289 292 #data_dc = data_dc[:,beacon_heiIndexList]
290 293
291 294 self.dataOut.data_spc = data_spc
292 295 self.dataOut.data_cspc = data_cspc
293 296 self.dataOut.data_dc = data_dc
294 297 self.dataOut.heightList = heightList
295 298 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
296 299
297 300 return 1
298 301
299 302 def selectFFTsByIndex(self, minIndex, maxIndex):
300 303 """
301 304
302 305 """
303 306
304 307 if (minIndex < 0) or (minIndex > maxIndex):
304 307 raise ValueError("Error selecting FFTs: index range (%d,%d) is not valid" % (minIndex, maxIndex))
306 309
307 310 if (maxIndex >= self.dataOut.nProfiles):
308 311 maxIndex = self.dataOut.nProfiles-1
309 312
310 313 #Spectra
311 314 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
312 315
313 316 data_cspc = None
314 317 if self.dataOut.data_cspc is not None:
315 318 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
316 319
317 320 data_dc = None
318 321 if self.dataOut.data_dc is not None:
319 322 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
320 323
321 324 self.dataOut.data_spc = data_spc
322 325 self.dataOut.data_cspc = data_cspc
323 326 self.dataOut.data_dc = data_dc
324 327
325 328 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
326 329 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
327 330 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
328 331
329 332 return 1
330 333
331 334 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
332 335 # height range validation
333 336 if minHei == None:
334 337 minHei = self.dataOut.heightList[0]
335 338
336 339 if maxHei == None:
337 340 maxHei = self.dataOut.heightList[-1]
338 341
339 342 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
340 343 print('minHei: %.2f is out of the heights range' % (minHei))
341 344 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
342 345 minHei = self.dataOut.heightList[0]
343 346
344 347 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
345 348 print('maxHei: %.2f is out of the heights range' % (maxHei))
346 349 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
347 350 maxHei = self.dataOut.heightList[-1]
348 351
349 352 # velocity range validation
350 353 velrange = self.dataOut.getVelRange(1)
351 354
352 355 if minVel == None:
353 356 minVel = velrange[0]
354 357
355 358 if maxVel == None:
356 359 maxVel = velrange[-1]
357 360
358 361 if (minVel < velrange[0]) or (minVel > maxVel):
359 362 print('minVel: %.2f is out of the velocity range' % (minVel))
360 363 print('minVel is set to %.2f' % (velrange[0]))
361 364 minVel = velrange[0]
362 365
363 366 if (maxVel > velrange[-1]) or (maxVel < minVel):
364 367 print('maxVel: %.2f is out of the velocity range' % (maxVel))
365 368 print('maxVel is set to %.2f' % (velrange[-1]))
366 369 maxVel = velrange[-1]
367 370
368 371 # index selection for the height range
369 372 minIndex = 0
370 373 maxIndex = 0
371 374 heights = self.dataOut.heightList
372 375
373 376 inda = numpy.where(heights >= minHei)
374 377 indb = numpy.where(heights <= maxHei)
375 378
376 379 try:
377 380 minIndex = inda[0][0]
378 381 except:
379 382 minIndex = 0
380 383
381 384 try:
382 385 maxIndex = indb[0][-1]
383 386 except:
384 387 maxIndex = len(heights)
385 388
386 389 if (minIndex < 0) or (minIndex > maxIndex):
387 390 raise ValueError("some value in (%d,%d) is not valid" % (
388 391 minIndex, maxIndex))
389 392
390 393 if (maxIndex >= self.dataOut.nHeights):
391 394 maxIndex = self.dataOut.nHeights - 1
392 395
393 396 # index selection for the velocity range
394 397 indminvel = numpy.where(velrange >= minVel)
395 398 indmaxvel = numpy.where(velrange <= maxVel)
396 399 try:
397 400 minIndexVel = indminvel[0][0]
398 401 except:
399 402 minIndexVel = 0
400 403
401 404 try:
402 405 maxIndexVel = indmaxvel[0][-1]
403 406 except:
404 407 maxIndexVel = len(velrange)
405 408
406 409 # spectrum selection
407 410 data_spc = self.dataOut.data_spc[:,
408 411 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
409 412 # noise estimation
410 413 noise = numpy.zeros(self.dataOut.nChannels)
411 414
412 415 for channel in range(self.dataOut.nChannels):
413 416 daux = data_spc[channel, :, :]
414 417 sortdata = numpy.sort(daux, axis=None)
415 418 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
416 419
417 420 self.dataOut.noise_estimation = noise.copy()
418 421
419 422 return 1
420 423
421 424 class removeDC(Operation):
422 425
423 426 def run(self, dataOut, mode=2):
424 427 self.dataOut = dataOut
425 428 jspectra = self.dataOut.data_spc
426 429 jcspectra = self.dataOut.data_cspc
427 430
428 431 num_chan = jspectra.shape[0]
429 432 num_hei = jspectra.shape[2]
430 433
431 434 if jcspectra is not None:
432 435 jcspectraExist = True
433 436 num_pairs = jcspectra.shape[0]
434 437 else:
435 438 jcspectraExist = False
436 439
437 440 freq_dc = int(jspectra.shape[1] / 2)
438 441 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
439 442 ind_vel = ind_vel.astype(int)
440 443
441 444 if ind_vel[0] < 0:
442 445 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
443 446
444 447 if mode == 1:
445 448 jspectra[:, freq_dc, :] = (
446 449 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
447 450
448 451 if jcspectraExist:
449 452 jcspectra[:, freq_dc, :] = (
450 453 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
451 454
452 455 if mode == 2:
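            # mode 2: fit a cubic polynomial through the bins at velocities -2,-1,1,2
            # around DC (Vandermonde matrix xx) and replace the DC bin with the
            # polynomial evaluated at velocity 0 (first row of the inverse, xx_aux).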
453 456
454 457 vel = numpy.array([-2, -1, 1, 2])
455 458 xx = numpy.zeros([4, 4])
456 459
457 460 for fil in range(4):
458 461 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
459 462
460 463 xx_inv = numpy.linalg.inv(xx)
461 464 xx_aux = xx_inv[0, :]
462 465
463 466 for ich in range(num_chan):
464 467 yy = jspectra[ich, ind_vel, :]
465 468 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
466 469
467 470 junkid = jspectra[ich, freq_dc, :] <= 0
468 471 cjunkid = sum(junkid)
469 472
470 473 if cjunkid.any():
471 474 jspectra[ich, freq_dc, junkid.nonzero()] = (
472 475 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
473 476
474 477 if jcspectraExist:
475 478 for ip in range(num_pairs):
476 479 yy = jcspectra[ip, ind_vel, :]
477 480 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
478 481
479 482 self.dataOut.data_spc = jspectra
480 483 self.dataOut.data_cspc = jcspectra
481 484
482 485 return self.dataOut
483 486
484 487 # import matplotlib.pyplot as plt
485 488
486 489 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
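    # Gaussian-shaped model used by CleanRayleigh's curve_fit: a0 is the amplitude,
    # a1 the center and a2 the width of z = (x - a1) / a2.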
487 490 z = (x - a1) / a2
488 491 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
489 492 return y
490 493
491 494
492 495 class CleanRayleigh(Operation):
493 496
494 497 def __init__(self):
495 498
496 499 Operation.__init__(self)
497 500 self.i=0
498 501 self.isConfig = False
499 502 self.__dataReady = False
500 503 self.__profIndex = 0
501 504 self.byTime = False
502 505 self.byProfiles = False
503 506
504 507 self.bloques = None
505 508 self.bloque0 = None
506 509
507 510 self.index = 0
508 511
509 512 self.buffer = 0
510 513 self.buffer2 = 0
511 514 self.buffer3 = 0
512 515
513 516
514 517 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
515 518
516 519 self.nChannels = dataOut.nChannels
517 520 self.nProf = dataOut.nProfiles
518 521 self.nPairs = dataOut.data_cspc.shape[0]
519 522 self.pairsArray = numpy.array(dataOut.pairsList)
520 523 self.spectra = dataOut.data_spc
521 524 self.cspectra = dataOut.data_cspc
522 525 self.heights = dataOut.heightList # all heights
523 526 self.nHeights = len(self.heights)
524 527 self.min_hei = min_hei
525 528 self.max_hei = max_hei
526 529 if (self.min_hei == None):
527 530 self.min_hei = 0
528 531 if (self.max_hei == None):
529 532 self.max_hei = dataOut.heightList[-1]
530 533 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
531 534 self.heightsClean = self.heights[self.hval] # filtered heights
532 535 self.hval = self.hval[0] # shape (N,): only N elements -> height indices
533 536 self.nHeightsClean = len(self.heightsClean)
534 537 self.channels = dataOut.channelList
535 538 self.nChan = len(self.channels)
536 539 self.nIncohInt = dataOut.nIncohInt
537 540 self.__initime = dataOut.utctime
538 541 self.maxAltInd = self.hval[-1]+1
539 542 self.minAltInd = self.hval[0]
540 543
541 544 self.crosspairs = dataOut.pairsList
542 545 self.nPairs = len(self.crosspairs)
543 546 self.normFactor = dataOut.normFactor
544 547 self.nFFTPoints = dataOut.nFFTPoints
545 548 self.ippSeconds = dataOut.ippSeconds
546 549 self.currentTime = self.__initime
547 550 self.pairsArray = numpy.array(dataOut.pairsList)
548 551 self.factor_stdv = factor_stdv
549 #print("CHANNELS: ",[x for x in self.channels])
550 552
551 553 if n != None :
552 554 self.byProfiles = True
553 555 self.nIntProfiles = n
554 556 else:
555 557 self.__integrationtime = timeInterval
556 558
557 559 self.__dataReady = False
558 560 self.isConfig = True
559 561
560 562
561 563
562 564 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
563 #print (dataOut.utctime)
565
564 566 if not self.isConfig :
565 #print("Setting config")
567
566 568 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
567 #print("Config Done")
569
568 570 tini=dataOut.utctime
569 571
570 572 if self.byProfiles:
571 573 if self.__profIndex == self.nIntProfiles:
572 574 self.__dataReady = True
573 575 else:
574 576 if (tini - self.__initime) >= self.__integrationtime:
575 #print(tini - self.__initime,self.__profIndex)
577
576 578 self.__dataReady = True
577 579 self.__initime = tini
578 580
579 581 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
580 582
581 583 if self.__dataReady:
582 #print("Data ready",self.__profIndex)
584
583 585 self.__profIndex = 0
584 586 jspc = self.buffer
585 587 jcspc = self.buffer2
586 588 #jnoise = self.buffer3
587 589 self.buffer = dataOut.data_spc
588 590 self.buffer2 = dataOut.data_cspc
589 591 #self.buffer3 = dataOut.noise
590 592 self.currentTime = dataOut.utctime
591 593 if numpy.any(jspc) :
592 594 #print( jspc.shape, jcspc.shape)
593 595 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
594 596 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
595 597 self.__dataReady = False
596 598 #print( jspc.shape, jcspc.shape)
597 599 dataOut.flagNoData = False
598 600 else:
599 601 dataOut.flagNoData = True
600 602 self.__dataReady = False
601 603 return dataOut
602 604 else:
603 605 #print( len(self.buffer))
604 606 if numpy.any(self.buffer):
605 607 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
606 608 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
607 609 self.buffer3 += dataOut.data_dc
608 610 else:
609 611 self.buffer = dataOut.data_spc
610 612 self.buffer2 = dataOut.data_cspc
611 613 self.buffer3 = dataOut.data_dc
612 614 #print self.index, self.fint
613 615 #print self.buffer2.shape
614 616 dataOut.flagNoData = True ## NOTE: ?? review LATER
615 617 self.__profIndex += 1
616 618 return dataOut ## NOTE: REV
617 619
618 620
619 621 #index = tini.tm_hour*12+tini.tm_min/5
620 622 '''REVIEW'''
621 623 # jspc = jspc/self.nFFTPoints/self.normFactor
622 624 # jcspc = jcspc/self.nFFTPoints/self.normFactor
623 625
624 626
625 627
626 628 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
627 629 dataOut.data_spc = tmp_spectra
628 630 dataOut.data_cspc = tmp_cspectra
629 631
630 632 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
631 633
632 634 dataOut.data_dc = self.buffer3
633 635 dataOut.nIncohInt *= self.nIntProfiles
634 636 dataOut.utctime = self.currentTime # averaged time
635 637 #print("Time: ",time.localtime(dataOut.utctime))
636 638 # dataOut.data_spc = sat_spectra
637 639 # dataOut.data_cspc = sat_cspectra
638 640 self.buffer = 0
639 641 self.buffer2 = 0
640 642 self.buffer3 = 0
641 643
642 644 return dataOut
643 645
644 646 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
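        # For each height/FFT bin: build the histogram of the log-power over the
        # accumulated blocks, fit a Gaussian to estimate its mode and spread, and
        # replace samples farther than factor_stdv*stdv from the mode with the mean
        # of the remaining samples; the cleaned blocks are then summed per bin.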
645 647 #print("OP cleanRayleigh")
646 648 #import matplotlib.pyplot as plt
647 649 #for k in range(149):
648 650 #channelsProcssd = []
649 651 #channelA_ok = False
650 652 #rfunc = cspectra.copy() #self.bloques
651 653 rfunc = spectra.copy()
652 654 #rfunc = cspectra
653 655 #val_spc = spectra*0.0 #self.bloque0*0.0
654 656 #val_cspc = cspectra*0.0 #self.bloques*0.0
655 657 #in_sat_spectra = spectra.copy() #self.bloque0
656 658 #in_sat_cspectra = cspectra.copy() #self.bloques
657 659
658 660
659 661 ###ONLY FOR TEST:
660 662 raxs = math.ceil(math.sqrt(self.nPairs))
661 663 caxs = math.ceil(self.nPairs/raxs)
662 664 if self.nPairs <4:
663 665 raxs = 2
664 666 caxs = 2
665 667 #print(raxs, caxs)
666 668 fft_rev = 14 #nFFT to plot
667 669 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
668 670 hei_rev = hei_rev[0]
669 671 #print(hei_rev)
670 672
671 673 #print numpy.absolute(rfunc[:,0,0,14])
672 674
673 675 gauss_fit, covariance = None, None
674 676 for ih in range(self.minAltInd,self.maxAltInd):
675 677 for ifreq in range(self.nFFTPoints):
676 678 '''
677 679 ###ONLY FOR TEST:
678 680 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
679 681 fig, axs = plt.subplots(raxs, caxs)
680 682 fig2, axs2 = plt.subplots(raxs, caxs)
681 683 col_ax = 0
682 684 row_ax = 0
683 685 '''
684 686 #print(self.nPairs)
685 687 for ii in range(self.nChan): #PARES DE CANALES SELF y CROSS
686 688 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APLICAR SOLO EN PARES CONTIGUOS
687 689 # continue
688 690 # if not self.crosspairs[ii][0] in channelsProcssd:
689 691 # channelA_ok = True
690 692 #print("pair: ",self.crosspairs[ii])
691 693 '''
692 694 ###ONLY FOR TEST:
693 695 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
694 696 col_ax = 0
695 697 row_ax += 1
696 698 '''
697 699 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) # power?
698 700 #print(func2clean.shape)
699 701 val = (numpy.isfinite(func2clean)==True).nonzero()
700 702
701 703 if len(val)>0: #limitador
702 704 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
703 705 if min_val <= -40 :
704 706 min_val = -40
705 707 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
706 708 if max_val >= 200 :
707 709 max_val = 200
708 710 #print min_val, max_val
709 711 step = 1
710 712 #print("Getting bins and the histogram")
711 713 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
712 714 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
713 715 #print(len(y_dist),len(binstep[:-1]))
714 716 #print(row_ax,col_ax, " ..")
715 717 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
716 718 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
717 719 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
718 720 parg = [numpy.amax(y_dist),mean,sigma]
719 721
720 722 newY = None
721 723
722 724 try :
723 725 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
724 726 mode = gauss_fit[1]
725 727 stdv = gauss_fit[2]
726 728 #print(" FIT OK",gauss_fit)
727 729 '''
728 730 ###ONLY FOR TEST:
729 731 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
730 732 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
731 733 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
732 734 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
733 735 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
734 736 '''
735 737 except:
736 738 mode = mean
737 739 stdv = sigma
738 740 #print("FIT FAIL")
739 741 #continue
740 742
741 743
742 744 #print(mode,stdv)
743 745 #Removing echoes greater than mode + std_factor*stdv
744 746 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
745 747 #noval holds the indices that will be removed
746 748 #print("Chan ",ii," novals: ",len(noval[0]))
747 749 if len(noval[0]) > 0: #forma de array (N,) es igual a longitud (N)
748 750 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
749 751 #print(novall)
750 752 #print(" ",self.pairsArray[ii])
751 753 #cross_pairs = self.pairsArray[ii]
752 754 #Getting coherent echoes which are removed.
753 755 # if len(novall[0]) > 0:
754 756 #
755 757 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
756 758 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
757 759 # val_cspc[novall[0],ii,ifreq,ih] = 1
758 760 #print("OUT NOVALL 1")
759 761 try:
760 762 pair = (self.channels[ii],self.channels[ii + 1])
761 763 except:
762 764 pair = (99,99)
763 765 #print("par ", pair)
764 766 if ( pair in self.crosspairs):
765 767 q = self.crosspairs.index(pair)
766 768 #print("is here: ", q, (ii,ii + 1))
767 769 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
768 770 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
769 771
770 772 #if channelA_ok:
771 773 #chA = self.channels.index(cross_pairs[0])
772 774 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
773 775 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
774 776 #channelA_ok = False
775 777
776 778 # chB = self.channels.index(cross_pairs[1])
777 779 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
778 780 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
779 781 #
780 782 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
781 783 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
782 784 '''
783 785 ###ONLY FOR TEST:
784 786 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
785 787 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
786 788 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
787 789 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
788 790 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
789 791 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
790 792 '''
791 793 '''
792 794 ###ONLY FOR TEST:
793 795 col_ax += 1 #contador de ploteo columnas
794 796 ##print(col_ax)
795 797 ###ONLY FOR TEST:
796 798 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
797 799 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
798 800 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
799 801 fig.suptitle(title)
800 802 fig2.suptitle(title2)
801 803 plt.show()
802 804 '''
803 805 ##################################################################################################
804 806
805 807 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
806 808 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
807 809 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
808 810 for ih in range(self.nHeights):
809 811 for ifreq in range(self.nFFTPoints):
810 812 for ich in range(self.nChan):
811 813 tmp = spectra[:,ich,ifreq,ih]
812 814 valid = (numpy.isfinite(tmp[:])==True).nonzero()
813 815
814 816 if len(valid[0]) >0 :
815 817 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
816 818
817 819 for icr in range(self.nPairs):
818 820 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
819 821 valid = (numpy.isfinite(tmp)==True).nonzero()
820 822 if len(valid[0]) > 0:
821 823 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
822 824
823 825 return out_spectra, out_cspectra
824 826
825 827 def REM_ISOLATED_POINTS(self,array,rth):
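        # Keep only the finite, positive points that have at least 4 such points
        # (themselves included) within a radius rth in the (profile, height) plane;
        # slices with fewer than 4 valid points, and isolated points, are zeroed.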
826 828 # import matplotlib.pyplot as plt
827 829 if rth == None :
828 830 rth = 4
829 831 #print("REM ISO")
830 832 num_prof = len(array[0,:,0])
831 833 num_hei = len(array[0,0,:])
832 834 n2d = len(array[:,0,0])
833 835
834 836 for ii in range(n2d) :
835 837 #print ii,n2d
836 838 tmp = array[ii,:,:]
837 839 #print tmp.shape, array[ii,101,:],array[ii,102,:]
838 840
839 841 # fig = plt.figure(figsize=(6,5))
840 842 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
841 843 # ax = fig.add_axes([left, bottom, width, height])
842 844 # x = range(num_prof)
843 845 # y = range(num_hei)
844 846 # cp = ax.contour(y,x,tmp)
845 847 # ax.clabel(cp, inline=True,fontsize=10)
846 848 # plt.show()
847 849
848 850 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
849 851 tmp = numpy.reshape(tmp,num_prof*num_hei)
850 852 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
851 853 indxs2 = (tmp > 0).nonzero()
852 854
853 855 indxs1 = (indxs1[0])
854 856 indxs2 = indxs2[0]
855 857 #indxs1 = numpy.array(indxs1[0])
856 858 #indxs2 = numpy.array(indxs2[0])
857 859 indxs = None
858 860 #print indxs1 , indxs2
859 861 for iv in range(len(indxs2)):
860 862 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
861 863 #print len(indxs2), indv
862 864 if len(indv[0]) > 0 :
863 865 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
864 866 # print indxs
865 867 indxs = indxs[1:]
866 868 #print(indxs, len(indxs))
867 869 if len(indxs) < 4 :
868 870 array[ii,:,:] = 0.
869 871 return
870 872
871 873 xpos = numpy.mod(indxs ,num_hei)
872 874 ypos = (indxs / num_hei)
873 875 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
874 876 #print sx
875 877 xpos = xpos[sx]
876 878 ypos = ypos[sx]
877 879
878 880 # *********************************** Cleaning isolated points **********************************
879 881 ic = 0
880 882 while True :
881 883 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
882 884 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
883 885 #plt.plot(r)
884 886 #plt.show()
885 887 no_coh1 = (numpy.isfinite(r)==True).nonzero()
886 888 no_coh2 = (r <= rth).nonzero()
887 889 #print r, no_coh1, no_coh2
888 890 no_coh1 = numpy.array(no_coh1[0])
889 891 no_coh2 = numpy.array(no_coh2[0])
890 892 no_coh = None
891 893 #print valid1 , valid2
892 894 for iv in range(len(no_coh2)):
893 895 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
894 896 if len(indv[0]) > 0 :
895 897 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
896 898 no_coh = no_coh[1:]
897 899 #print len(no_coh), no_coh
898 900 if len(no_coh) < 4 :
899 901 #print xpos[ic], ypos[ic], ic
900 902 # plt.plot(r)
901 903 # plt.show()
902 904 xpos[ic] = numpy.nan
903 905 ypos[ic] = numpy.nan
904 906
905 907 ic = ic + 1
906 908 if (ic == len(indxs)) :
907 909 break
908 910 #print( xpos, ypos)
909 911
910 912 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
911 913 #print indxs[0]
912 914 if len(indxs[0]) < 4 :
913 915 array[ii,:,:] = 0.
914 916 return
915 917
916 918 xpos = xpos[indxs[0]]
917 919 ypos = ypos[indxs[0]]
918 920 for i in range(0,len(ypos)):
919 921 ypos[i]=int(ypos[i])
920 922 junk = tmp
921 923 tmp = junk*0.0
922 924
923 925 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
924 926 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
925 927
926 928 #print array.shape
927 929 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
928 930 #print tmp.shape
929 931
930 932 # fig = plt.figure(figsize=(6,5))
931 933 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
932 934 # ax = fig.add_axes([left, bottom, width, height])
933 935 # x = range(num_prof)
934 936 # y = range(num_hei)
935 937 # cp = ax.contour(y,x,array[ii,:,:])
936 938 # ax.clabel(cp, inline=True,fontsize=10)
937 939 # plt.show()
938 940 return array
939 941
940 942
941 943 class IntegrationFaradaySpectra(Operation):
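    # Integrates (sums) spectra over n profiles or over timeInterval seconds,
    # replacing outlier blocks per channel/height/profile with the mean of the
    # remaining blocks (a Hildebrand-Sekhon style cut, see pushData) before
    # summing; dataOut.nIncohInt is scaled by n.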
942 944
943 945 __profIndex = 0
944 946 __withOverapping = False
945 947
946 948 __byTime = False
947 949 __initime = None
948 950 __lastdatatime = None
949 951 __integrationtime = None
950 952
951 953 __buffer_spc = None
952 954 __buffer_cspc = None
953 955 __buffer_dc = None
954 956
955 957 __dataReady = False
956 958
957 959 __timeInterval = None
958 960
959 961 n = None
960 962
961 963 def __init__(self):
962 964
963 965 Operation.__init__(self)
964 966
965 967 def setup(self, dataOut,n=None, timeInterval=None, overlapping=False, DPL=None):
966 968 """
967 969 Set the parameters of the integration class.
968 970
969 971 Inputs:
970 972
971 973 n : Number of (incoherent) integrations
972 974 timeInterval : Integration time. If the parameter "n" is given, this one is ignored
973 975 overlapping :
974 976
975 977 """
976 978
977 979 self.__initime = None
978 980 self.__lastdatatime = 0
979 981
980 982 self.__buffer_spc = []
981 983 self.__buffer_cspc = []
982 984 self.__buffer_dc = 0
983 985
984 986 self.__profIndex = 0
985 987 self.__dataReady = False
986 988 self.__byTime = False
987 989
988 990 #self.ByLags = dataOut.ByLags ###REDEFINIR
989 991 self.ByLags = False
990 992
991 993 if DPL != None:
992 994 self.DPL=DPL
993 995 else:
994 996 #self.DPL=dataOut.DPL ###REDEFINIR
995 997 self.DPL=0
996 998
997 999 if n is None and timeInterval is None:
998 1000 raise ValueError("n or timeInterval should be specified ...")
999 1001
1000 1002 if n is not None:
1001 1003 self.n = int(n)
1002 1004 else:
1003 1005
1004 1006 self.__integrationtime = int(timeInterval)
1005 1007 self.n = None
1006 1008 self.__byTime = True
1007 1009
1008 1010 def putData(self, data_spc, data_cspc, data_dc):
1009 1011 """
1010 1012 Add a profile to __buffer_spc and increase __profIndex by one
1011 1013
1012 1014 """
1013 1015
1014 1016 self.__buffer_spc.append(data_spc)
1015 1017
1016 1018 if data_cspc is None:
1017 1019 self.__buffer_cspc = None
1018 1020 else:
1019 1021 self.__buffer_cspc.append(data_cspc)
1020 1022
1021 1023 if data_dc is None:
1022 1024 self.__buffer_dc = None
1023 1025 else:
1024 1026 self.__buffer_dc += data_dc
1025 1027
1026 1028 self.__profIndex += 1
1027 1029
1028 1030 return
1029 1031
1030 1032 def hildebrand_sekhon_Integration(self,data,navg):
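        # Hildebrand-Sekhon style threshold over the sorted samples: returns j, the
        # number of samples accepted as noise, and sortID, the argsort indices, so
        # sortID[j:] point to the outlier samples in the original ordering.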
1031 1033
1032 1034 sortdata = numpy.sort(data, axis=None)
1033 1035 sortID=data.argsort()
1034 1036 lenOfData = len(sortdata)
1035 1037 nums_min = lenOfData*0.75
1036 1038 if nums_min <= 5:
1037 1039 nums_min = 5
1038 1040 sump = 0.
1039 1041 sumq = 0.
1040 1042 j = 0
1041 1043 cont = 1
1042 1044 while((cont == 1)and(j < lenOfData)):
1043 1045 sump += sortdata[j]
1044 1046 sumq += sortdata[j]**2
1045 1047 if j > nums_min:
1046 1048 rtest = float(j)/(j-1) + 1.0/navg
1047 1049 if ((sumq*j) > (rtest*sump**2)):
1048 1050 j = j - 1
1049 1051 sump = sump - sortdata[j]
1050 1052 sumq = sumq - sortdata[j]**2
1051 1053 cont = 0
1052 1054 j += 1
1053 1055 #lnoise = sump / j
1054 1056
1055 1057 return j,sortID
1056 1058
1057 1059 def pushData(self):
1058 1060 """
1059 1061 Return the sum of the last profiles and the profiles used in the sum.
1060 1062
1061 1063 Affected:
1062 1064
1063 1065 self.__profIndex
1064 1066
1065 1067 """
1066 1068 bufferH=None
1067 1069 buffer=None
1068 1070 buffer1=None
1069 1071 buffer_cspc=None
1070 1072 self.__buffer_spc=numpy.array(self.__buffer_spc)
1071 1073 self.__buffer_cspc=numpy.array(self.__buffer_cspc)
1072 1074 freq_dc = int(self.__buffer_spc.shape[2] / 2)
1073 1075 #print("FREQ_DC",freq_dc,self.__buffer_spc.shape,self.nHeights)
1074 1076 for k in range(7,self.nHeights):
1075 1077 buffer_cspc=numpy.copy(self.__buffer_cspc[:,:,:,k])
1076 1078 outliers_IDs_cspc=[]
1077 1079 cspc_outliers_exist=False
1078 #print("AQUIII")
1079 1080 for i in range(self.nChannels):#dataOut.nChannels):
1080 1081
1081 1082 buffer1=numpy.copy(self.__buffer_spc[:,i,:,k])
1082 1083 indexes=[]
1083 1084 #sortIDs=[]
1084 1085 outliers_IDs=[]
1085 1086
1086 1087 for j in range(self.nProfiles):
1087 1088 # if i==0 and j==freq_dc: #NOT CONSIDERING DC PROFILE AT CHANNEL 0
1088 1089 # continue
1089 1090 # if i==1 and j==0: #NOT CONSIDERING DC PROFILE AT CHANNEL 1
1090 1091 # continue
1091 1092 buffer=buffer1[:,j]
1092 1093 index,sortID=self.hildebrand_sekhon_Integration(buffer,1)
1093 1094
1094 1095 indexes.append(index)
1095 1096 #sortIDs.append(sortID)
1096 1097 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1097 1098
1098 1099 outliers_IDs=numpy.array(outliers_IDs)
1099 1100 outliers_IDs=outliers_IDs.ravel()
1100 1101 outliers_IDs=numpy.unique(outliers_IDs)
1101 1102 outliers_IDs=outliers_IDs.astype(numpy.dtype('int64'))
1102 1103 indexes=numpy.array(indexes)
1103 1104 indexmin=numpy.min(indexes)
1104 1105
1105 1106 if indexmin != buffer1.shape[0]:
1106 1107 cspc_outliers_exist=True
1107 1108 ###sortdata=numpy.sort(buffer1,axis=0)
1108 1109 ###avg2=numpy.mean(sortdata[:indexmin,:],axis=0)
1109 1110 lt=outliers_IDs
1110 1111 avg=numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt],:],axis=0)
1111 1112
1112 1113 for p in list(outliers_IDs):
1113 1114 buffer1[p,:]=avg
1114 1115
1115 1116 self.__buffer_spc[:,i,:,k]=numpy.copy(buffer1)
1116 1117 ###cspc IDs
1117 1118 #indexmin_cspc+=indexmin_cspc
1118 1119 outliers_IDs_cspc=numpy.append(outliers_IDs_cspc,outliers_IDs)
1119 1120
1120 1121 #if not breakFlag:
1121 1122 outliers_IDs_cspc=outliers_IDs_cspc.astype(numpy.dtype('int64'))
1122 1123 if cspc_outliers_exist:
1123 1124 #sortdata=numpy.sort(buffer_cspc,axis=0)
1124 1125 #avg=numpy.mean(sortdata[:indexmin_cpsc,:],axis=0)
1125 1126 lt=outliers_IDs_cspc
1126 1127
1127 1128 avg=numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt],:],axis=0)
1128 1129 for p in list(outliers_IDs_cspc):
1129 1130 buffer_cspc[p,:]=avg
1130 1131
1131 1132 self.__buffer_cspc[:,:,:,k]=numpy.copy(buffer_cspc)
1132 1133 #else:
1133 1134 #break
1134 1135
1135 1136
1136 1137
1137 1138
1138 1139 buffer=None
1139 1140 bufferH=None
1140 1141 buffer1=None
1141 1142 buffer_cspc=None
1142 1143
1143 1144 #print("cpsc",self.__buffer_cspc[:,0,0,0,0])
1144 1145 #print(self.__profIndex)
1145 1146 #exit()
1146 1147
1147 1148 buffer=None
1148 1149 #print(self.__buffer_spc[:,1,3,20,0])
1149 1150 #print(self.__buffer_spc[:,1,5,37,0])
1150 1151 data_spc = numpy.sum(self.__buffer_spc,axis=0)
1151 1152 data_cspc = numpy.sum(self.__buffer_cspc,axis=0)
1152 1153
1153 1154 #print(numpy.shape(data_spc))
1154 1155 #data_spc[1,4,20,0]=numpy.nan
1155 1156
1156 1157 #data_cspc = self.__buffer_cspc
1157 1158 data_dc = self.__buffer_dc
1158 1159 n = self.__profIndex
1159 1160
1160 1161 self.__buffer_spc = []
1161 1162 self.__buffer_cspc = []
1162 1163 self.__buffer_dc = 0
1163 1164 self.__profIndex = 0
1164 1165
1165 1166 return data_spc, data_cspc, data_dc, n
1166 1167
1167 1168 def byProfiles(self, *args):
1168 1169
1169 1170 self.__dataReady = False
1170 1171 avgdata_spc = None
1171 1172 avgdata_cspc = None
1172 1173 avgdata_dc = None
1173 1174
1174 1175 self.putData(*args)
1175 1176
1176 1177 if self.__profIndex == self.n:
1177 1178
1178 1179 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1179 1180 self.n = n
1180 1181 self.__dataReady = True
1181 1182
1182 1183 return avgdata_spc, avgdata_cspc, avgdata_dc
1183 1184
1184 1185 def byTime(self, datatime, *args):
1185 1186
1186 1187 self.__dataReady = False
1187 1188 avgdata_spc = None
1188 1189 avgdata_cspc = None
1189 1190 avgdata_dc = None
1190 1191
1191 1192 self.putData(*args)
1192 1193
1193 1194 if (datatime - self.__initime) >= self.__integrationtime:
1194 1195 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1195 1196 self.n = n
1196 1197 self.__dataReady = True
1197 1198
1198 1199 return avgdata_spc, avgdata_cspc, avgdata_dc
1199 1200
1200 1201 def integrate(self, datatime, *args):
1201 1202
1202 1203 if self.__profIndex == 0:
1203 1204 self.__initime = datatime
1204 1205
1205 1206 if self.__byTime:
1206 1207 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1207 1208 datatime, *args)
1208 1209 else:
1209 1210 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1210 1211
1211 1212 if not self.__dataReady:
1212 1213 return None, None, None, None
1213 1214
1214 1215 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1215 1216
1216 1217 def run(self, dataOut, n=None, DPL = None,timeInterval=None, overlapping=False):
1217 1218 if n == 1:
1218 1219 return dataOut
1219 1220
1220 1221 dataOut.flagNoData = True
1221 1222
1222 1223 if not self.isConfig:
1223 1224 self.setup(dataOut, n, timeInterval, overlapping,DPL )
1224 1225 self.isConfig = True
1225 1226
1226 1227 if not self.ByLags:
1227 1228 self.nProfiles=dataOut.nProfiles
1228 1229 self.nChannels=dataOut.nChannels
1229 1230 self.nHeights=dataOut.nHeights
1230 1231 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1231 1232 dataOut.data_spc,
1232 1233 dataOut.data_cspc,
1233 1234 dataOut.data_dc)
1234 1235 else:
1235 1236 self.nProfiles=dataOut.nProfiles
1236 1237 self.nChannels=dataOut.nChannels
1237 1238 self.nHeights=dataOut.nHeights
1238 1239 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1239 1240 dataOut.dataLag_spc,
1240 1241 dataOut.dataLag_cspc,
1241 1242 dataOut.dataLag_dc)
1242 1243
1243 1244 if self.__dataReady:
1244 1245
1245 1246 if not self.ByLags:
1246 1247
1247 1248 dataOut.data_spc = numpy.squeeze(avgdata_spc)
1248 1249 dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
1249 1250 dataOut.data_dc = avgdata_dc
1250 1251 else:
1251 1252 dataOut.dataLag_spc = avgdata_spc
1252 1253 dataOut.dataLag_cspc = avgdata_cspc
1253 1254 dataOut.dataLag_dc = avgdata_dc
1254 1255
1255 1256 dataOut.data_spc=dataOut.dataLag_spc[:,:,:,dataOut.LagPlot]
1256 1257 dataOut.data_cspc=dataOut.dataLag_cspc[:,:,:,dataOut.LagPlot]
1257 1258 dataOut.data_dc=dataOut.dataLag_dc[:,:,dataOut.LagPlot]
1258 1259
1259 1260
1260 1261 dataOut.nIncohInt *= self.n
1261 1262 dataOut.utctime = avgdatatime
1262 1263 dataOut.flagNoData = False
1263 1264
1264 1265 return dataOut
1265 1266
1266 1267 class removeInterference(Operation):
1267 1268
1268 1269 def removeInterference2(self):
1269 1270
1270 1271 cspc = self.dataOut.data_cspc
1271 1272 spc = self.dataOut.data_spc
1272 1273 Heights = numpy.arange(cspc.shape[2])
1273 1274 realCspc = numpy.abs(cspc)
1274 1275
1275 1276 for i in range(cspc.shape[0]):
1276 1277 LinePower= numpy.sum(realCspc[i], axis=0)
1277 1278 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1278 1279 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1279 1280 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1280 1281 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1281 1282 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1282 1283
1283 1284
1284 1285 # flag the profiles whose summed power over the selected heights exceeds the lower threshold
1285 1286 InterferenceRange = numpy.where(InterferenceSum > InterferenceThresholdMin)[0]
1286 1287 if len(InterferenceRange) < int(cspc.shape[1]*0.3):
1287 1288 cspc[i,InterferenceRange,:] = numpy.nan
1288 1289
1289 1290 self.dataOut.data_cspc = cspc
1290 1291
1291 1292 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
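# Profile-domain interference removal, first per channel and then per cross-spectra pair:
# an average interference spectrum is estimated from the nhei_interf lowest-power heights
# (offset by offhei_interf), subtracted from every height, and the most contaminated
# Doppler bin is rebuilt by cubic interpolation from its neighbours.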
1292 1293
1293 1294 jspectra = self.dataOut.data_spc
1294 1295 jcspectra = self.dataOut.data_cspc
1295 1296 jnoise = self.dataOut.getNoise()
1296 1297 num_incoh = self.dataOut.nIncohInt
1297 1298
1298 1299 num_channel = jspectra.shape[0]
1299 1300 num_prof = jspectra.shape[1]
1300 1301 num_hei = jspectra.shape[2]
1301 1302
1302 1303 # hei_interf
1303 1304 if hei_interf is None:
1304 1305 count_hei = int(num_hei / 2)
1305 1306 hei_interf = numpy.arange(num_hei - count_hei, num_hei)
1307 1308 # nhei_interf
1308 1309 if (nhei_interf is None):
1309 1310 nhei_interf = 5
1310 1311 if (nhei_interf < 1):
1311 1312 nhei_interf = 1
1312 1313 if (nhei_interf > count_hei):
1313 1314 nhei_interf = count_hei
1314 1315 if (offhei_interf is None):
1315 1316 offhei_interf = 0
1316 1317
1317 1318 ind_hei = list(range(num_hei))
1318 1319 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1319 1320 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1320 1321 mask_prof = numpy.asarray(list(range(num_prof)))
1321 1322 num_mask_prof = mask_prof.size
1322 1323 comp_mask_prof = [0, num_prof / 2]
1323 1324
1324 1325 # noise_exist: indicates whether jnoise is defined and holds the noise level for every channel
1325 1326 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1326 1327 jnoise = numpy.nan * numpy.ones(num_channel)
1327 1328 noise_exist = jnoise[0] < numpy.inf
1328 1329
1329 1330 # Interference removal subroutine
1330 1331 for ich in range(num_channel):
1331 1332 # Sort the spectra by power (lowest to highest)
1332 1333 power = jspectra[ich, mask_prof, :]
1333 1334 power = power[:, hei_interf]
1334 1335 power = power.sum(axis=0)
1335 1336 psort = power.ravel().argsort()
1336 1337
1337 1338 # Estimate the average interference in the power spectra from the nhei_interf lowest-power heights
1338 1339 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1339 1340 offhei_interf, nhei_interf + offhei_interf))]]]
1340 1341
1341 1342 if noise_exist:
1342 1343 # tmp_noise = jnoise[ich] / num_prof
1343 1344 tmp_noise = jnoise[ich]
1344 1345 junkspc_interf = junkspc_interf - tmp_noise
1345 1346 #junkspc_interf[:,comp_mask_prof] = 0
1346 1347
1347 1348 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1348 1349 jspc_interf = jspc_interf.transpose()
1349 1350 # Classify the average interference spectrum
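# bins at or below the expected noise fluctuation (noise / sqrt(nIncohInt)) are zeroed;
# stronger bins are kept as the interference to be subtracted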
1350 1351 noiseid = numpy.where(
1351 1352 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1352 1353 noiseid = noiseid[0]
1353 1354 cnoiseid = noiseid.size
1354 1355 interfid = numpy.where(
1355 1356 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1356 1357 interfid = interfid[0]
1357 1358 cinterfid = interfid.size
1358 1359
1359 1360 if (cnoiseid > 0):
1360 1361 jspc_interf[noiseid] = 0
1361 1362
1362 1363 # Expand the set of profiles to clean (include the adjacent profiles)
1363 1364 if (cinterfid > 0):
1364 1365 new_interfid = (
1365 1366 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1366 1367 new_interfid = numpy.unique(new_interfid)
1369 1370 new_cinterfid = new_interfid.size
1370 1371 else:
1371 1372 new_cinterfid = 0
1372 1373
1373 1374 for ip in range(new_cinterfid):
1374 1375 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1375 1376 jspc_interf[new_interfid[ip]
1376 1377 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1377 1378
1378 1379 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1379 1380 ind_hei] - jspc_interf # fix indices
1380 1381
1381 1382 # Remove the interference at the profile with the strongest interference
1382 1383 ListAux = jspc_interf[mask_prof].tolist()
1383 1384 maxid = ListAux.index(max(ListAux))
1384 1385
1385 1386 if cinterfid > 0:
1386 1387 for ip in range(cinterfid * (interf == 2) - 1):
1387 1388 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1388 1389 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()[0]
1389 1390 cind = len(ind)
1390 1391
1391 1392 if (cind > 0):
1392 1393 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1393 1394 (1 + (numpy.random.uniform(size=cind) - 0.5) /
1394 1395 numpy.sqrt(num_incoh))
1395 1396
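# Rebuild the most contaminated Doppler bin (maxid) by cubic interpolation: xx stores the
# powers 0..3 of the neighbour offsets (-2, -1, +1, +2) column by column, so the first
# column of its inverse gives the weights that evaluate the fitted cubic at offset 0,
# i.e. at the bin being replaced.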
1396 1397 ind = numpy.array([-2, -1, 1, 2])
1397 1398 xx = numpy.zeros([4, 4])
1398 1399
1399 1400 for id1 in range(4):
1400 1401 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1401 1402
1402 1403 xx_inv = numpy.linalg.inv(xx)
1403 1404 xx = xx_inv[:, 0]
1404 1405 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1405 1406 yy = jspectra[ich, mask_prof[ind], :]
1406 1407 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1407 1408 yy.transpose(), xx)
1408 1409
1409 1410 indAux = (jspectra[ich, :, :] < tmp_noise *
1410 1411 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1411 1412 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1412 1413 (1 - 1 / numpy.sqrt(num_incoh))
1413 1414
1414 1415 # Interference removal in the cross-spectra
1415 1416 if jcspectra is None:
1416 1417 return jspectra, jcspectra
1417 1418 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1418 1419 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1419 1420
1420 1421 for ip in range(num_pairs):
1421 1422
1422 1423 #-------------------------------------------
1423 1424
1424 1425 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1425 1426 cspower = cspower[:, hei_interf]
1426 1427 cspower = cspower.sum(axis=0)
1427 1428
1428 1429 cspsort = cspower.ravel().argsort()
1429 1430 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1430 1431 offhei_interf, nhei_interf + offhei_interf))]]]
1431 1432 junkcspc_interf = junkcspc_interf.transpose()
1432 1433 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1433 1434
1434 1435 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1435 1436
1436 1437 median_real = int(numpy.median(numpy.real(
1437 1438 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1438 1439 median_imag = int(numpy.median(numpy.imag(
1439 1440 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1440 1441 comp_mask_prof = [int(e) for e in comp_mask_prof]
1441 1442 junkcspc_interf[comp_mask_prof, :] = complex(
1442 1443 median_real, median_imag)
1443 1444
1444 1445 for iprof in range(num_prof):
1445 1446 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1446 1447 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1447 1448
1448 1449 # Remove the interference
1449 1450 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1450 1451 :, ind_hei] - jcspc_interf
1451 1452
1452 1453 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1453 1454 maxid = ListAux.index(max(ListAux))
1454 1455
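# same cubic-interpolation replacement as for the power spectra, applied to the most
# contaminated Doppler bin of each cross-spectrum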
1455 1456 ind = numpy.array([-2, -1, 1, 2])
1456 1457 xx = numpy.zeros([4, 4])
1457 1458
1458 1459 for id1 in range(4):
1459 1460 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1460 1461
1461 1462 xx_inv = numpy.linalg.inv(xx)
1462 1463 xx = xx_inv[:, 0]
1463 1464
1464 1465 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1465 1466 yy = jcspectra[ip, mask_prof[ind], :]
1466 1467 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1467 1468
1468 1469 # Save results
1469 1470 self.dataOut.data_spc = jspectra
1470 1471 self.dataOut.data_cspc = jcspectra
1471 1472
1472 1473 return 1
1473 1474
1474 1475 def run(self, dataOut, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
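# mode=1: profile-domain removal (removeInterference); mode=2: height-threshold method on the cross-spectra (removeInterference2)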
1475 1476
1476 1477 self.dataOut = dataOut
1477 1478
1478 1479 if mode == 1:
1479 1480 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1480 1481 elif mode == 2:
1481 1482 self.removeInterference2()
1482 1483
1483 1484 return self.dataOut
1484 1485
1485 1486
1486 1487 class IncohInt(Operation):
1487 1488
1488 1489 __profIndex = 0
1489 1490 __withOverapping = False
1490 1491
1491 1492 __byTime = False
1492 1493 __initime = None
1493 1494 __lastdatatime = None
1494 1495 __integrationtime = None
1495 1496
1496 1497 __buffer_spc = None
1497 1498 __buffer_cspc = None
1498 1499 __buffer_dc = None
1499 1500
1500 1501 __dataReady = False
1501 1502
1502 1503 __timeInterval = None
1503 1504
1504 1505 n = None
1505 1506
1506 1507 def __init__(self):
1507 1508
1508 1509 Operation.__init__(self)
1509 1510
1510 1511 def setup(self, n=None, timeInterval=None, overlapping=False):
1511 1512 """
1512 1513 Set the parameters of the integration class.
1513 1514
1514 1515 Inputs:
1515 1516
1516 1517 n : Number of incoherent integrations (spectra to sum)
1517 1518 timeInterval : Integration time in seconds; ignored when "n" is given
1518 1519 overlapping : reserved; not used by the current implementation
1519 1520
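Example (illustrative sketch only; the loop, the stream name, the process() call and
the value n=8 are placeholders, not taken from this file):

    op = IncohInt()
    for dataOut in spectra_stream: # any iterable of Spectra objects
        dataOut = op.run(dataOut, n=8) # sum 8 spectra incoherently
        if not dataOut.flagNoData: # becomes False every 8 inputs
            process(dataOut.data_spc) # integrated spectra available here
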
1520 1521 """
1521 1522
1522 1523 self.__initime = None
1523 1524 self.__lastdatatime = 0
1524 1525
1525 1526 self.__buffer_spc = 0
1526 1527 self.__buffer_cspc = 0
1527 1528 self.__buffer_dc = 0
1528 1529
1529 1530 self.__profIndex = 0
1530 1531 self.__dataReady = False
1531 1532 self.__byTime = False
1532 1533
1533 1534 if n is None and timeInterval is None:
1534 1535 raise ValueError("n or timeInterval should be specified ...")
1535 1536
1536 1537 if n is not None:
1537 1538 self.n = int(n)
1538 1539 else:
1539 1540
1540 1541 self.__integrationtime = int(timeInterval)
1541 1542 self.n = None
1542 1543 self.__byTime = True
1543 1544
1544 1545 def putData(self, data_spc, data_cspc, data_dc):
1545 1546 """
1546 1547 Accumulate one set of spectra into __buffer_spc, __buffer_cspc and __buffer_dc, and increase __profIndex by one
1547 1548
1548 1549 """
1549 1550
1550 1551 self.__buffer_spc += data_spc
1551 1552
1552 1553 if data_cspc is None:
1553 1554 self.__buffer_cspc = None
1554 1555 else:
1555 1556 self.__buffer_cspc += data_cspc
1556 1557
1557 1558 if data_dc is None:
1558 1559 self.__buffer_dc = None
1559 1560 else:
1560 1561 self.__buffer_dc += data_dc
1561 1562
1562 1563 self.__profIndex += 1
1563 1564
1564 1565 return
1565 1566
1566 1567 def pushData(self):
1567 1568 """
1568 1569 Return the accumulated sums (spc, cspc, dc) and the number of profiles integrated, then reset the buffers.
1569 1570
1570 1571 Affected:
1571 1572
1572 1573 self.__profIndex, self.__buffer_spc, self.__buffer_cspc, self.__buffer_dc
1573 1574
1574 1575 """
1575 1576
1576 1577 data_spc = self.__buffer_spc
1577 1578 data_cspc = self.__buffer_cspc
1578 1579 data_dc = self.__buffer_dc
1579 1580 n = self.__profIndex
1580 1581
1581 1582 self.__buffer_spc = 0
1582 1583 self.__buffer_cspc = 0
1583 1584 self.__buffer_dc = 0
1584 1585 self.__profIndex = 0
1585 1586
1586 1587 return data_spc, data_cspc, data_dc, n
1587 1588
1588 1589 def byProfiles(self, *args):
1589 1590
1590 1591 self.__dataReady = False
1591 1592 avgdata_spc = None
1592 1593 avgdata_cspc = None
1593 1594 avgdata_dc = None
1594 1595
1595 1596 self.putData(*args)
1596 1597
1597 1598 if self.__profIndex == self.n:
1598 1599
1599 1600 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1600 1601 self.n = n
1601 1602 self.__dataReady = True
1602 1603
1603 1604 return avgdata_spc, avgdata_cspc, avgdata_dc
1604 1605
1605 1606 def byTime(self, datatime, *args):
1606 1607
1607 1608 self.__dataReady = False
1608 1609 avgdata_spc = None
1609 1610 avgdata_cspc = None
1610 1611 avgdata_dc = None
1611 1612
1612 1613 self.putData(*args)
1613 1614
1614 1615 if (datatime - self.__initime) >= self.__integrationtime:
1615 1616 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1616 1617 self.n = n
1617 1618 self.__dataReady = True
1618 1619
1619 1620 return avgdata_spc, avgdata_cspc, avgdata_dc
1620 1621
1621 1622 def integrate(self, datatime, *args):
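# accumulate one spectra set and, once the configured condition is met (n profiles added or
# the integration time elapsed), return (start_time, spc_sum, cspc_sum, dc_sum);
# returns (None, None, None, None) while still accumulating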
1622 1623
1623 1624 if self.__profIndex == 0:
1624 1625 self.__initime = datatime
1625 1626
1626 1627 if self.__byTime:
1627 1628 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1628 1629 datatime, *args)
1629 1630 else:
1630 1631 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1631 1632
1632 1633 if not self.__dataReady:
1633 1634 return None, None, None, None
1634 1635
1635 1636 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1636 1637
1637 1638 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1638 1639 if n == 1:
1639 1640 return dataOut
1640 1641
1641 1642 dataOut.flagNoData = True
1642 1643
1643 1644 if not self.isConfig:
1644 1645 self.setup(n, timeInterval, overlapping)
1645 1646 self.isConfig = True
1646 1647
1647 1648 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1648 1649 dataOut.data_spc,
1649 1650 dataOut.data_cspc,
1650 1651 dataOut.data_dc)
1651 1652
1652 1653 if self.__dataReady:
1653 1654
1654 1655 dataOut.data_spc = avgdata_spc
1655 1656 dataOut.data_cspc = avgdata_cspc
1656 1657 dataOut.data_dc = avgdata_dc
1657 1658 dataOut.nIncohInt *= self.n
1658 1659 dataOut.utctime = avgdatatime
1659 1660 dataOut.flagNoData = False
1660 1661
1661 1662 return dataOut
1662 1663
1663 1664 class dopplerFlip(Operation):
1664 1665
1665 1666 def run(self, dataOut):
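# Reverse the Doppler axis of the JULIA oblique channel (index 2), keeping the DC bin and
# the bin just below it in place, then write the result back into data_spc.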
1666 1667 # array 1: (num_chan, num_profiles, num_heights)
1667 1668 self.dataOut = dataOut
1668 1669 # JULIA oblique channel, index 2
1669 1670 # array 2: (num_profiles, num_heights)
1670 1671 jspectra = self.dataOut.data_spc[2]
1671 1672 jspectra_tmp = numpy.zeros(jspectra.shape)
1672 1673 num_profiles = jspectra.shape[0]
1673 1674 freq_dc = int(num_profiles / 2)
1674 1675 # Flip the Doppler axis profile by profile
1675 1676 for j in range(num_profiles):
1676 1677 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1677 1678 # Restore the DC profile and the one immediately before it (they are not swapped)
1678 1679 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1679 1680 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1680 1681 # write the modified channel back into the channel array
1681 1682 self.dataOut.data_spc[2] = jspectra_tmp
1682 1683
1683 1684 return self.dataOut