-Using ValueError raises instead of IOError...
Miguel Valdez
r684:ab5af7e4405a
@@ -1,1128 +1,1125
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import copy
8 8 import numpy
9 9 import datetime
10 10
11 11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12
13 13 def getNumpyDtype(dataTypeCode):
14 14
15 15 if dataTypeCode == 0:
16 16 numpyDtype = numpy.dtype([('real','<i1'),('imag','<i1')])
17 17 elif dataTypeCode == 1:
18 18 numpyDtype = numpy.dtype([('real','<i2'),('imag','<i2')])
19 19 elif dataTypeCode == 2:
20 20 numpyDtype = numpy.dtype([('real','<i4'),('imag','<i4')])
21 21 elif dataTypeCode == 3:
22 22 numpyDtype = numpy.dtype([('real','<i8'),('imag','<i8')])
23 23 elif dataTypeCode == 4:
24 24 numpyDtype = numpy.dtype([('real','<f4'),('imag','<f4')])
25 25 elif dataTypeCode == 5:
26 26 numpyDtype = numpy.dtype([('real','<f8'),('imag','<f8')])
27 27 else:
28 28 raise ValueError, 'dataTypeCode was not defined'
29 29
30 30 return numpyDtype
31 31
32 32 def getDataTypeCode(numpyDtype):
33 33
34 34 if numpyDtype == numpy.dtype([('real','<i1'),('imag','<i1')]):
35 35 datatype = 0
36 36 elif numpyDtype == numpy.dtype([('real','<i2'),('imag','<i2')]):
37 37 datatype = 1
38 38 elif numpyDtype == numpy.dtype([('real','<i4'),('imag','<i4')]):
39 39 datatype = 2
40 40 elif numpyDtype == numpy.dtype([('real','<i8'),('imag','<i8')]):
41 41 datatype = 3
42 42 elif numpyDtype == numpy.dtype([('real','<f4'),('imag','<f4')]):
43 43 datatype = 4
44 44 elif numpyDtype == numpy.dtype([('real','<f8'),('imag','<f8')]):
45 45 datatype = 5
46 46 else:
47 47 datatype = None
48 48
49 49 return datatype
50 50
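The two helpers above form an inverse pair between the integer type codes stored in the processing header and numpy structured dtypes. A minimal round-trip sketch (code 4 is the complex float32 case):

    dtype4 = getNumpyDtype(4)            # [('real','<f4'),('imag','<f4')]
    assert getDataTypeCode(dtype4) == 4  # the mapping is consistent both ways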
51 51 def hildebrand_sekhon(data, navg):
52 52 """
53 53 Objective determination of the noise level in Doppler spectra. The
54 54 technique relies on the fact that, for white Gaussian noise, the standard deviation of the
55 55 spectral densities equals their mean spectral density.
56 56 
57 57 Inputs:
58 58 data : spectral density values (flattened and sorted internally)
59 59 navg : number of averages
60 60 
61 61 Return:
62 62 -1 : any error
63 63 anoise : noise level
64 64 """
65 65
66 66 sortdata = numpy.sort(data,axis=None)
67 67 lenOfData = len(sortdata)
68 68 nums_min = lenOfData/10
69 69
70 70 if (lenOfData/10) > 2:
71 71 nums_min = lenOfData/10
72 72 else:
73 73 nums_min = 2
74 74
75 75 sump = 0.
76 76
77 77 sumq = 0.
78 78
79 79 j = 0
80 80
81 81 cont = 1
82 82
83 83 while((cont==1)and(j<lenOfData)):
84 84
85 85 sump += sortdata[j]
86 86
87 87 sumq += sortdata[j]**2
88 88
89 89 if j > nums_min:
90 90 rtest = float(j)/(j-1) + 1.0/navg
91 91 if ((sumq*j) > (rtest*sump**2)):
92 92 j = j - 1
93 93 sump = sump - sortdata[j]
94 94 sumq = sumq - sortdata[j]**2
95 95 cont = 0
96 96
97 97 j += 1
98 98
99 99 lnoise = sump /j
100 100 stdv = numpy.sqrt((sumq - lnoise**2)/(j - 1))
101 101 return lnoise
102 102
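A minimal usage sketch of hildebrand_sekhon on synthetic white-noise spectral densities (the array below is illustrative only); for pure white Gaussian noise the estimated level should land close to the mean spectral density:

    import numpy
    spectra = numpy.abs(numpy.random.randn(1000) + 1j*numpy.random.randn(1000))**2
    print hildebrand_sekhon(spectra, navg=1), spectra.mean()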
103 103 class Beam:
104 104 def __init__(self):
105 105 self.codeList = []
106 106 self.azimuthList = []
107 107 self.zenithList = []
108 108
109 109 class GenericData(object):
110 110
111 111 flagNoData = True
112 112
113 113 def __init__(self):
114 114
115 raise ValueError, "This class has not been implemented"
115 raise NotImplementedError
116 116
117 117 def copy(self, inputObj=None):
118 118
119 119 if inputObj == None:
120 120 return copy.deepcopy(self)
121 121
122 122 for key in inputObj.__dict__.keys():
123 123 self.__dict__[key] = inputObj.__dict__[key]
124 124
125 125 def deepcopy(self):
126 126
127 127 return copy.deepcopy(self)
128 128
129 129 def isEmpty(self):
130 130
131 131 return self.flagNoData
132 132
133 133 class JROData(GenericData):
134 134
135 135 # m_BasicHeader = BasicHeader()
136 136 # m_ProcessingHeader = ProcessingHeader()
137 137
138 138 systemHeaderObj = SystemHeader()
139 139
140 140 radarControllerHeaderObj = RadarControllerHeader()
141 141
142 142 # data = None
143 143
144 144 type = None
145 145
146 146 datatype = None #dtype but in string
147 147
148 148 # dtype = None
149 149
150 150 # nChannels = None
151 151
152 152 # nHeights = None
153 153
154 154 nProfiles = None
155 155
156 156 heightList = None
157 157
158 158 channelList = None
159 159
160 160 flagDiscontinuousBlock = False
161 161
162 162 useLocalTime = False
163 163
164 164 utctime = None
165 165
166 166 timeZone = None
167 167
168 168 dstFlag = None
169 169
170 170 errorCount = None
171 171
172 172 blocksize = None
173 173
174 174 # nCode = None
175 175 #
176 176 # nBaud = None
177 177 #
178 178 # code = None
179 179
180 180 flagDecodeData = False #assume the data is not decoded
181 181 
182 182 flagDeflipData = False #assume the data has not been deflipped
183 183
184 184 flagShiftFFT = False
185 185
186 186 # ippSeconds = None
187 187
188 188 # timeInterval = None
189 189
190 190 nCohInt = None
191 191
192 192 # noise = None
193 193
194 194 windowOfFilter = 1
195 195
196 196 #Speed of light
197 197 C = 3e8
198 198
199 199 frequency = 49.92e6
200 200
201 201 realtime = False
202 202
203 203 beacon_heiIndexList = None
204 204
205 205 last_block = None
206 206
207 207 blocknow = None
208 208
209 209 azimuth = None
210 210
211 211 zenith = None
212 212
213 213 beam = Beam()
214 214
215 215 profileIndex = None
216 216
217 217 def __init__(self):
218 218
219 raise ValueError, "This class has not been implemented"
219 raise NotImplementedError
220 220
221 221 def getNoise(self):
222 222
223 raise ValueError, "Not implemented"
223 raise NotImplementedError
224 224
225 225 def getNChannels(self):
226 226
227 227 return len(self.channelList)
228 228
229 229 def getChannelIndexList(self):
230 230
231 231 return range(self.nChannels)
232 232
233 233 def getNHeights(self):
234 234
235 235 return len(self.heightList)
236 236
237 237 def getHeiRange(self, extrapoints=0):
238 238
239 239 heis = self.heightList
240 240 # deltah = self.heightList[1] - self.heightList[0]
241 241 #
242 242 # heis.append(self.heightList[-1])
243 243
244 244 return heis
245 245
246 246 def getltctime(self):
247 247
248 248 if self.useLocalTime:
249 249 return self.utctime - self.timeZone*60
250 250
251 251 return self.utctime
252 252
253 253 def getDatatime(self):
254 254
255 255 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
256 256 return datatimeValue
257 257
258 258 def getTimeRange(self):
259 259
260 260 datatime = []
261 261
262 262 datatime.append(self.ltctime)
263 263 datatime.append(self.ltctime + self.timeInterval+60)
264 264
265 265 datatime = numpy.array(datatime)
266 266
267 267 return datatime
268 268
269 269 def getFmax(self):
270 270
271 271 PRF = 1./(self.ippSeconds * self.nCohInt)
272 272
273 273 fmax = PRF/2.
274 274
275 275 return fmax
276 276
277 277 def getVmax(self):
278 278
279 279 _lambda = self.C/self.frequency
280 280
281 281 vmax = self.getFmax() * _lambda
282 282
283 283 return vmax
284 284
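Worked numbers for getFmax/getVmax above, with hypothetical settings (1 ms IPP, no coherent integration) and the class constants C and frequency:

    ippSeconds, nCohInt, C, frequency = 1e-3, 1, 3e8, 49.92e6  # hypothetical values
    fmax = 1./(ippSeconds*nCohInt)/2.                          # PRF/2 = 500 Hz
    vmax = fmax*(C/frequency)                                  # ~3.0e3 m/s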
285 285 def get_ippSeconds(self):
286 286 '''
287 287 '''
288 288 return self.radarControllerHeaderObj.ippSeconds
289 289
290 290 def set_ippSeconds(self, ippSeconds):
291 291 '''
292 292 '''
293 293
294 294 self.radarControllerHeaderObj.ippSeconds = ippSeconds
295 295
296 296 return
297 297
298 298 def get_dtype(self):
299 299 '''
300 300 '''
301 301 return getNumpyDtype(self.datatype)
302 302
303 303 def set_dtype(self, numpyDtype):
304 304 '''
305 305 '''
306 306
307 307 self.datatype = getDataTypeCode(numpyDtype)
308 308
309 309 def get_code(self):
310 310 '''
311 311 '''
312 312 return self.radarControllerHeaderObj.code
313 313
314 314 def set_code(self, code):
315 315 '''
316 316 '''
317 317 self.radarControllerHeaderObj.code = code
318 318
319 319 return
320 320
321 321 def get_ncode(self):
322 322 '''
323 323 '''
324 324 return self.radarControllerHeaderObj.nCode
325 325
326 326 def set_ncode(self, nCode):
327 327 '''
328 328 '''
329 329 self.radarControllerHeaderObj.nCode = nCode
330 330
331 331 return
332 332
333 333 def get_nbaud(self):
334 334 '''
335 335 '''
336 336 return self.radarControllerHeaderObj.nBaud
337 337
338 338 def set_nbaud(self, nBaud):
339 339 '''
340 340 '''
341 341 self.radarControllerHeaderObj.nBaud = nBaud
342 342
343 343 return
344 # def getTimeInterval(self):
345 #
346 # raise IOError, "This method should be implemented inside each Class"
347 344
348 345 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
349 346 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
350 347 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
351 348 #noise = property(getNoise, "I'm the 'nHeights' property.")
352 349 datatime = property(getDatatime, "I'm the 'datatime' property")
353 350 ltctime = property(getltctime, "I'm the 'ltctime' property")
354 351 ippSeconds = property(get_ippSeconds, set_ippSeconds)
355 352 dtype = property(get_dtype, set_dtype)
356 353 # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
357 354 code = property(get_code, set_code)
358 355 nCode = property(get_ncode, set_ncode)
359 356 nBaud = property(get_nbaud, set_nbaud)
360 357
361 358 class Voltage(JROData):
362 359
363 360 #data is a numpy array with dimensions (channels, heights)
364 361 data = None
365 362
366 363 def __init__(self):
367 364 '''
368 365 Constructor
369 366 '''
370 367
371 368 self.useLocalTime = True
372 369
373 370 self.radarControllerHeaderObj = RadarControllerHeader()
374 371
375 372 self.systemHeaderObj = SystemHeader()
376 373
377 374 self.type = "Voltage"
378 375
379 376 self.data = None
380 377
381 378 # self.dtype = None
382 379
383 380 # self.nChannels = 0
384 381
385 382 # self.nHeights = 0
386 383
387 384 self.nProfiles = None
388 385
389 386 self.heightList = None
390 387
391 388 self.channelList = None
392 389
393 390 # self.channelIndexList = None
394 391
395 392 self.flagNoData = True
396 393
397 394 self.flagDiscontinuousBlock = False
398 395
399 396 self.utctime = None
400 397
401 398 self.timeZone = None
402 399
403 400 self.dstFlag = None
404 401
405 402 self.errorCount = None
406 403
407 404 self.nCohInt = None
408 405
409 406 self.blocksize = None
410 407
411 408 self.flagDecodeData = False #assume the data is not decoded
412 409 
413 410 self.flagDeflipData = False #assume the data has not been deflipped
414 411 
415 412 self.flagShiftFFT = False
416 413 
417 414 self.flagDataAsBlock = False #Assume the data is read profile by profile
418 415
419 416 self.profileIndex = 0
420 417
421 418 def getNoisebyHildebrand(self, channel = None):
422 419 """
423 420 Determine the noise level using the Hildebrand-Sekhon method
424 421
425 422 Return:
426 423 noiselevel
427 424 """
428 425
429 426 if channel != None:
430 427 data = self.data[channel]
431 428 nChannels = 1
432 429 else:
433 430 data = self.data
434 431 nChannels = self.nChannels
435 432
436 433 noise = numpy.zeros(nChannels)
437 434 power = data * numpy.conjugate(data)
438 435
439 436 for thisChannel in range(nChannels):
440 437 if nChannels == 1:
441 438 daux = power[:].real
442 439 else:
443 440 daux = power[thisChannel,:].real
444 441 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
445 442
446 443 return noise
447 444
448 445 def getNoise(self, type = 1, channel = None):
449 446
450 447 if type == 1:
451 448 noise = self.getNoisebyHildebrand(channel)
452 449
453 450 return 10*numpy.log10(noise)
454 451
455 452 def getPower(self, channel = None):
456 453
457 454 if channel != None:
458 455 data = self.data[channel]
459 456 else:
460 457 data = self.data
461 458
462 459 power = data * numpy.conjugate(data)
463 460
464 461 return 10*numpy.log10(power.real)
465 462
466 463 def getTimeInterval(self):
467 464
468 465 timeInterval = self.ippSeconds * self.nCohInt
469 466
470 467 return timeInterval
471 468
472 469 noise = property(getNoise, "I'm the 'nHeights' property.")
473 470 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
474 471
475 472 class Spectra(JROData):
476 473
477 474 #data is a numpy array with dimensions (channels, profiles, heights)
478 475 data_spc = None
479 476
480 477 #data is a numpy array with dimensions (channels, pairs, heights)
481 478 data_cspc = None
482 479
483 480 #data is a numpy array with dimensions (channels, heights)
484 481 data_dc = None
485 482
486 483 nFFTPoints = None
487 484
488 485 # nPairs = None
489 486
490 487 pairsList = None
491 488
492 489 nIncohInt = None
493 490
494 491 wavelength = None #Needed to compute the velocity range from the frequency
495 492
496 493 nCohInt = None #required to determine the timeInterval value
497 494
498 495 ippFactor = None
499 496
500 497 profileIndex = 0
501 498
502 499 def __init__(self):
503 500 '''
504 501 Constructor
505 502 '''
506 503
507 504 self.useLocalTime = True
508 505
509 506 self.radarControllerHeaderObj = RadarControllerHeader()
510 507
511 508 self.systemHeaderObj = SystemHeader()
512 509
513 510 self.type = "Spectra"
514 511
515 512 # self.data = None
516 513
517 514 # self.dtype = None
518 515
519 516 # self.nChannels = 0
520 517
521 518 # self.nHeights = 0
522 519
523 520 self.nProfiles = None
524 521
525 522 self.heightList = None
526 523
527 524 self.channelList = None
528 525
529 526 # self.channelIndexList = None
530 527
531 528 self.pairsList = None
532 529
533 530 self.flagNoData = True
534 531
535 532 self.flagDiscontinuousBlock = False
536 533
537 534 self.utctime = None
538 535
539 536 self.nCohInt = None
540 537
541 538 self.nIncohInt = None
542 539
543 540 self.blocksize = None
544 541
545 542 self.nFFTPoints = None
546 543
547 544 self.wavelength = None
548 545
549 546 self.flagDecodeData = False #assume the data is not decoded
550 547 
551 548 self.flagDeflipData = False #assume the data has not been deflipped
552 549
553 550 self.flagShiftFFT = False
554 551
555 552 self.ippFactor = 1
556 553
557 554 #self.noise = None
558 555
559 556 self.beacon_heiIndexList = []
560 557
561 558 self.noise_estimation = None
562 559
563 560
564 561 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
565 562 """
566 563 Determine the noise level using the Hildebrand-Sekhon method
567 564
568 565 Return:
569 566 noiselevel
570 567 """
571 568
572 569 noise = numpy.zeros(self.nChannels)
573 570
574 571 for channel in range(self.nChannels):
575 572 daux = self.data_spc[channel,xmin_index:xmax_index,ymin_index:ymax_index]
576 573 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
577 574
578 575 return noise
579 576
580 577 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
581 578
582 579 if self.noise_estimation != None:
583 580 return self.noise_estimation #this was estimated by getNoise Operation defined in jroproc_spectra.py
584 581 else:
585 582 noise = self.getNoisebyHildebrand(xmin_index, xmax_index, ymin_index, ymax_index)
586 583 return noise
587 584
588 585
589 586 def getFreqRange(self, extrapoints=0):
590 587
591 588 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
592 589 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
593 590
594 591 return freqrange
595 592
596 593 def getVelRange(self, extrapoints=0):
597 594
598 595 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
599 596 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
600 597
601 598 return velrange
602 599
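A small sketch of the axis construction used by getFreqRange/getVelRange, with hypothetical values (8 FFT points, ippFactor of 1, fmax of 500 Hz); the axis is centred on zero and shifted by half a bin:

    import numpy
    nFFTPoints, ippFactor, fmax = 8, 1, 500.                    # hypothetical values
    deltafreq = fmax/(nFFTPoints*ippFactor)                     # 62.5 Hz per bin
    freqrange = deltafreq*(numpy.arange(nFFTPoints) - nFFTPoints/2.) - deltafreq/2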
603 600 def getNPairs(self):
604 601
605 602 return len(self.pairsList)
606 603
607 604 def getPairsIndexList(self):
608 605
609 606 return range(self.nPairs)
610 607
611 608 def getNormFactor(self):
612 609
613 610 pwcode = 1
614 611
615 612 if self.flagDecodeData:
616 613 pwcode = numpy.sum(self.code[0]**2)
617 614 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
618 615 normFactor = self.nProfiles*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
619 616
620 617 return normFactor
621 618
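getNormFactor is used to normalise the accumulated spectra; a worked example with hypothetical values and a 13-baud +/-1 code, for which pwcode = sum(code[0]**2) = 13:

    nProfiles, nIncohInt, nCohInt, windowOfFilter, pwcode = 128, 10, 1, 1, 13  # hypothetical
    normFactor = nProfiles*nIncohInt*nCohInt*pwcode*windowOfFilter             # 16640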
622 619 def getFlagCspc(self):
623 620
624 621 if self.data_cspc is None:
625 622 return True
626 623
627 624 return False
628 625
629 626 def getFlagDc(self):
630 627
631 628 if self.data_dc is None:
632 629 return True
633 630
634 631 return False
635 632
636 633 def getTimeInterval(self):
637 634
638 635 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles
639 636
640 637 return timeInterval
641 638
642 639 def setValue(self, value):
643 640
644 641 print "This property should not be initialized"
645 642
646 643 return
647 644
648 645 nPairs = property(getNPairs, setValue, "I'm the 'nPairs' property.")
649 646 pairsIndexList = property(getPairsIndexList, setValue, "I'm the 'pairsIndexList' property.")
650 647 normFactor = property(getNormFactor, setValue, "I'm the 'getNormFactor' property.")
651 648 flag_cspc = property(getFlagCspc, setValue)
652 649 flag_dc = property(getFlagDc, setValue)
653 650 noise = property(getNoise, setValue, "I'm the 'nHeights' property.")
654 651 timeInterval = property(getTimeInterval, setValue, "I'm the 'timeInterval' property")
655 652
656 653 class SpectraHeis(Spectra):
657 654
658 655 data_spc = None
659 656
660 657 data_cspc = None
661 658
662 659 data_dc = None
663 660
664 661 nFFTPoints = None
665 662
666 663 # nPairs = None
667 664
668 665 pairsList = None
669 666
670 667 nCohInt = None
671 668
672 669 nIncohInt = None
673 670
674 671 def __init__(self):
675 672
676 673 self.radarControllerHeaderObj = RadarControllerHeader()
677 674
678 675 self.systemHeaderObj = SystemHeader()
679 676
680 677 self.type = "SpectraHeis"
681 678
682 679 # self.dtype = None
683 680
684 681 # self.nChannels = 0
685 682
686 683 # self.nHeights = 0
687 684
688 685 self.nProfiles = None
689 686
690 687 self.heightList = None
691 688
692 689 self.channelList = None
693 690
694 691 # self.channelIndexList = None
695 692
696 693 self.flagNoData = True
697 694
698 695 self.flagDiscontinuousBlock = False
699 696
700 697 # self.nPairs = 0
701 698
702 699 self.utctime = None
703 700
704 701 self.blocksize = None
705 702
706 703 self.profileIndex = 0
707 704
708 705 self.nCohInt = 1
709 706
710 707 self.nIncohInt = 1
711 708
712 709 def getNormFactor(self):
713 710 pwcode = 1
714 711 if self.flagDecodeData:
715 712 pwcode = numpy.sum(self.code[0]**2)
716 713
717 714 normFactor = self.nIncohInt*self.nCohInt*pwcode
718 715
719 716 return normFactor
720 717
721 718 def getTimeInterval(self):
722 719
723 720 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
724 721
725 722 return timeInterval
726 723
727 724 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
728 725 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
729 726
730 727 class Fits(JROData):
731 728
732 729 heightList = None
733 730
734 731 channelList = None
735 732
736 733 flagNoData = True
737 734
738 735 flagDiscontinuousBlock = False
739 736
740 737 useLocalTime = False
741 738
742 739 utctime = None
743 740
744 741 timeZone = None
745 742
746 743 # ippSeconds = None
747 744
748 745 # timeInterval = None
749 746
750 747 nCohInt = None
751 748
752 749 nIncohInt = None
753 750
754 751 noise = None
755 752
756 753 windowOfFilter = 1
757 754
758 755 #Speed of light
759 756 C = 3e8
760 757
761 758 frequency = 49.92e6
762 759
763 760 realtime = False
764 761
765 762
766 763 def __init__(self):
767 764
768 765 self.type = "Fits"
769 766
770 767 self.nProfiles = None
771 768
772 769 self.heightList = None
773 770
774 771 self.channelList = None
775 772
776 773 # self.channelIndexList = None
777 774
778 775 self.flagNoData = True
779 776
780 777 self.utctime = None
781 778
782 779 self.nCohInt = 1
783 780
784 781 self.nIncohInt = 1
785 782
786 783 self.useLocalTime = True
787 784
788 785 self.profileIndex = 0
789 786
790 787 # self.utctime = None
791 788 # self.timeZone = None
792 789 # self.ltctime = None
793 790 # self.timeInterval = None
794 791 # self.header = None
795 792 # self.data_header = None
796 793 # self.data = None
797 794 # self.datatime = None
798 795 # self.flagNoData = False
799 796 # self.expName = ''
800 797 # self.nChannels = None
801 798 # self.nSamples = None
802 799 # self.dataBlocksPerFile = None
803 800 # self.comments = ''
804 801 #
805 802
806 803
807 804 def getltctime(self):
808 805
809 806 if self.useLocalTime:
810 807 return self.utctime - self.timeZone*60
811 808
812 809 return self.utctime
813 810
814 811 def getDatatime(self):
815 812
816 813 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
817 814 return datatime
818 815
819 816 def getTimeRange(self):
820 817
821 818 datatime = []
822 819
823 820 datatime.append(self.ltctime)
824 821 datatime.append(self.ltctime + self.timeInterval)
825 822
826 823 datatime = numpy.array(datatime)
827 824
828 825 return datatime
829 826
830 827 def getHeiRange(self):
831 828
832 829 heis = self.heightList
833 830
834 831 return heis
835 832
836 833 def getNHeights(self):
837 834
838 835 return len(self.heightList)
839 836
840 837 def getNChannels(self):
841 838
842 839 return len(self.channelList)
843 840
844 841 def getChannelIndexList(self):
845 842
846 843 return range(self.nChannels)
847 844
848 845 def getNoise(self, type = 1):
849 846
850 847 #noise = numpy.zeros(self.nChannels)
851 848
852 849 if type == 1:
853 850 noise = self.getNoisebyHildebrand()
854 851
855 852 if type == 2:
856 853 noise = self.getNoisebySort()
857 854
858 855 if type == 3:
859 856 noise = self.getNoisebyWindow()
860 857
861 858 return noise
862 859
863 860 def getTimeInterval(self):
864 861
865 862 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
866 863
867 864 return timeInterval
868 865
869 866 datatime = property(getDatatime, "I'm the 'datatime' property")
870 867 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
871 868 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
872 869 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
873 870 noise = property(getNoise, "I'm the 'nHeights' property.")
874 871
875 872 ltctime = property(getltctime, "I'm the 'ltctime' property")
876 873 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
877 874
878 875 class Correlation(JROData):
879 876
880 877 noise = None
881 878
882 879 SNR = None
883 880
884 881 pairsAutoCorr = None #Pairs of Autocorrelation
885 882
886 883 #--------------------------------------------------
887 884
888 885 data_corr = None
889 886
890 887 data_volt = None
891 888
892 889 lagT = None # each element value is a profileIndex
893 890
894 891 lagR = None # each element value is in km
895 892
896 893 pairsList = None
897 894
898 895 calculateVelocity = None
899 896
900 897 nPoints = None
901 898
902 899 nAvg = None
903 900
904 901 bufferSize = None
905 902
906 903 def __init__(self):
907 904 '''
908 905 Constructor
909 906 '''
910 907 self.radarControllerHeaderObj = RadarControllerHeader()
911 908
912 909 self.systemHeaderObj = SystemHeader()
913 910
914 911 self.type = "Correlation"
915 912
916 913 self.data = None
917 914
918 915 self.dtype = None
919 916
920 917 self.nProfiles = None
921 918
922 919 self.heightList = None
923 920
924 921 self.channelList = None
925 922
926 923 self.flagNoData = True
927 924
928 925 self.flagDiscontinuousBlock = False
929 926
930 927 self.utctime = None
931 928
932 929 self.timeZone = None
933 930
934 931 self.dstFlag = None
935 932
936 933 self.errorCount = None
937 934
938 935 self.blocksize = None
939 936
940 937 self.flagDecodeData = False #assume the data is not decoded
941 938 
942 939 self.flagDeflipData = False #assume the data has not been deflipped
943 940
944 941 self.pairsList = None
945 942
946 943 self.nPoints = None
947 944
948 945 def getLagTRange(self, extrapoints=0):
949 946
950 947 lagTRange = self.lagT
951 948 diff = lagTRange[1] - lagTRange[0]
952 949 extra = numpy.arange(1,extrapoints + 1)*diff + lagTRange[-1]
953 950 lagTRange = numpy.hstack((lagTRange, extra))
954 951
955 952 return lagTRange
956 953
957 954 def getLagRRange(self, extrapoints=0):
958 955
959 956 return self.lagR
960 957
961 958 def getPairsList(self):
962 959
963 960 return self.pairsList
964 961
965 962 def getCalculateVelocity(self):
966 963
967 964 return self.calculateVelocity
968 965
969 966 def getNPoints(self):
970 967
971 968 return self.nPoints
972 969
973 970 def getNAvg(self):
974 971
975 972 return self.nAvg
976 973
977 974 def getBufferSize(self):
978 975
979 976 return self.bufferSize
980 977
981 978 def getPairsAutoCorr(self):
982 979 pairsList = self.pairsList
983 980 pairsAutoCorr = numpy.zeros(self.nChannels, dtype = 'int')*numpy.nan
984 981
985 982 for l in range(len(pairsList)):
986 983 firstChannel = pairsList[l][0]
987 984 secondChannel = pairsList[l][1]
988 985
989 986 #Getting the autocorrelation pairs
990 987 if firstChannel == secondChannel:
991 988 pairsAutoCorr[firstChannel] = int(l)
992 989
993 990 pairsAutoCorr = pairsAutoCorr.astype(int)
994 991
995 992 return pairsAutoCorr
996 993
997 994 def getNoise(self, mode = 2):
998 995
999 996 indR = numpy.where(self.lagR == 0)[0][0]
1000 997 indT = numpy.where(self.lagT == 0)[0][0]
1001 998
1002 999 jspectra0 = self.data_corr[:,:,indR,:]
1003 1000 jspectra = copy.copy(jspectra0)
1004 1001
1005 1002 num_chan = jspectra.shape[0]
1006 1003 num_hei = jspectra.shape[2]
1007 1004
1008 1005 freq_dc = jspectra.shape[1]/2
1009 1006 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
1010 1007
1011 1008 if ind_vel[0]<0:
1012 1009 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
1013 1010
1014 1011 if mode == 1:
1015 1012 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
1016 1013
1017 1014 if mode == 2:
1018 1015
1019 1016 vel = numpy.array([-2,-1,1,2])
1020 1017 xx = numpy.zeros([4,4])
1021 1018
1022 1019 for fil in range(4):
1023 1020 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1024 1021
1025 1022 xx_inv = numpy.linalg.inv(xx)
1026 1023 xx_aux = xx_inv[0,:]
1027 1024
1028 1025 for ich in range(num_chan):
1029 1026 yy = jspectra[ich,ind_vel,:]
1030 1027 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1031 1028
1032 1029 junkid = jspectra[ich,freq_dc,:]<=0
1033 1030 cjunkid = sum(junkid)
1034 1031
1035 1032 if cjunkid.any():
1036 1033 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1037 1034
1038 1035 noise = jspectra0[:,freq_dc,:] - jspectra[:,freq_dc,:]
1039 1036
1040 1037 return noise
1041 1038
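In mode 2 the DC bin is re-estimated by fitting a cubic through the four neighbouring velocity bins; the first row of the inverted Vandermonde matrix gives the weights that evaluate that fit at v = 0. A small sketch with hypothetical samples of y = v**2, whose fitted value at v = 0 is 0:

    import numpy
    vel = numpy.array([-2, -1, 1, 2])
    xx = numpy.array([v**numpy.arange(4) for v in vel], dtype=float)
    weights = numpy.linalg.inv(xx)[0, :]                     # weights that recover the fit at v = 0
    print numpy.dot(weights, numpy.array([4., 1., 1., 4.]))  # ~0.0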
1042 1039 def getTimeInterval(self):
1043 1040
1044 1041 timeInterval = self.ippSeconds * self.nCohInt * self.nPoints
1045 1042
1046 1043 return timeInterval
1047 1044
1048 1045 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
1049 1046 # pairsList = property(getPairsList, "I'm the 'pairsList' property.")
1050 1047 # nPoints = property(getNPoints, "I'm the 'nPoints' property.")
1051 1048 calculateVelocity = property(getCalculateVelocity, "I'm the 'calculateVelocity' property.")
1052 1049 nAvg = property(getNAvg, "I'm the 'nAvg' property.")
1053 1050 bufferSize = property(getBufferSize, "I'm the 'bufferSize' property.")
1054 1051
1055 1052
1056 1053 class Parameters(JROData):
1057 1054
1058 1055 #Information from previous data
1059 1056
1060 1057 inputUnit = None #Type of data to be processed
1061 1058
1062 1059 operation = None #Type of operation to parametrize
1063 1060
1064 1061 normFactor = None #Normalization Factor
1065 1062
1066 1063 groupList = None #List of Pairs, Groups, etc
1067 1064
1068 1065 #Parameters
1069 1066
1070 1067 data_param = None #Parameters obtained
1071 1068
1072 1069 data_pre = None #Data Pre Parametrization
1073 1070
1074 1071 data_SNR = None #Signal to Noise Ratio
1075 1072
1076 1073 # heightRange = None #Heights
1077 1074
1078 1075 abscissaList = None #Abscissa, can be velocities, lags or time
1079 1076
1080 1077 noise = None #Noise Potency
1081 1078
1082 1079 utctimeInit = None #Initial UTC time
1083 1080
1084 1081 paramInterval = None #Time interval to calculate Parameters in seconds
1085 1082
1086 1083 #Fitting
1087 1084
1088 1085 data_error = None #Error of the estimation
1089 1086
1090 1087 constants = None
1091 1088
1092 1089 library = None
1093 1090
1094 1091 #Output signal
1095 1092
1096 1093 outputInterval = None #Time interval to calculate output signal in seconds
1097 1094
1098 1095 data_output = None #Out signal
1099 1096
1100 1097
1101 1098
1102 1099 def __init__(self):
1103 1100 '''
1104 1101 Constructor
1105 1102 '''
1106 1103 self.radarControllerHeaderObj = RadarControllerHeader()
1107 1104
1108 1105 self.systemHeaderObj = SystemHeader()
1109 1106
1110 1107 self.type = "Parameters"
1111 1108
1112 1109 def getTimeRange1(self):
1113 1110
1114 1111 datatime = []
1115 1112
1116 1113 if self.useLocalTime:
1117 1114 time1 = self.utctimeInit - self.timeZone*60
1118 1115 else:
1119 1116 time1 = self.utctimeInit
1120 1117
1121 1118 # datatime.append(self.utctimeInit)
1122 1119 # datatime.append(self.utctimeInit + self.outputInterval)
1123 1120 datatime.append(time1)
1124 1121 datatime.append(time1 + self.outputInterval)
1125 1122
1126 1123 datatime = numpy.array(datatime)
1127 1124
1128 1125 return datatime
@@ -1,724 +1,734
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import numpy
7 7 import copy
8 8 import datetime
9 9
10 10 SPEED_OF_LIGHT = 299792458
11 11 SPEED_OF_LIGHT = 3e8
12 12
13 13 BASIC_STRUCTURE = numpy.dtype([
14 14 ('nSize','<u4'),
15 15 ('nVersion','<u2'),
16 16 ('nDataBlockId','<u4'),
17 17 ('nUtime','<u4'),
18 18 ('nMilsec','<u2'),
19 19 ('nTimezone','<i2'),
20 20 ('nDstflag','<i2'),
21 21 ('nErrorCount','<u4')
22 22 ])
23 23
24 24 SYSTEM_STRUCTURE = numpy.dtype([
25 25 ('nSize','<u4'),
26 26 ('nNumSamples','<u4'),
27 27 ('nNumProfiles','<u4'),
28 28 ('nNumChannels','<u4'),
29 29 ('nADCResolution','<u4'),
30 30 ('nPCDIOBusWidth','<u4'),
31 31 ])
32 32
33 33 RADAR_STRUCTURE = numpy.dtype([
34 34 ('nSize','<u4'),
35 35 ('nExpType','<u4'),
36 36 ('nNTx','<u4'),
37 37 ('fIpp','<f4'),
38 38 ('fTxA','<f4'),
39 39 ('fTxB','<f4'),
40 40 ('nNumWindows','<u4'),
41 41 ('nNumTaus','<u4'),
42 42 ('nCodeType','<u4'),
43 43 ('nLine6Function','<u4'),
44 44 ('nLine5Function','<u4'),
45 45 ('fClock','<f4'),
46 46 ('nPrePulseBefore','<u4'),
47 47 ('nPrePulseAfter','<u4'),
48 48 ('sRangeIPP','<a20'),
49 49 ('sRangeTxA','<a20'),
50 50 ('sRangeTxB','<a20'),
51 51 ])
52 52
53 53 SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
54 54
55 55
56 56 PROCESSING_STRUCTURE = numpy.dtype([
57 57 ('nSize','<u4'),
58 58 ('nDataType','<u4'),
59 59 ('nSizeOfDataBlock','<u4'),
60 60 ('nProfilesperBlock','<u4'),
61 61 ('nDataBlocksperFile','<u4'),
62 62 ('nNumWindows','<u4'),
63 63 ('nProcessFlags','<u4'),
64 64 ('nCoherentIntegrations','<u4'),
65 65 ('nIncoherentIntegrations','<u4'),
66 66 ('nTotalSpectra','<u4')
67 67 ])
68 68
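These structured dtypes mirror the little-endian on-disk header layouts. A tiny round-trip sketch for BASIC_STRUCTURE through an in-memory buffer (field values are hypothetical):

    import numpy
    rec = numpy.zeros(1, BASIC_STRUCTURE)                   # itemsize is 24 bytes, matching nSize
    rec['nSize'], rec['nUtime'] = 24, 1353423981            # hypothetical values
    print numpy.frombuffer(rec.tobytes(), BASIC_STRUCTURE)  # .tostring() on older numpy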
69 69 class Header(object):
70 70
71 71 def __init__(self):
72 raise
72 raise NotImplementedError
73 73
74 74 def copy(self):
75 75 return copy.deepcopy(self)
76 76
77 77 def read(self):
78 78
79 raise ValueError
79 raise NotImplementedError
80 80
81 81 def write(self):
82 82
83 raise ValueError
83 raise NotImplementedError
84 84
85 85 def printInfo(self):
86 86
87 87 message = "#"*50 + "\n"
88 88 message += self.__class__.__name__.upper() + "\n"
89 89 message += "#"*50 + "\n"
90 90
91 91 for key in self.__dict__.keys():
92 92 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
93 93
94 94 print message
95 95
96 96 class BasicHeader(Header):
97 97
98 98 size = None
99 99 version = None
100 100 dataBlock = None
101 101 utc = None
102 102 ltc = None
103 103 miliSecond = None
104 104 timeZone = None
105 105 dstFlag = None
106 106 errorCount = None
107 107 datatime = None
108 108
109 109 __LOCALTIME = None
110 110
111 111 def __init__(self, useLocalTime=True):
112 112
113 113 self.size = 24
114 114 self.version = 0
115 115 self.dataBlock = 0
116 116 self.utc = 0
117 117 self.miliSecond = 0
118 118 self.timeZone = 0
119 119 self.dstFlag = 0
120 120 self.errorCount = 0
121 121
122 122 self.useLocalTime = useLocalTime
123 123
124 124 def read(self, fp):
125 try:
126 125
126 try:
127 127 header = numpy.fromfile(fp, BASIC_STRUCTURE,1)
128 128
129 except Exception, e:
130 print "BasicHeader: "
131 print e
132 return 0
133
129 134 self.size = int(header['nSize'][0])
130 135 self.version = int(header['nVersion'][0])
131 136 self.dataBlock = int(header['nDataBlockId'][0])
132 137 self.utc = int(header['nUtime'][0])
133 138 self.miliSecond = int(header['nMilsec'][0])
134 139 self.timeZone = int(header['nTimezone'][0])
135 140 self.dstFlag = int(header['nDstflag'][0])
136 141 self.errorCount = int(header['nErrorCount'][0])
137 142
138 except Exception, e:
139 print "BasicHeader: "
140 print e
141 return 0
142
143 143 return 1
144 144
145 145 def write(self, fp):
146 146
147 147 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
148 148 header = numpy.array(headerTuple, BASIC_STRUCTURE)
149 149 header.tofile(fp)
150 150
151 151 return 1
152 152
153 153 def get_ltc(self):
154 154
155 155 return self.utc - self.timeZone*60
156 156
157 157 def set_ltc(self, value):
158 158
159 159 self.utc = value + self.timeZone*60
160 160
161 161 def get_datatime(self):
162 162
163 163 return datetime.datetime.utcfromtimestamp(self.ltc)
164 164
165 165 ltc = property(get_ltc, set_ltc)
166 166 datatime = property(get_datatime)
167 167
168 168 class SystemHeader(Header):
169 169
170 170 size = None
171 171 nSamples = None
172 172 nProfiles = None
173 173 nChannels = None
174 174 adcResolution = None
175 175 pciDioBusWidth = None
176 176
177 177 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
178 178
179 179 self.size = 24
180 180 self.nSamples = nSamples
181 181 self.nProfiles = nProfiles
182 182 self.nChannels = nChannels
183 183 self.adcResolution = adcResolution
184 184 self.pciDioBusWidth = pciDioBusWith
185 185
186 186 def read(self, fp):
187 187
188 188 startFp = fp.tell()
189 189
190 190 try:
191 191 header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1)
192 except Exception, e:
193 print "SystemHeader: " + e
194 return 0
192 195
193 196 self.size = header['nSize'][0]
194 197 self.nSamples = header['nNumSamples'][0]
195 198 self.nProfiles = header['nNumProfiles'][0]
196 199 self.nChannels = header['nNumChannels'][0]
197 200 self.adcResolution = header['nADCResolution'][0]
198 201 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
199 202
200 except Exception, e:
201 print "SystemHeader: " + e
202 return 0
203
204 203 endFp = self.size + startFp
205 204
206 205 if fp.tell() != endFp:
207 raise IOError, "System Header is not consistent"
206 print "System Header is not consistent"
207 return 0
208 208
209 209 return 1
210 210
211 211 def write(self, fp):
212 212
213 213 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
214 214 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
215 215 header.tofile(fp)
216 216
217 217 return 1
218 218
219 219 class RadarControllerHeader(Header):
220 220
221 221 size = None
222 222 expType = None
223 223 nTx = None
224 224 ipp = None
225 225 txA = None
226 226 txB = None
227 227 nWindows = None
228 228 numTaus = None
229 229 codeType = None
230 230 line6Function = None
231 231 line5Function = None
232 232 fClock = None
233 233 prePulseBefore = None
234 234 prePulserAfter = None
235 235 rangeIpp = None
236 236 rangeTxA = None
237 237 rangeTxB = None
238 238
239 239 __size = None
240 240
241 241 def __init__(self, expType=2, nTx=1,
242 242 ippKm=None, txA=0, txB=0,
243 243 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
244 244 numTaus=0, line6Function=0, line5Function=0, fClock=None,
245 245 prePulseBefore=0, prePulseAfter=0,
246 246 codeType=0, nCode=0, nBaud=0, code=None,
247 247 flip1=0, flip2=0):
248 248
249 249 self.size = 116
250 250 self.expType = expType
251 251 self.nTx = nTx
252 252 self.ipp = ippKm
253 253 self.txA = txA
254 254 self.txB = txB
255 255 self.rangeIpp = ippKm
256 256 self.rangeTxA = txA
257 257 self.rangeTxB = txB
258 258
259 259 self.nWindows = nWindows
260 260 self.numTaus = numTaus
261 261 self.codeType = codeType
262 262 self.line6Function = line6Function
263 263 self.line5Function = line5Function
264 264 self.fClock = fClock
265 265 self.prePulseBefore = prePulseBefore
266 266 self.prePulserAfter = prePulseAfter
267 267
268 268 self.nHeights = nHeights
269 269 self.firstHeight = firstHeight
270 270 self.deltaHeight = deltaHeight
271 271 self.samplesWin = nHeights
272 272
273 273 self.nCode = nCode
274 274 self.nBaud = nBaud
275 275 self.code = code
276 276 self.flip1 = flip1
277 277 self.flip2 = flip2
278 278
279 279 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
280 280 # self.dynamic = numpy.array([],numpy.dtype('byte'))
281 281
282 282 if self.fClock is None and self.deltaHeight is not None:
283 283 self.fClock = 0.15/(deltaHeight*1e-6) #0.15Km / (height * 1u)
284 284
285 285 def read(self, fp):
286 286
287 287
288 288 startFp = fp.tell()
289 try:
289 290 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
291 except Exception, e:
292 print "RadarControllerHeader: " + e
293 return 0
290 294
291 295 size = int(header['nSize'][0])
292 296 self.expType = int(header['nExpType'][0])
293 297 self.nTx = int(header['nNTx'][0])
294 298 self.ipp = float(header['fIpp'][0])
295 299 self.txA = float(header['fTxA'][0])
296 300 self.txB = float(header['fTxB'][0])
297 301 self.nWindows = int(header['nNumWindows'][0])
298 302 self.numTaus = int(header['nNumTaus'][0])
299 303 self.codeType = int(header['nCodeType'][0])
300 304 self.line6Function = int(header['nLine6Function'][0])
301 305 self.line5Function = int(header['nLine5Function'][0])
302 306 self.fClock = float(header['fClock'][0])
303 307 self.prePulseBefore = int(header['nPrePulseBefore'][0])
304 308 self.prePulserAfter = int(header['nPrePulseAfter'][0])
305 309 self.rangeIpp = header['sRangeIPP'][0]
306 310 self.rangeTxA = header['sRangeTxA'][0]
307 311 self.rangeTxB = header['sRangeTxB'][0]
308 312
309 313 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
310 314
311 315 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
312 316 self.firstHeight = samplingWindow['h0']
313 317 self.deltaHeight = samplingWindow['dh']
314 318 self.samplesWin = samplingWindow['nsa']
315 319
316 320 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
317 321
318 322 self.code_size = 0
319 323 if self.codeType != 0:
320 324 self.nCode = int(numpy.fromfile(fp,'<u4',1))
321 325 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
322 326
323 327 code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
324 328 for ic in range(self.nCode):
325 329 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
326 330 for ib in range(self.nBaud-1,-1,-1):
327 331 code[ic,ib] = temp[ib/32]%2
328 332 temp[ib/32] = temp[ib/32]/2
329 333
330 334 self.code = 2.0*code - 1.0
331 335 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
332 336
333 337 # if self.line5Function == RCfunction.FLIP:
334 338 # self.flip1 = numpy.fromfile(fp,'<u4',1)
335 339 #
336 340 # if self.line6Function == RCfunction.FLIP:
337 341 # self.flip2 = numpy.fromfile(fp,'<u4',1)
338 342
339 343 endFp = size + startFp
340 344
341 345 if fp.tell() != endFp:
342 raise IOError, "Radar Controller Header is not consistent"
346 print "Radar Controller Header is not consistent"
347 return 0
343 348
344 349 return 1
345 350
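The code-decoding loop above unpacks each 32-bit word baud by baud: the repeated %2 and /2 mean the least-significant bit of the word ends up in the last baud position. The same unpacking for a single hypothetical 4-baud word:

    import numpy
    temp, nBaud = 0b1011, 4                        # hypothetical packed word
    bits = [(temp >> (nBaud-1-ib)) & 1 for ib in range(nBaud)]
    print 2.0*numpy.array(bits) - 1.0              # -> [ 1. -1.  1.  1.]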
346 351 def write(self, fp):
347 352
348 353 headerTuple = (self.size,
349 354 self.expType,
350 355 self.nTx,
351 356 self.ipp,
352 357 self.txA,
353 358 self.txB,
354 359 self.nWindows,
355 360 self.numTaus,
356 361 self.codeType,
357 362 self.line6Function,
358 363 self.line5Function,
359 364 self.fClock,
360 365 self.prePulseBefore,
361 366 self.prePulserAfter,
362 367 self.rangeIpp,
363 368 self.rangeTxA,
364 369 self.rangeTxB)
365 370
366 371 header = numpy.array(headerTuple,RADAR_STRUCTURE)
367 372 header.tofile(fp)
368 373
369 374 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
370 375 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
371 376 samplingWindow.tofile(fp)
372 377
373 378 if self.numTaus > 0:
374 379 self.Taus.tofile(fp)
375 380
376 381 if self.codeType !=0:
377 382 nCode = numpy.array(self.nCode, '<u4')
378 383 nCode.tofile(fp)
379 384 nBaud = numpy.array(self.nBaud, '<u4')
380 385 nBaud.tofile(fp)
381 386 code1 = (self.code + 1.0)/2.
382 387
383 388 for ic in range(self.nCode):
384 389 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
385 390 start = 0
386 391 end = 32
387 392 for i in range(len(tempx)):
388 393 code_selected = code1[ic,start:end]
389 394 for j in range(len(code_selected)-1,-1,-1):
390 395 if code_selected[j] == 1:
391 396 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
392 397 start = start + 32
393 398 end = end + 32
394 399
395 400 tempx = tempx.astype('u4')
396 401 tempx.tofile(fp)
397 402
398 403 # if self.line5Function == RCfunction.FLIP:
399 404 # self.flip1.tofile(fp)
400 405 #
401 406 # if self.line6Function == RCfunction.FLIP:
402 407 # self.flip2.tofile(fp)
403 408
404 409 return 1
405 410
406 411 def get_ippSeconds(self):
407 412 '''
408 413 '''
409 414 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
410 415
411 416 return ippSeconds
412 417
413 418 def set_ippSeconds(self, ippSeconds):
414 419 '''
415 420 '''
416 421
417 422 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)
418 423
419 424 return
420 425
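The ippSeconds property converts the inter-pulse period between kilometres (as stored in the header) and seconds using the two-way travel time. For a hypothetical IPP of 150 km:

    SPEED_OF_LIGHT = 3e8
    ippKm = 150.                                   # hypothetical IPP in km
    ippSeconds = 2.0*1000*ippKm/SPEED_OF_LIGHT     # 1.0e-3 s, i.e. a 1 kHz PRF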
421 426 def get_size(self):
422 427
423 428 self.__size = 116 + 12*self.nWindows + 4*self.numTaus
424 429
425 430 if self.codeType != 0:
426 431 self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
427 432
428 433 return self.__size
429 434
430 435 def set_size(self, value):
431 436
432 437 self.__size = value
433 438
434 439 return
435 440
436 441 ippSeconds = property(get_ippSeconds, set_ippSeconds)
437 442 size = property(get_size, set_size)
438 443
439 444 class ProcessingHeader(Header):
440 445
441 446 # size = None
442 447 dtype = None
443 448 blockSize = None
444 449 profilesPerBlock = None
445 450 dataBlocksPerFile = None
446 451 nWindows = None
447 452 processFlags = None
448 453 nCohInt = None
449 454 nIncohInt = None
450 455 totalSpectra = None
451 456
452 457 flag_dc = None
453 458 flag_cspc = None
454 459
455 460 def __init__(self):
456 461
457 462 # self.size = 0
458 463 self.dtype = 0
459 464 self.blockSize = 0
460 465 self.profilesPerBlock = 0
461 466 self.dataBlocksPerFile = 0
462 467 self.nWindows = 0
463 468 self.processFlags = 0
464 469 self.nCohInt = 0
465 470 self.nIncohInt = 0
466 471 self.totalSpectra = 0
467 472
468 473 self.nHeights = 0
469 474 self.firstHeight = 0
470 475 self.deltaHeight = 0
471 476 self.samplesWin = 0
472 477 self.spectraComb = 0
473 478 self.nCode = None
474 479 self.code = None
475 480 self.nBaud = None
476 481
477 482 self.shif_fft = False
478 483 self.flag_dc = False
479 484 self.flag_cspc = False
480 485 self.flag_decode = False
481 486 self.flag_deflip = False
482 487
483 488 def read(self, fp):
484 489
485 490 startFp = fp.tell()
486 491
492 try:
487 493 header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
494 except Exception, e:
495 print "ProcessingHeader: " + e
496 return 0
488 497
489 498 size = int(header['nSize'][0])
490 499 self.dtype = int(header['nDataType'][0])
491 500 self.blockSize = int(header['nSizeOfDataBlock'][0])
492 501 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
493 502 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
494 503 self.nWindows = int(header['nNumWindows'][0])
495 504 self.processFlags = header['nProcessFlags']
496 505 self.nCohInt = int(header['nCoherentIntegrations'][0])
497 506 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
498 507 self.totalSpectra = int(header['nTotalSpectra'][0])
499 508
500 509 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
501 510
502 511 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
503 512 self.firstHeight = float(samplingWindow['h0'][0])
504 513 self.deltaHeight = float(samplingWindow['dh'][0])
505 514 self.samplesWin = samplingWindow['nsa'][0]
506 515
507 516 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
508 517
509 518 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
510 519 self.nCode = int(numpy.fromfile(fp,'<u4',1))
511 520 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
512 521 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
513 522
514 523 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
515 524 exp_name_len = int(numpy.fromfile(fp,'<u4',1))
516 525 exp_name = numpy.fromfile(fp,'u1',exp_name_len+1)
517 526
518 527 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
519 528 self.shif_fft = True
520 529 else:
521 530 self.shif_fft = False
522 531
523 532 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
524 533 self.flag_dc = True
525 534 else:
526 535 self.flag_dc = False
527 536
528 537 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
529 538 self.flag_decode = True
530 539 else:
531 540 self.flag_decode = False
532 541
533 542 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
534 543 self.flag_deflip = True
535 544 else:
536 545 self.flag_deflip = False
537 546
538 547 nChannels = 0
539 548 nPairs = 0
540 549 pairList = []
541 550
542 551 for i in range( 0, self.totalSpectra*2, 2 ):
543 552 if self.spectraComb[i] == self.spectraComb[i+1]:
544 553 nChannels = nChannels + 1 #pair of equal channels
545 554 else:
546 555 nPairs = nPairs + 1 #pair of different channels
547 556 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
548 557
549 558 self.flag_cspc = False
550 559 if nPairs > 0:
551 560 self.flag_cspc = True
552 561
553 562 endFp = size + startFp
554 563
555 564 if fp.tell() != endFp:
556 raise IOError, "Processing Header is not consistent"
565 print "Processing Header is not consistent"
566 return 0
557 567
558 568 return 1
559 569
560 570 def write(self, fp):
561 571 #Clear DEFINE_PROCESS_CODE
562 572 # self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
563 573
564 574 headerTuple = (self.size,
565 575 self.dtype,
566 576 self.blockSize,
567 577 self.profilesPerBlock,
568 578 self.dataBlocksPerFile,
569 579 self.nWindows,
570 580 self.processFlags,
571 581 self.nCohInt,
572 582 self.nIncohInt,
573 583 self.totalSpectra)
574 584
575 585 header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
576 586 header.tofile(fp)
577 587
578 588 if self.nWindows != 0:
579 589 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
580 590 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
581 591 samplingWindow.tofile(fp)
582 592
583 593 if self.totalSpectra != 0:
584 594 # spectraComb = numpy.array([],numpy.dtype('u1'))
585 595 spectraComb = self.spectraComb
586 596 spectraComb.tofile(fp)
587 597
588 598 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
589 599 nCode = numpy.array([self.nCode], numpy.dtype('u4')) #To be tested with data that actually stores a code; no test has been run so far
590 600 nCode.tofile(fp)
591 601
592 602 nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
593 603 nBaud.tofile(fp)
594 604
595 605 code = self.code.reshape(self.nCode*self.nBaud)
596 606 code = code.astype(numpy.dtype('<f4'))
597 607 code.tofile(fp)
598 608
599 609 return 1
600 610
601 611 def get_size(self):
602 612
603 613 self.__size = 40 + 12*self.nWindows + 2*self.totalSpectra
604 614
605 615 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
606 616 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
607 617 self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
608 618
609 619 return self.__size
610 620
611 621 def set_size(self, value):
612 622
613 623 self.__size = value
614 624
615 625 return
616 626
617 627 size = property(get_size, set_size)
618 628
619 629 class RCfunction:
620 630 NONE=0
621 631 FLIP=1
622 632 CODE=2
623 633 SAMPLING=3
624 634 LIN6DIV256=4
625 635 SYNCHRO=5
626 636
627 637 class nCodeType:
628 638 NONE=0
629 639 USERDEFINE=1
630 640 BARKER2=2
631 641 BARKER3=3
632 642 BARKER4=4
633 643 BARKER5=5
634 644 BARKER7=6
635 645 BARKER11=7
636 646 BARKER13=8
637 647 AC128=9
638 648 COMPLEMENTARYCODE2=10
639 649 COMPLEMENTARYCODE4=11
640 650 COMPLEMENTARYCODE8=12
641 651 COMPLEMENTARYCODE16=13
642 652 COMPLEMENTARYCODE32=14
643 653 COMPLEMENTARYCODE64=15
644 654 COMPLEMENTARYCODE128=16
645 655 CODE_BINARY28=17
646 656
647 657 class PROCFLAG:
648 658
649 659 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
650 660 DECODE_DATA = numpy.uint32(0x00000002)
651 661 SPECTRA_CALC = numpy.uint32(0x00000004)
652 662 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
653 663 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
654 664 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
655 665
656 666 DATATYPE_CHAR = numpy.uint32(0x00000040)
657 667 DATATYPE_SHORT = numpy.uint32(0x00000080)
658 668 DATATYPE_LONG = numpy.uint32(0x00000100)
659 669 DATATYPE_INT64 = numpy.uint32(0x00000200)
660 670 DATATYPE_FLOAT = numpy.uint32(0x00000400)
661 671 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
662 672
663 673 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
664 674 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
665 675 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
666 676
667 677 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
668 678 DEFLIP_DATA = numpy.uint32(0x00010000)
669 679 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
670 680
671 681 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
672 682 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
673 683 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
674 684 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
675 685 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
676 686
677 687 EXP_NAME_ESP = numpy.uint32(0x00200000)
678 688 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
679 689
680 690 OPERATION_MASK = numpy.uint32(0x0000003F)
681 691 DATATYPE_MASK = numpy.uint32(0x00000FC0)
682 692 DATAARRANGE_MASK = numpy.uint32(0x00007000)
683 693 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
684 694
685 695 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
686 696 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
687 697 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
688 698 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
689 699 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
690 700 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
691 701
692 702 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
693 703
694 704 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
695 705 PROCFLAG.DATATYPE_SHORT,
696 706 PROCFLAG.DATATYPE_LONG,
697 707 PROCFLAG.DATATYPE_INT64,
698 708 PROCFLAG.DATATYPE_FLOAT,
699 709 PROCFLAG.DATATYPE_DOUBLE]
700 710
701 711 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
702 712
703 713 def get_dtype_index(numpy_dtype):
704 714
705 715 index = None
706 716
707 717 for i in range(len(NUMPY_DTYPE_LIST)):
708 718 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
709 719 index = i
710 720 break
711 721
712 722 return index
713 723
714 724 def get_numpy_dtype(index):
715 725
716 726 return NUMPY_DTYPE_LIST[index]
717 727
718 728 def get_procflag_dtype(index):
719 729
720 730 return PROCFLAG_DTYPE_LIST[index]
721 731
722 732 def get_dtype_width(index):
723 733
724 734 return DTYPE_WIDTH[index] No newline at end of file
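A short sketch tying the helpers together: look up the index of the complex int16 dtype, then its PROCFLAG bit and per-component byte width (the values follow directly from the tables above):

    idx = get_dtype_index(numpy.dtype([('real','<i2'),('imag','<i2')]))
    print idx, get_procflag_dtype(idx) == PROCFLAG.DATATYPE_SHORT, get_dtype_width(idx)
    # -> 1 True 2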
@@ -1,1004 +1,1004
1 1 import numpy
2 2 import time
3 3 import os
4 4 import h5py
5 5 import re
6 6
7 7 from schainpy.model.data.jrodata import *
8 8 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
9 9 from schainpy.model.io.jroIO_base import *
10 10
11 11
12 12 class HDF5Reader(ProcessingUnit):
13 13
14 14 ext = ".hdf5"
15 15
16 16 optchar = "D"
17 17
18 18 timezone = None
19 19
20 20 secStart = None
21 21
22 22 secEnd = None
23 23
24 24 fileIndex = None
25 25
26 26 blockIndex = None
27 27
28 28 blocksPerFile = None
29 29
30 30 path = None
31 31
32 32 #List of Files
33 33
34 34 filenameList = None
35 35
36 36 datetimeList = None
37 37
38 38 #Hdf5 File
39 39
40 40 fpMetadata = None
41 41
42 42 pathMeta = None
43 43
44 44 listMetaname = None
45 45
46 46 listMeta = None
47 47
48 48 listDataname = None
49 49
50 50 listData = None
51 51
52 52 listShapes = None
53 53
54 54 fp = None
55 55
56 56 #dataOut reconstruction
57 57
58 58 dataOut = None
59 59
60 60 nRecords = None
61 61
62 62
63 63 def __init__(self):
64 64 self.dataOut = self.__createObjByDefault()
65 65 return
66 66
67 67 def __createObjByDefault(self):
68 68
69 69 dataObj = Parameters()
70 70
71 71 return dataObj
72 72
73 73 def setup(self,path=None,
74 74 startDate=None,
75 75 endDate=None,
76 76 startTime=datetime.time(0,0,0),
77 77 endTime=datetime.time(23,59,59),
78 78 walk=True,
79 79 timezone='ut',
80 80 all=0,
81 81 online=False,
82 82 ext=None):
83 83
84 84 if ext==None:
85 85 ext = self.ext
86 86 self.timezone = timezone
87 87 # self.all = all
88 88 # self.online = online
89 89 self.path = path
90 90
91 91 startDateTime = datetime.datetime.combine(startDate,startTime)
92 92 endDateTime = datetime.datetime.combine(endDate,endTime)
93 93 secStart = (startDateTime-datetime.datetime(1970,1,1)).total_seconds()
94 94 secEnd = (endDateTime-datetime.datetime(1970,1,1)).total_seconds()
95 95
96 96 self.secStart = secStart
97 97 self.secEnd = secEnd
98 98
99 99 if not(online):
100 100 #Offline file search
101 101 self.__searchFilesOffline(path, startDate, endDate, ext, startTime, endTime, secStart, secEnd, walk)
102 102 else:
103 103 self.__searchFilesOnline(path, walk)
104 104
105 105 if not(self.filenameList):
106 106 print "There are no files in the folder: %s"%(path)
107 107 sys.exit(-1)
108 108
109 109 # self.__getExpParameters()
110 110
111 111 self.fileIndex = -1
112 112
113 113 self.__setNextFileOffline()
114 114
115 115 self.__readMetadata()
116 116
117 117 self.blockIndex = 0
118 118
119 119 return
120 120
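A hypothetical offline call to setup (the path and dates are illustrative only); it builds the file list for the date/time window and opens the first file before blocks are read:

    reader = HDF5Reader()
    reader.setup(path='/data/param_hdf5',           # hypothetical path
                 startDate=datetime.date(2012,11,20),
                 endDate=datetime.date(2012,11,21),
                 startTime=datetime.time(0,0,0),
                 endTime=datetime.time(23,59,59),
                 walk=True, timezone='ut', online=False)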
121 121 def __searchFilesOffline(self,
122 122 path,
123 123 startDate,
124 124 endDate,
125 125 ext,
126 126 startTime=datetime.time(0,0,0),
127 127 endTime=datetime.time(23,59,59),
128 128 secStart = 0,
129 129 secEnd = numpy.inf,
130 130 walk=True):
131 131
132 132 # self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
133 133 #
134 134 # self.__checkPath()
135 135 #
136 136 # self.__findDataForDates()
137 137 #
138 138 # self.__selectDataForTimes()
139 139 #
140 140 # for i in range(len(self.filenameList)):
141 141 # print "%s" %(self.filenameList[i])
142 142
143 143 pathList = []
144 144
145 145 if not walk:
146 146 #pathList.append(path)
147 147 multi_path = path.split(',')
148 148 for single_path in multi_path:
149 149 pathList.append(single_path)
150 150
151 151 else:
152 152 #dirList = []
153 153 multi_path = path.split(',')
154 154 for single_path in multi_path:
155 155 dirList = []
156 156 for thisPath in os.listdir(single_path):
157 157 if not os.path.isdir(os.path.join(single_path,thisPath)):
158 158 continue
159 159 if not isDoyFolder(thisPath):
160 160 continue
161 161
162 162 dirList.append(thisPath)
163 163
164 164 if not(dirList):
165 165 return None, None
166 166
167 167 thisDate = startDate
168 168
169 169 while(thisDate <= endDate):
170 170 year = thisDate.timetuple().tm_year
171 171 doy = thisDate.timetuple().tm_yday
172 172
173 173 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
174 174 if len(matchlist) == 0:
175 175 thisDate += datetime.timedelta(1)
176 176 continue
177 177 for match in matchlist:
178 178 pathList.append(os.path.join(single_path,match))
179 179
180 180 thisDate += datetime.timedelta(1)
181 181
182 182 if pathList == []:
183 183 print "No folder was found for the date range: %s-%s" %(startDate, endDate)
184 184 return None, None
185 185
186 186 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
187 187
188 188 filenameList = []
189 189 datetimeList = []
190 190 pathDict = {}
191 191 filenameList_to_sort = []
192 192
193 193 for i in range(len(pathList)):
194 194
195 195 thisPath = pathList[i]
196 196
197 197 fileList = glob.glob1(thisPath, "*%s" %ext)
198 198 fileList.sort()
199 199 pathDict.setdefault(fileList[0])
200 200 pathDict[fileList[0]] = i
201 201 filenameList_to_sort.append(fileList[0])
202 202
203 203 filenameList_to_sort.sort()
204 204
205 205 for file in filenameList_to_sort:
206 206 thisPath = pathList[pathDict[file]]
207 207
208 208 fileList = glob.glob1(thisPath, "*%s" %ext)
209 209 fileList.sort()
210 210
211 211 for file in fileList:
212 212
213 213 filename = os.path.join(thisPath,file)
214 214 thisDatetime = self.__isFileinThisTime(filename, secStart, secEnd)
215 215
216 216 if not(thisDatetime):
217 217 continue
218 218
219 219 filenameList.append(filename)
220 220 datetimeList.append(thisDatetime)
221 221
222 222 if not(filenameList):
223 223 print "No files were found for the time range %s - %s" %(startTime, endTime)
224 224 return None, None
225 225
226 226 print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
227 227 print
228 228
229 229 for i in range(len(filenameList)):
230 230 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
231 231
232 232 self.filenameList = filenameList
233 233 self.datetimeList = datetimeList
234 234
235 235 return pathList, filenameList
236 236
237 237 def __isFileinThisTime(self, filename, startSeconds, endSeconds):
238 238 """
239 239 Returns the datetime of the first sample if the data file contains data within the specified epoch range.
240 240 
241 241 Inputs:
242 242 filename : full path of the HDF5 data file
243 243 
244 244 startSeconds : start of the selected range, in seconds since 01/01/1970
245 245 
246 246 endSeconds : end of the selected range, in seconds since 01/01/1970
247 247 
248 248 Return:
249 249 datetime : the datetime of the first sample if the file contains data in the
250 250 specified range, otherwise None.
251 251 
252 252 Exceptions:
253 253 If the file does not exist or cannot be opened
254 254 If the header cannot be read.
255 255
256 256 """
257 257
258 258 try:
259 259 fp = h5py.File(filename,'r')
260 260 except IOError:
261 traceback.print_exc()
262 raise IOError, "The file %s can't be opened" %(filename)
261 print "File %s can't be opened" %(filename)
262 return None
263 263
264 264 grp = fp['Data']
265 265 timeAux = grp['time']
266 266 time0 = timeAux[:][0].astype(numpy.float) #Time Vector
267 267
268 268 fp.close()
269 269
270 270 if self.timezone == 'lt':
271 271 time0 -= 5*3600
272 272
273 273 boolTimer = numpy.logical_and(time0 >= startSeconds,time0 < endSeconds)
274 274
275 275 if not (numpy.any(boolTimer)):
276 276 return None
277 277
278 278 thisDatetime = datetime.datetime.utcfromtimestamp(time0[0])
279 279 return thisDatetime
280 280
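# A minimal sketch of the epoch-window test above, using a synthetic time
# vector in place of the Data/time dataset of a real HDF5 file (the vector
# and the window limits are assumed values).
import numpy

def file_overlaps_window(time0, startSeconds, endSeconds, timezone='utc'):
    # time0: 1-D array of epoch seconds read from the file
    if timezone == 'lt':
        time0 = time0 - 5*3600                      # shift UTC to local time (UTC-5)
    inWindow = numpy.logical_and(time0 >= startSeconds, time0 < endSeconds)
    return numpy.any(inWindow)

time0 = 1000 + 10*numpy.arange(100)                 # records every 10 s starting at epoch 1000
print file_overlaps_window(time0, 1500, 1600)       # True
print file_overlaps_window(time0, 5000, 6000)       # False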
281 281 def __checkPath(self):
282 282 if os.path.exists(self.path):
283 283 self.status = 1
284 284 else:
285 285 self.status = 0
286 286 print 'Path %s does not exist'%self.path
287 287
288 288 return
289 289
290 290 def __setNextFileOffline(self):
291 291 idFile = self.fileIndex
292 292 idFile += 1
293 293
294 294 if not(idFile < len(self.filenameList)):
295 295 print "No more files"
296 296 return 0
297 297
298 298 filename = self.filenameList[idFile]
299 299
300 300 filePointer = h5py.File(filename,'r')
301 301
302 302 self.flagIsNewFile = 1
303 303 self.fileIndex = idFile
304 304 self.filename = filename
305 305
306 306 self.fp = filePointer
307 307
308 308 print "Setting the file: %s"%self.filename
309 309
310 310 self.__readMetadata()
311 311 self.__setBlockList()
312 312 # self.nRecords = self.fp['Data'].attrs['blocksPerFile']
313 313 self.nRecords = self.fp['Data'].attrs['nRecords']
314 314 self.blockIndex = 0
315 315 return 1
316 316
317 317 def __setBlockList(self):
318 318 '''
319 319 self.fp
320 320 self.startDateTime
321 321 self.endDateTime
322 322
323 323 self.blockList
324 324 self.blocksPerFile
325 325
326 326 '''
327 327 filePointer = self.fp
328 328 secStart = self.secStart
329 329 secEnd = self.secEnd
330 330
331 331 grp = filePointer['Data']
332 332 timeVector = grp['time'].value.astype(numpy.float)[0]
333 333
334 334 if self.timezone == 'lt':
335 335 timeVector -= 5*3600
336 336
337 337 ind = numpy.where(numpy.logical_and(timeVector >= secStart , timeVector < secEnd))[0]
338 338
339 339 self.blockList = ind
340 340 self.blocksPerFile = len(ind)
341 341
342 342 return
343 343
344 344 def __readMetadata(self):
345 345 '''
346 346 self.pathMeta
347 347
348 348 self.listShapes
349 349 self.listMetaname
350 350 self.listMeta
351 351
352 352 '''
353 353
354 354 grp = self.fp['Data']
355 355 pathMeta = os.path.join(self.path, grp.attrs['metadata'])
356 356
357 357 if pathMeta == self.pathMeta:
358 358 return
359 359 else:
360 360 self.pathMeta = pathMeta
361 361
362 362 filePointer = h5py.File(self.pathMeta,'r')
363 363 groupPointer = filePointer['Metadata']
364 364
365 365 listMetaname = []
366 366 listMetadata = []
367 367 for item in groupPointer.items():
368 368 name = item[0]
369 369
370 370 if name=='array dimensions':
371 371 table = groupPointer[name][:]
372 372 listShapes = {}
373 373 for shapes in table:
374 374 listShapes[shapes[0]] = numpy.array([shapes[1],shapes[2],shapes[3],shapes[4]])
375 375 else:
376 376 data = groupPointer[name].value
377 377 listMetaname.append(name)
378 378 listMetadata.append(data)
379 379
380 380 if name=='type':
381 381 self.__initDataOut(data)
382 382
383 383 filePointer.close()
384 384
385 385 self.listShapes = listShapes
386 386 self.listMetaname = listMetaname
387 387 self.listMeta = listMetadata
388 388
389 389 return
390 390
391 391 def __readData(self):
392 392 grp = self.fp['Data']
393 393 listdataname = []
394 394 listdata = []
395 395
396 396 for item in grp.items():
397 397 name = item[0]
398 398
399 399 if name == 'time':
400 400 listdataname.append('utctime')
401 401 timeAux = grp[name].value.astype(numpy.float)[0]
402 402 listdata.append(timeAux)
403 403 continue
404 404
405 405 listdataname.append(name)
406 406 array = self.__setDataArray(self.nRecords, grp[name],self.listShapes[name])
407 407 listdata.append(array)
408 408
409 409 self.listDataname = listdataname
410 410 self.listData = listdata
411 411 return
412 412
413 413 def __setDataArray(self, nRecords, dataset, shapes):
414 414
415 415 nChannels = shapes[0] #Dimension 0
416 416
417 417 nPoints = shapes[1] #Dimension 1, number of Points or Parameters
418 418
419 419 nSamples = shapes[2] #Dimension 2, number of samples or ranges
420 420
421 421 mode = shapes[3]
422 422
423 423 # if nPoints>1:
424 424 # arrayData = numpy.zeros((nRecords,nChannels,nPoints,nSamples))
425 425 # else:
426 426 # arrayData = numpy.zeros((nRecords,nChannels,nSamples))
427 427 #
428 428 # chn = 'channel'
429 429 #
430 430 # for i in range(nChannels):
431 431 #
432 432 # data = dataset[chn + str(i)].value
433 433 #
434 434 # if nPoints>1:
435 435 # data = numpy.rollaxis(data,2)
436 436 #
437 437 # arrayData[:,i,:] = data
438 438
439 439 arrayData = numpy.zeros((nRecords,nChannels,nPoints,nSamples))
440 440 doSqueeze = False
441 441 if mode == 0:
442 442 strds = 'channel'
443 443 nDatas = nChannels
444 444 newShapes = (nRecords,nPoints,nSamples)
445 445 if nPoints == 1:
446 446 doSqueeze = True
447 447 axisSqueeze = 2
448 448 else:
449 449 strds = 'param'
450 450 nDatas = nPoints
451 451 newShapes = (nRecords,nChannels,nSamples)
452 452 if nChannels == 1:
453 453 doSqueeze = True
454 454 axisSqueeze = 1
455 455
456 456 for i in range(nDatas):
457 457
458 458 data = dataset[strds + str(i)].value
459 459 data = data.reshape(newShapes)
460 460
461 461 if mode == 0:
462 462 arrayData[:,i,:,:] = data
463 463 else:
464 464 arrayData[:,:,i,:] = data
465 465
466 466 if doSqueeze:
467 467 arrayData = numpy.squeeze(arrayData, axis=axisSqueeze)
468 468
469 469 return arrayData
470 470
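# A minimal sketch of the reassembly done above for mode 0, using a plain
# dict in place of the h5py group (the shapes and dataset names are assumed).
import numpy

nRecords, nChannels, nPoints, nSamples = 4, 3, 1, 8
dataset = dict(('channel%d' % i, numpy.random.randn(nRecords, nPoints, nSamples))
               for i in range(nChannels))

arrayData = numpy.zeros((nRecords, nChannels, nPoints, nSamples))
for i in range(nChannels):
    arrayData[:, i, :, :] = dataset['channel' + str(i)]

# nPoints == 1, so the singleton axis is squeezed out, as in the method above
arrayData = numpy.squeeze(arrayData, axis=2)
print arrayData.shape                               # (4, 3, 8)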
471 471 def __initDataOut(self, type):
472 472
473 473 # if type =='Parameters':
474 474 # self.dataOut = Parameters()
475 475 # elif type =='Spectra':
476 476 # self.dataOut = Spectra()
477 477 # elif type =='Voltage':
478 478 # self.dataOut = Voltage()
479 479 # elif type =='Correlation':
480 480 # self.dataOut = Correlation()
481 481
482 482 return
483 483
484 484 def __setDataOut(self):
485 485 listMeta = self.listMeta
486 486 listMetaname = self.listMetaname
487 487 listDataname = self.listDataname
488 488 listData = self.listData
489 489
490 490 blockIndex = self.blockIndex
491 491 blockList = self.blockList
492 492
493 493 for i in range(len(listMeta)):
494 494 setattr(self.dataOut,listMetaname[i],listMeta[i])
495 495
496 496 for j in range(len(listData)):
497 497 if listDataname[j]=='utctime':
498 498 # setattr(self.dataOut,listDataname[j],listData[j][blockList[blockIndex]])
499 499 setattr(self.dataOut,'utctimeInit',listData[j][blockList[blockIndex]])
500 500 continue
501 501
502 502 setattr(self.dataOut,listDataname[j],listData[j][blockList[blockIndex],:])
503 503
504 504 return self.dataOut.data_param
505 505
506 506 def getData(self):
507 507
508 508 # if self.flagNoMoreFiles:
509 509 # self.dataOut.flagNoData = True
510 510 # print 'Process finished'
511 511 # return 0
512 512 #
513 513 if self.blockIndex==self.blocksPerFile:
514 514 if not( self.__setNextFileOffline() ):
515 515 self.dataOut.flagNoData = True
516 516 return 0
517 517
518 518 #
519 519 # if self.datablock == None: # set this condition when there is no more data to read
520 520 # self.dataOut.flagNoData = True
521 521 # return 0
522 522
523 523 self.__readData()
524 524 self.__setDataOut()
525 525 self.dataOut.flagNoData = False
526 526
527 527 self.blockIndex += 1
528 528
529 529 return
530 530
531 531 def run(self, **kwargs):
532 532
533 533 if not(self.isConfig):
534 534 self.setup(**kwargs)
535 535 # self.setObjProperties()
536 536 self.isConfig = True
537 537
538 538 self.getData()
539 539
540 540 return
541 541
542 542 class HDF5Writer(Operation):
543 543
544 544 ext = ".hdf5"
545 545
546 546 optchar = "D"
547 547
548 548 metaoptchar = "M"
549 549
550 550 metaFile = None
551 551
552 552 filename = None
553 553
554 554 path = None
555 555
556 556 setFile = None
557 557
558 558 fp = None
559 559
560 560 grp = None
561 561
562 562 ds = None
563 563
564 564 firsttime = True
565 565
566 566 #Configurations
567 567
568 568 blocksPerFile = None
569 569
570 570 blockIndex = None
571 571
572 572 dataOut = None
573 573
574 574 #Data Arrays
575 575
576 576 dataList = None
577 577
578 578 metadataList = None
579 579
580 580 arrayDim = None
581 581
582 582 tableDim = None
583 583
584 584 # dtype = [('arrayName', 'S20'),('nChannels', 'i'), ('nPoints', 'i'), ('nSamples', 'i'),('mode', 'b')]
585 585
586 586 dtype = [('arrayName', 'S20'),('nDimensions', 'i'), ('dim2', 'i'), ('dim1', 'i'),('dim0', 'i'),('mode', 'b')]
587 587
588 588 mode = None
589 589
590 590 nDatas = None #Number of datasets to be stored per array
591 591
592 592 nDims = None #Number of dimensions in each dataset
593 593
594 594 nDimsForDs = None
595 595
596 596 def __init__(self):
597 597
598 598 Operation.__init__(self)
599 599 self.isConfig = False
600 600 return
601 601
602 602
603 603 def setup(self, dataOut, **kwargs):
604 604
605 605 self.path = kwargs['path']
606 606
607 607 if kwargs.has_key('ext'):
608 608 self.ext = kwargs['ext']
609 609
610 610 if kwargs.has_key('blocksPerFile'):
611 611 self.blocksPerFile = kwargs['blocksPerFile']
612 612 else:
613 613 self.blocksPerFile = 10
614 614
615 615 self.metadataList = kwargs['metadataList']
616 616
617 617 self.dataList = kwargs['dataList']
618 618
619 619 self.dataOut = dataOut
620 620
621 621 if kwargs.has_key('mode'):
622 622 mode = kwargs['mode']
623 623
624 624 if type(mode) == int:
625 625 mode = numpy.zeros(len(self.dataList)) + mode
626 626 else:
627 627 mode = numpy.zeros(len(self.dataList))
628 628
629 629 self.mode = mode
630 630
631 631 arrayDim = numpy.zeros((len(self.dataList),5))
632 632
633 633 #Table dimensions
634 634
635 635 dtype0 = self.dtype
636 636
637 637 tableList = []
638 638
639 639 for i in range(len(self.dataList)):
640 640
641 641 dataAux = getattr(self.dataOut, self.dataList[i])
642 642
643 643 if type(dataAux)==float or type(dataAux)==int:
644 644 arrayDim[i,0] = 1
645 645 else:
646 646 arrayDim0 = dataAux.shape
647 647 arrayDim[i,0] = len(arrayDim0)
648 648 arrayDim[i,4] = mode[i]
649 649
650 650 if len(arrayDim0) == 3:
651 651 arrayDim[i,1:-1] = numpy.array(arrayDim0)
652 652 elif len(arrayDim0) == 2:
653 653 arrayDim[i,2:-1] = numpy.array(arrayDim0) #nHeights
654 654 elif len(arrayDim0) == 1:
655 655 arrayDim[i,3] = arrayDim0
656 656 elif len(arrayDim0) == 0:
657 657 arrayDim[i,0] = 1
658 658 arrayDim[i,3] = 1
659 659
660 660 table = numpy.array((self.dataList[i],) + tuple(arrayDim[i,:]),dtype = dtype0)
661 661 tableList.append(table)
662 662
663 663 self.arrayDim = arrayDim
664 664 self.tableDim = numpy.array(tableList, dtype = dtype0)
665 665 self.blockIndex = 0
666 666
667 667 return
668 668
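# A minimal sketch of the dimension table built above, for two hypothetical
# output arrays (names, shapes and modes are assumed).
import numpy

dtype0 = [('arrayName', 'S20'), ('nDimensions', 'i'), ('dim2', 'i'),
          ('dim1', 'i'), ('dim0', 'i'), ('mode', 'b')]

dataList = ['data_param', 'utctimeInit']
shapes = [(3, 5, 100), ()]                          # a 3-D array and a scalar
mode = [0, 0]

tableList = []
for name, shape, m in zip(dataList, shapes, mode):
    row = numpy.zeros(5)
    row[0] = max(len(shape), 1)                     # number of dimensions
    row[4] = m
    if len(shape) == 3:
        row[1:4] = shape
    else:
        row[3] = 1                                  # scalars are stored as a single value
    tableList.append(numpy.array((name,) + tuple(row), dtype=dtype0))

tableDim = numpy.array(tableList, dtype=dtype0)
print tableDim['arrayName'], tableDim['nDimensions']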
669 669 def putMetadata(self):
670 670
671 671 fp = self.createMetadataFile()
672 672 self.writeMetadata(fp)
673 673 fp.close()
674 674 return
675 675
676 676 def createMetadataFile(self):
677 677 ext = self.ext
678 678 path = self.path
679 679 setFile = self.setFile
680 680
681 681 timeTuple = time.localtime(self.dataOut.utctime)
682 682
683 683 subfolder = ''
684 684 fullpath = os.path.join( path, subfolder )
685 685
686 686 if not( os.path.exists(fullpath) ):
687 687 os.mkdir(fullpath)
688 688 setFile = -1 #initialize the set counter
689 689
690 690 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
691 691 fullpath = os.path.join( path, subfolder )
692 692
693 693 if not( os.path.exists(fullpath) ):
694 694 os.mkdir(fullpath)
695 695 setFile = -1 #initialize the set counter
696 696
697 697 else:
698 698 filesList = os.listdir( fullpath )
699 699 filesList = sorted( filesList, key=str.lower )
700 700 if len( filesList ) > 0:
701 701 filesList = [k for k in filesList if 'M' in k]
702 702 filen = filesList[-1]
703 703 # the filename should have the following format
704 704 # 0 1234 567 89A BCDE (hex)
705 705 # x YYYY DDD SSS .ext
706 706 if isNumber( filen[8:11] ):
707 707 setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
708 708 else:
709 709 setFile = -1
710 710 else:
711 711 setFile = -1 #initialize the set counter
712 712
713 713 setFile += 1
714 714
715 715 file = '%s%4.4d%3.3d%3.3d%s' % (self.metaoptchar,
716 716 timeTuple.tm_year,
717 717 timeTuple.tm_yday,
718 718 setFile,
719 719 ext )
720 720
721 721 filename = os.path.join( path, subfolder, file )
722 722 self.metaFile = file
723 723 #Setting HDF5 File
724 724 fp = h5py.File(filename,'w')
725 725
726 726 return fp
727 727
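# A minimal sketch of the set-number bookkeeping above: names follow
# xYYYYDDDSSS.ext, so characters 8:11 hold the 3-digit set number
# (the example file name is assumed).
lastFile = 'M2014185007.hdf5'

if lastFile[8:11].isdigit():
    setFile = int(lastFile[8:11])                   # continue from the last set on disk
else:
    setFile = -1                                    # no previous set found

setFile += 1
newFile = '%s%4.4d%3.3d%3.3d%s' % ('M', 2014, 185, setFile, '.hdf5')
print newFile                                       # M2014185008.hdf5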
728 728 def writeMetadata(self, fp):
729 729
730 730 grp = fp.create_group("Metadata")
731 731 grp.create_dataset('array dimensions', data = self.tableDim, dtype = self.dtype)
732 732
733 733 for i in range(len(self.metadataList)):
734 734 grp.create_dataset(self.metadataList[i], data=getattr(self.dataOut, self.metadataList[i]))
735 735 return
736 736
737 737 def setNextFile(self):
738 738
739 739 ext = self.ext
740 740 path = self.path
741 741 setFile = self.setFile
742 742 mode = self.mode
743 743
744 744 timeTuple = time.localtime(self.dataOut.utctime)
745 745 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
746 746
747 747 fullpath = os.path.join( path, subfolder )
748 748
749 749 if os.path.exists(fullpath):
750 750 filesList = os.listdir( fullpath )
751 751 filesList = [k for k in filesList if 'D' in k]
752 752 if len( filesList ) > 0:
753 753 filesList = sorted( filesList, key=str.lower )
754 754 filen = filesList[-1]
755 755 # the filename should have the following format
756 756 # 0 1234 567 89A BCDE (hex)
757 757 # x YYYY DDD SSS .ext
758 758 if isNumber( filen[8:11] ):
759 759 setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
760 760 else:
761 761 setFile = -1
762 762 else:
763 763 setFile = -1 #initialize the set counter
764 764
765 765 setFile += 1
766 766
767 767 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
768 768 timeTuple.tm_year,
769 769 timeTuple.tm_yday,
770 770 setFile,
771 771 ext )
772 772
773 773 filename = os.path.join( path, subfolder, file )
774 774
775 775 #Setting HDF5 File
776 776 fp = h5py.File(filename,'w')
777 777 grp = fp.create_group("Data")
778 778 grp.attrs['metadata'] = self.metaFile
779 779
780 780 # grp.attrs['blocksPerFile'] = 0
781 781
782 782 ds = []
783 783 data = []
784 784 nDimsForDs = []
785 785
786 786 nDatas = numpy.zeros(len(self.dataList))
787 787 nDims = self.arrayDim[:,0]
788 788
789 789 nDim1 = self.arrayDim[:,2]
790 790 nDim0 = self.arrayDim[:,3]
791 791
792 792 for i in range(len(self.dataList)):
793 793
794 794 if nDims[i]==1:
795 795 # ds0 = grp.create_dataset(self.dataList[i], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype='S20')
796 796 ds0 = grp.create_dataset(self.dataList[i], (1,1), maxshape=(1,self.blocksPerFile) , chunks = True, dtype=numpy.float64)
797 797 ds.append(ds0)
798 798 data.append([])
799 799 nDimsForDs.append(nDims[i])
800 800 else:
801 801
802 802 if mode[i]==0:
803 803 strMode = "channel"
804 804 nDatas[i] = self.arrayDim[i,1]
805 805 else:
806 806 strMode = "param"
807 807 nDatas[i] = self.arrayDim[i,2]
808 808
809 809 if nDims[i]==2:
810 810 nDatas[i] = self.arrayDim[i,2]
811 811
812 812 grp0 = grp.create_group(self.dataList[i])
813 813
814 814 for j in range(int(nDatas[i])):
815 815 tableName = strMode + str(j)
816 816
817 817 if nDims[i] == 3:
818 818 ds0 = grp0.create_dataset(tableName, (nDim1[i],nDim0[i],1) , data = numpy.zeros((nDim1[i],nDim0[i],1)) ,maxshape=(None,nDim0[i],None), chunks=True)
819 819 else:
820 820 ds0 = grp0.create_dataset(tableName, (1,nDim0[i]), data = numpy.zeros((1,nDim0[i])) , maxshape=(None,nDim0[i]), chunks=True)
821 821
822 822 ds.append(ds0)
823 823 data.append([])
824 824 nDimsForDs.append(nDims[i])
825 825 self.nDatas = nDatas
826 826 self.nDims = nDims
827 827 self.nDimsForDs = nDimsForDs
828 828 #Saving variables
829 829 print 'Writing the file: %s'%filename
830 830 self.filename = filename
831 831 self.fp = fp
832 832 self.grp = grp
833 833 self.grp.attrs.modify('nRecords', 1)
834 834 self.ds = ds
835 835 self.data = data
836 836
837 837 self.setFile = setFile
838 838 self.firsttime = True
839 839 self.blockIndex = 0
840 840 return
841 841
842 842 def putData(self):
843 843
844 844 if not self.firsttime:
845 845 self.readBlock()
846 846
847 847 if self.blockIndex == self.blocksPerFile:
848 848
849 849 self.setNextFile()
850 850
851 851 self.setBlock()
852 852 self.writeBlock()
853 853
854 854 self.fp.flush()
855 855 self.fp.close()
856 856
857 857 return
858 858
859 859 def readBlock(self):
860 860
861 861 '''
862 862 data Array configured
863 863
864 864
865 865 self.data
866 866 '''
867 867 ds = self.ds
868 868 #Setting HDF5 File
869 869 fp = h5py.File(self.filename,'r+')
870 870 grp = fp["Data"]
871 871 ind = 0
872 872
873 873 # grp.attrs['blocksPerFile'] = 0
874 874 for i in range(len(self.dataList)):
875 875
876 876 if self.nDims[i]==1:
877 877 ds0 = grp[self.dataList[i]]
878 878 ds[ind] = ds0
879 879 ind += 1
880 880 else:
881 881 if self.mode[i]==0:
882 882 strMode = "channel"
883 883 else:
884 884 strMode = "param"
885 885
886 886 grp0 = grp[self.dataList[i]]
887 887
888 888 for j in range(int(self.nDatas[i])):
889 889 tableName = strMode + str(j)
890 890 ds0 = grp0[tableName]
891 891 ds[ind] = ds0
892 892 ind += 1
893 893
894 894
895 895 self.fp = fp
896 896 self.grp = grp
897 897 self.ds = ds
898 898
899 899 return
900 900
901 901
902 902 def setBlock(self):
903 903 '''
904 904 data Array configured
905 905
906 906
907 907 self.data
908 908 '''
909 909 #Creating Arrays
910 910 data = self.data
911 911 nDatas = self.nDatas
912 912 nDims = self.nDims
913 913 mode = self.mode
914 914 ind = 0
915 915
916 916 for i in range(len(self.dataList)):
917 917 dataAux = getattr(self.dataOut,self.dataList[i])
918 918
919 919 if nDims[i] == 1:
920 920 # data[ind] = numpy.array([str(dataAux)]).reshape((1,1))
921 921 data[ind] = dataAux
922 922 # if not self.firsttime:
923 923 # data[ind] = numpy.hstack((self.ds[ind][:], self.data[ind]))
924 924 ind += 1
925 925 else:
926 926 for j in range(int(nDatas[i])):
927 927 if (mode[i] == 0) or (nDims[i] == 2): #data split per channel, or the array is 2-D
928 928 data[ind] = dataAux[j,:]
929 929 else:
930 930 data[ind] = dataAux[:,j,:]
931 931
932 932 # if nDims[i] == 3:
933 933 # data[ind] = data[ind].reshape((data[ind].shape[0],data[ind].shape[1],1))
934 934
935 935 # if not self.firsttime:
936 936 # data[ind] = numpy.dstack((self.ds[ind][:], data[ind]))
937 937
938 938 # else:
939 939 # data[ind] = data[ind].reshape((1,data[ind].shape[0]))
940 940
941 941 # if not self.firsttime:
942 942 # data[ind] = numpy.vstack((self.ds[ind][:], data[ind]))
943 943 ind += 1
944 944
945 945 self.data = data
946 946 return
947 947
948 948 def writeBlock(self):
949 949 '''
950 950 Saves the block in the HDF5 file
951 951 '''
952 952 for i in range(len(self.ds)):
953 953 if self.firsttime:
954 954 # self.ds[i].resize(self.data[i].shape)
955 955 # self.ds[i][self.blockIndex,:] = self.data[i]
956 956 if type(self.data[i]) == numpy.ndarray:
957 957 nDims1 = len(self.ds[i].shape)
958 958
959 959 if nDims1 == 3:
960 960 self.data[i] = self.data[i].reshape((self.data[i].shape[0],self.data[i].shape[1],1))
961 961
962 962 self.ds[i].resize(self.data[i].shape)
963 963 self.ds[i][:] = self.data[i]
964 964 else:
965 965 if self.nDimsForDs[i] == 1:
966 966 self.ds[i].resize((self.ds[i].shape[0], self.ds[i].shape[1] + 1))
967 967 self.ds[i][0,-1] = self.data[i]
968 968 elif self.nDimsForDs[i] == 2:
969 969 self.ds[i].resize((self.ds[i].shape[0] + 1,self.ds[i].shape[1]))
970 970 self.ds[i][self.blockIndex,:] = self.data[i]
971 971 elif self.nDimsForDs[i] == 3:
972 972
973 973 dataShape = self.data[i].shape
974 974 dsShape = self.ds[i].shape
975 975
976 976 if dataShape[0]==dsShape[0]:
977 977 self.ds[i].resize((self.ds[i].shape[0],self.ds[i].shape[1],self.ds[i].shape[2]+1))
978 978 self.ds[i][:,:,-1] = self.data[i]
979 979 else:
980 980 self.ds[i].resize((self.ds[i].shape[0] + dataShape[0],self.ds[i].shape[1],self.ds[i].shape[2]))
981 981 self.ds[i][dsShape[0]:,:,0] = self.data[i]
982 982 # self.ds[i].append(self.data[i])
983 983 # self.fp.flush()
984 984 # if not self.firsttime:
985 985 # self.fp.root.Data._v_attrs.nRecords = self.blockIndex
986 986
987 987 # if self.firsttime:
988 988 # self.fp.close()
989 989 # self.readBlock2()
990 990
991 991 self.blockIndex += 1
992 992 self.firsttime = False
993 993 return
994 994
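# A minimal sketch of the resize-and-append pattern used above: datasets
# created with an unlimited axis (maxshape=None on that axis) can be grown
# with resize() before each block is written (the file path is assumed).
import numpy
import h5py

fp = h5py.File('/tmp/example_resize.hdf5', 'w')
ds = fp.create_dataset('channel0', (1, 10), maxshape=(None, 10), chunks=True)

for block in range(3):
    profile = numpy.arange(10, dtype=numpy.float64) + block
    if block > 0:
        ds.resize((ds.shape[0] + 1, ds.shape[1]))   # add one row per new block
    ds[block, :] = profile

print ds.shape                                      # (3, 10)
fp.close()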
995 995 def run(self, dataOut, **kwargs):
996 996 if not(self.isConfig):
997 997 self.setup(dataOut, **kwargs)
998 998 self.isConfig = True
999 999 self.putMetadata()
1000 1000 self.setNextFile()
1001 1001
1002 1002 self.putData()
1003 1003 return
1004 1004
@@ -1,1618 +1,1636
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 22 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
23 23
24 24 LOCALTIME = True
25 25
26 26 def isNumber(cad):
27 27 """
28 28 Checks whether the characters that make up a string can be converted to a number.
29 29 
30 30 Exceptions:
31 31 If the given string cannot be converted to a number
32 32 Input:
33 33 cad : string to analyze to determine whether it can be converted to a number
34 34 
35 35 Return:
36 36 True : the string is numeric
37 37 False : the string is not numeric
38 38 """
39 39 try:
40 40 float( cad )
41 41 return True
42 42 except:
43 43 return False
44 44
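# Usage sketch for isNumber above (the example strings are assumed):
print isNumber('307')                               # True
print isNumber('3.5e2')                             # True
print isNumber('2009DDD')                           # False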
45 45 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
46 46 """
47 47 This function determines whether a data file falls within the specified date range.
48 48 
49 49 Inputs:
50 50 filename : full path of the data file in Jicamarca format (.r)
51 51 
52 52 startUTSeconds : start date of the selected range, given in seconds
53 53 counted from 01/01/1970.
54 54 endUTSeconds : end date of the selected range, given in seconds
55 55 counted from 01/01/1970.
56 56 
57 57 Return:
58 58 Boolean : returns True if the data file contains data within the specified
59 59 date range, otherwise returns False.
60 60 
61 61 Exceptions:
62 62 If the file does not exist or cannot be opened
63 63 If the header cannot be read.
64 64
65 65 """
66 66 basicHeaderObj = BasicHeader(LOCALTIME)
67 67
68 68 try:
69 69 fp = open(filename,'rb')
70 70 except IOError:
71 traceback.print_exc()
72 raise IOError, "The file %s can't be opened" %(filename)
71 print "The file %s can't be opened" %(filename)
72 return 0
73 73
74 74 sts = basicHeaderObj.read(fp)
75 75 fp.close()
76 76
77 77 if not(sts):
78 78 print "Skipping the file %s because it does not have a valid header" %(filename)
79 79 return 0
80 80
81 81 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 82 return 0
83 83
84 84 return 1
85 85
86 86 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
87 87 """
88 88 Returns the file datetime if the data file falls within the specified time range.
89 89 
90 90 Inputs:
91 91 filename : full path of the data file in Jicamarca format (.r)
92 92 
93 93 startDate : start date of the selected range, as a datetime.date
94 94 
95 95 endDate : end date of the selected range, as a datetime.date
96 96 
97 97 startTime : start time of the selected range, as a datetime.time
98 98 
99 99 endTime : end time of the selected range, as a datetime.time
100 100 
101 101 Return:
102 102 datetime : the file datetime if the data file contains data within the
103 103 specified range, otherwise None.
104 104 
105 105 Exceptions:
106 106 If the file does not exist or cannot be opened
107 107 If the header cannot be read.
108 108
109 109 """
110 110
111 111
112 112 try:
113 113 fp = open(filename,'rb')
114 114 except IOError:
115 traceback.print_exc()
116 raise IOError, "The file %s can't be opened" %(filename)
115 print "The file %s can't be opened" %(filename)
116 return None
117 117
118 118 basicHeaderObj = BasicHeader(LOCALTIME)
119 119 sts = basicHeaderObj.read(fp)
120 120 fp.close()
121 121
122 122 thisDatetime = basicHeaderObj.datatime
123 123 thisDate = thisDatetime.date()
124 124 thisTime = thisDatetime.time()
125 125
126 126 if not(sts):
127 127 print "Skipping the file %s because it does not have a valid header" %(filename)
128 128 return None
129 129
130 130 #General case
131 131 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
132 132 #-----------o----------------------------o-----------
133 133 # startTime endTime
134 134
135 135 if endTime >= startTime:
136 136 if (thisTime < startTime) or (thisTime > endTime):
137 137 return None
138 138
139 139 return thisDatetime
140 140
141 141 #If endTime < startTime then endTime belongs to the next day
142 142
143 143
144 144 #<<<<<<<<<<<o o>>>>>>>>>>>
145 145 #-----------o----------------------------o-----------
146 146 # endTime startTime
147 147
148 148 if (thisDate == startDate) and (thisTime < startTime):
149 149 return None
150 150
151 151 if (thisDate == endDate) and (thisTime > endTime):
152 152 return None
153 153
154 154 if (thisTime < startTime) and (thisTime > endTime):
155 155 return None
156 156
157 157 return thisDatetime
158 158
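# A simplified sketch of the time-of-day part of the check above: a same-day
# window, and a window that wraps past midnight (endTime belongs to the next
# day); the date checks at the range boundaries are omitted here.
import datetime

def time_in_window(thisTime, startTime, endTime):
    if endTime >= startTime:                        # normal, same-day window
        return startTime <= thisTime <= endTime
    return not (endTime < thisTime < startTime)     # wrapped window

print time_in_window(datetime.time(10, 0), datetime.time(8, 0), datetime.time(18, 0))   # True
print time_in_window(datetime.time(1, 0), datetime.time(22, 0), datetime.time(6, 0))    # True
print time_in_window(datetime.time(12, 0), datetime.time(22, 0), datetime.time(6, 0))   # False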
159 159 def isFolderInDateRange(folder, startDate=None, endDate=None):
160 160 """
161 161 Returns 1 if the data folder falls within the specified date range.
162 162 
163 163 Inputs:
164 164 folder : full name of the directory.
165 165 Its format should be "/path_root/?YYYYDDD"
166 166 
167 167 where:
168 168 YYYY : year (e.g. 2015)
169 169 DDD : day of the year (e.g. 305)
170 170 
171 171 startDate : start date of the selected range, as a datetime.date
172 172 
173 173 endDate : end date of the selected range, as a datetime.date
174 174 
175 175 Return:
176 176 Boolean : returns 1 if the folder contains data within the specified
177 177 date range, otherwise returns 0.
178 178 Exceptions:
179 179 If the directory does not have the expected format
180 180 """
181 181
182 182 basename = os.path.basename(folder)
183 183
184 184 if not isRadarFolder(basename):
185 raise IOError, "The folder %s has not the rigth format" %folder
185 print "The folder %s does not have the right format" %folder
186 return 0
186 187
187 188 if startDate and endDate:
188 189 thisDate = getDateFromRadarFolder(basename)
189 190
190 191 if thisDate < startDate:
191 192 return 0
192 193
193 194 if thisDate > endDate:
194 195 return 0
195 196
196 197 return 1
197 198
198 199 def isFileInDateRange(filename, startDate=None, endDate=None):
199 200 """
200 201 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
201 202
202 203 Inputs:
203 204 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
204 205
205 206 Su formato deberia ser "?YYYYDDDsss"
206 207
207 208 siendo:
208 209 YYYY : Anio (ejemplo 2015)
209 210 DDD : Dia del anio (ejemplo 305)
210 211 sss : set
211 212
212 213 startDate : fecha inicial del rango seleccionado en formato datetime.date
213 214
214 215 endDate : fecha final del rango seleccionado en formato datetime.date
215 216
216 217 Return:
217 218 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
218 219 fecha especificado, de lo contrario retorna False.
219 220 Excepciones:
220 221 Si el archivo no tiene el formato adecuado
221 222 """
222 223
223 224 basename = os.path.basename(filename)
224 225
225 226 if not isRadarFile(basename):
226 raise IOError, "The filename %s has not the rigth format" %filename
227 print "The filename %s does not have the right format" %filename
228 return 0
227 229
228 230 if startDate and endDate:
229 231 thisDate = getDateFromRadarFile(basename)
230 232
231 233 if thisDate < startDate:
232 234 return 0
233 235
234 236 if thisDate > endDate:
235 237 return 0
236 238
237 239 return 1
238 240
239 241 def getFileFromSet(path, ext, set):
240 242 validFilelist = []
241 243 fileList = os.listdir(path)
242 244
243 245 # 0 1234 567 89A BCDE
244 246 # H YYYY DDD SSS .ext
245 247
246 248 for thisFile in fileList:
247 249 try:
248 250 year = int(thisFile[1:5])
249 251 doy = int(thisFile[5:8])
250 252 except:
251 253 continue
252 254
253 255 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
254 256 continue
255 257
256 258 validFilelist.append(thisFile)
257 259
258 260 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
259 261
260 262 if len(myfile)!= 0:
261 263 return myfile[0]
262 264 else:
263 265 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
264 266 print 'The filename %s does not exist'%filename
265 267 print '...falling back to the last file:'
266 268
267 269 if validFilelist:
268 270 validFilelist = sorted( validFilelist, key=str.lower )
269 271 return validFilelist[-1]
270 272
271 273 return None
272 274
273 275 def getlastFileFromPath(path, ext):
274 276 """
275 277 Filters the file list, keeping only names that match the "PYYYYDDDSSS.ext" format,
276 278 and returns the last file of the remaining list.
277 279 
278 280 Input:
279 281 path : folder containing the files to inspect
280 282 ext : extension of the files contained in the folder
281 283 
282 284 Return:
283 285 The last file of the folder, without the path.
284 286 """
285 287 validFilelist = []
286 288 fileList = os.listdir(path)
287 289
288 290 # 0 1234 567 89A BCDE
289 291 # H YYYY DDD SSS .ext
290 292
291 293 for thisFile in fileList:
292 294
293 295 year = thisFile[1:5]
294 296 if not isNumber(year):
295 297 continue
296 298
297 299 doy = thisFile[5:8]
298 300 if not isNumber(doy):
299 301 continue
300 302
301 303 year = int(year)
302 304 doy = int(doy)
303 305
304 306 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
305 307 continue
306 308
307 309 validFilelist.append(thisFile)
308 310
309 311 if validFilelist:
310 312 validFilelist = sorted( validFilelist, key=str.lower )
311 313 return validFilelist[-1]
312 314
313 315 return None
314 316
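# A minimal sketch of the filtering done above, on an in-memory list of
# assumed file names instead of the contents of a real folder.
fileList = ['D2014185000.r', 'D2014185001.r', 'readme.txt', 'D2014186000.r']
ext = '.r'

validFilelist = []
for thisFile in fileList:
    if not (thisFile[1:5].isdigit() and thisFile[5:8].isdigit()):
        continue
    if not thisFile.lower().endswith(ext.lower()):
        continue
    validFilelist.append(thisFile)

print sorted(validFilelist, key=str.lower)[-1]      # D2014186000.r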
315 317 def checkForRealPath(path, foldercounter, year, doy, set, ext):
316 318 """
317 319 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
318 320 It tries several upper/lowercase name combinations to determine
319 321 the exact path of a given file.
320 322 
321 323 Example :
322 324 the correct file name is .../.../D2009307/P2009307367.ext
323 325 
324 326 The function then tries the following combinations
325 327 .../.../y2009307367.ext
326 328 .../.../Y2009307367.ext
327 329 .../.../x2009307/y2009307367.ext
328 330 .../.../x2009307/Y2009307367.ext
329 331 .../.../X2009307/y2009307367.ext
330 332 .../.../X2009307/Y2009307367.ext
331 333 where, in this case, the last combination of letters is identical to the file being searched for
332 334 
333 335 Return:
334 336 If the right combination is found, it returns the full path and the file name;
335 337 otherwise it returns None as the path and the last uppercase combination
336 338 as the filename
337 339 """
338 340 fullfilename = None
339 341 find_flag = False
340 342 filename = None
341 343
342 344 prefixDirList = [None,'d','D']
343 345 if ext.lower() == ".r": #voltage
344 346 prefixFileList = ['d','D']
345 347 elif ext.lower() == ".pdata": #spectra
346 348 prefixFileList = ['p','P']
347 349 else:
348 350 return None, filename
349 351
350 352 #sweep over the possible combinations
351 353 for prefixDir in prefixDirList:
352 354 thispath = path
353 355 if prefixDir != None:
354 356 #build the directory name xYYYYDDD (x=d or x=D)
355 357 if foldercounter == 0:
356 358 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
357 359 else:
358 360 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
359 361 for prefixFile in prefixFileList: #sweep over the two possible case combinations of the file prefix
360 362 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
361 363 fullfilename = os.path.join( thispath, filename ) #build the full path
362 364
363 365 if os.path.exists( fullfilename ): #check that it exists
364 366 find_flag = True
365 367 break
366 368 if find_flag:
367 369 break
368 370
369 371 if not(find_flag):
370 372 return None, filename
371 373
372 374 return fullfilename, filename
373 375
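# A minimal sketch listing the candidate paths probed above for one voltage
# file (the base path, year, doy and set values are assumed).
import os

basePath, year, doy, setNumber, ext = '/data', 2009, 307, 367, '.r'

candidates = []
for prefixDir in [None, 'd', 'D']:
    if prefixDir is None:
        thispath = basePath
    else:
        thispath = os.path.join(basePath, '%s%04d%03d' % (prefixDir, year, doy))
    for prefixFile in ['d', 'D']:
        filename = '%s%04d%03d%03d%s' % (prefixFile, year, doy, setNumber, ext)
        candidates.append(os.path.join(thispath, filename))

for candidate in candidates:
    print candidate
# checkForRealPath stops at the first candidate for which os.path.exists() is True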
374 376 def isRadarFolder(folder):
375 377 try:
376 378 year = int(folder[1:5])
377 379 doy = int(folder[5:8])
378 380 except:
379 381 return 0
380 382
381 383 return 1
382 384
383 385 def isRadarFile(file):
384 386 try:
385 387 year = int(file[1:5])
386 388 doy = int(file[5:8])
387 389 set = int(file[8:11])
388 390 except:
389 391 return 0
390 392
391 393 return 1
392 394
393 395 def getDateFromRadarFile(file):
394 396 try:
395 397 year = int(file[1:5])
396 398 doy = int(file[5:8])
397 399 set = int(file[8:11])
398 400 except:
399 401 return None
400 402
401 403 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
402 404 return thisDate
403 405
404 406 def getDateFromRadarFolder(folder):
405 407 try:
406 408 year = int(folder[1:5])
407 409 doy = int(folder[5:8])
408 410 except:
409 411 return None
410 412
411 413 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
412 414 return thisDate
413 415
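# A worked example of the year/day-of-year conversion used by both helpers
# above: January 1st plus (doy - 1) days.
import datetime

year, doy = 2015, 305
thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
print thisDate                                      # 2015-11-01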
414 416 class JRODataIO:
415 417
416 418 c = 3E8
417 419
418 420 isConfig = False
419 421
420 422 basicHeaderObj = None
421 423
422 424 systemHeaderObj = None
423 425
424 426 radarControllerHeaderObj = None
425 427
426 428 processingHeaderObj = None
427 429
428 430 dtype = None
429 431
430 432 pathList = []
431 433
432 434 filenameList = []
433 435
434 436 filename = None
435 437
436 438 ext = None
437 439
438 440 flagIsNewFile = 1
439 441
440 442 flagDiscontinuousBlock = 0
441 443
442 444 flagIsNewBlock = 0
443 445
444 446 fp = None
445 447
446 448 firstHeaderSize = 0
447 449
448 450 basicHeaderSize = 24
449 451
450 452 versionFile = 1103
451 453
452 454 fileSize = None
453 455
454 456 # ippSeconds = None
455 457
456 458 fileSizeByHeader = None
457 459
458 460 fileIndex = None
459 461
460 462 profileIndex = None
461 463
462 464 blockIndex = None
463 465
464 466 nTotalBlocks = None
465 467
466 468 maxTimeStep = 30
467 469
468 470 lastUTTime = None
469 471
470 472 datablock = None
471 473
472 474 dataOut = None
473 475
474 476 blocksize = None
475 477
476 478 getByBlock = False
477 479
478 480 def __init__(self):
479 481
480 raise ValueError, "Not implemented"
482 raise NotImplementedError
481 483
482 484 def run(self):
483 485
484 raise ValueError, "Not implemented"
486 raise NotImplementedError
485 487
486 488 def getDtypeWidth(self):
487 489
488 490 dtype_index = get_dtype_index(self.dtype)
489 491 dtype_width = get_dtype_width(dtype_index)
490 492
491 493 return dtype_width
492 494
493 495 class JRODataReader(JRODataIO):
494 496
495 497
496 498 online = 0
497 499
498 500 realtime = 0
499 501
500 502 nReadBlocks = 0
501 503
502 504 delay = 10 #number of seconds to wait for a new file
503 505 
504 506 nTries = 3 #number of tries
505 507 
506 508 nFiles = 3 #number of files to search
507 509
508 510 path = None
509 511
510 512 foldercounter = 0
511 513
512 514 flagNoMoreFiles = 0
513 515
514 516 datetimeList = []
515 517
516 518 __isFirstTimeOnline = 1
517 519
518 520 __printInfo = True
519 521
520 522 profileIndex = None
521 523
522 524 nTxs = 1
523 525
524 526 txIndex = None
525 527
526 528 def __init__(self):
527 529
528 530 """
531 This class is used to find data files
529 532
530 """
533 Example:
534 reader = JRODataReader()
535 fileList = reader.findDatafiles(path)
531 536
532 # raise NotImplementedError, "This method has not been implemented"
537 """
538 pass
533 539
534 540
535 541 def createObjByDefault(self):
536 542 """
537 543
538 544 """
539 raise NotImplementedError, "This method has not been implemented"
545 raise NotImplementedError
540 546
541 547 def getBlockDimension(self):
542 548
543 raise NotImplementedError, "No implemented"
549 raise NotImplementedError
544 550
545 551 def __searchFilesOffLine(self,
546 552 path,
547 553 startDate=None,
548 554 endDate=None,
549 555 startTime=datetime.time(0,0,0),
550 556 endTime=datetime.time(23,59,59),
551 557 set=None,
552 558 expLabel='',
553 559 ext='.r',
554 560 walk=True):
555 561
556 562 self.filenameList = []
557 563 self.datetimeList = []
558 564
559 565 pathList = []
560 566
561 567 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
562 568
563 569 if dateList == []:
564 print "[Reading] No *%s files in %s from %s to %s)"%(ext, path,
565 datetime.datetime.combine(startDate,startTime).ctime(),
566 datetime.datetime.combine(endDate,endTime).ctime())
570 # print "[Reading] No *%s files in %s from %s to %s)"%(ext, path,
571 # datetime.datetime.combine(startDate,startTime).ctime(),
572 # datetime.datetime.combine(endDate,endTime).ctime())
567 573
568 574 return None, None
569 575
570 576 if len(dateList) > 1:
571 577 print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)
572 578 else:
573 579 print "[Reading] data was found for the date %s" %(dateList[0])
574 580
575 581 filenameList = []
576 582 datetimeList = []
577 583
578 584 for thisPath in pathList:
579 585 # thisPath = pathList[pathDict[file]]
580 586
581 587 fileList = glob.glob1(thisPath, "*%s" %ext)
582 588 fileList.sort()
583 589
584 590 for file in fileList:
585 591
586 592 filename = os.path.join(thisPath,file)
587 593
588 594 if not isFileInDateRange(filename, startDate, endDate):
589 595 continue
590 596
591 597 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
592 598
593 599 if not(thisDatetime):
594 600 continue
595 601
596 602 filenameList.append(filename)
597 603 datetimeList.append(thisDatetime)
598 604
599 605 if not(filenameList):
600 606 print "[Reading] No files were found in time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
601 607 return None, None
602 608
603 609 print "[Reading] %d file(s) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
604 610 print
605 611
606 612 for i in range(len(filenameList)):
607 613 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
608 614
609 615 self.filenameList = filenameList
610 616 self.datetimeList = datetimeList
611 617
612 618 return pathList, filenameList
613 619
614 620 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
615 621
616 622 """
617 623 Searches for the latest file in the latest folder (whether or not constrained by startDateTime) and
618 624 returns the file found, along with additional information.
619 625 
620 626 Input:
621 627 path : folder containing the data files
622 628 
623 629 expLabel : name of the sub-experiment (subfolder)
624 630 
625 631 ext : extension of the files
626 632 
627 633 walk : if enabled, the search descends into the day-of-year subdirectories (doypath)
628 634 
629 635 Return:
630 636 directory : the directory where the file was found
631 637 filename : the latest file of the folder
632 638 year : the year
633 639 doy : the day of the year
634 640 set : the set number of the file
635 641
636 642
637 643 """
638 644 dirList = []
639 645
640 646 if not walk:
641 647 fullpath = path
642 648 foldercounter = 0
643 649 else:
644 650 #Keep only directories
645 651 for thisPath in os.listdir(path):
646 652 if not os.path.isdir(os.path.join(path,thisPath)):
647 653 continue
648 654 if not isRadarFolder(thisPath):
649 655 continue
650 656
651 657 dirList.append(thisPath)
652 658
653 659 if not(dirList):
654 660 return None, None, None, None, None, None
655 661
656 662 dirList = sorted( dirList, key=str.lower )
657 663
658 664 doypath = dirList[-1]
659 665 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
660 666 fullpath = os.path.join(path, doypath, expLabel)
661 667
662 668
663 669 print "[Reading] %s folder was found: " %(fullpath )
664 670
665 671 if set == None:
666 672 filename = getlastFileFromPath(fullpath, ext)
667 673 else:
668 674 filename = getFileFromSet(fullpath, ext, set)
669 675
670 676 if not(filename):
671 677 return None, None, None, None, None, None
672 678
673 679 print "[Reading] %s file was found" %(filename)
674 680
675 681 if not(self.__verifyFile(os.path.join(fullpath, filename))):
676 682 return None, None, None, None, None, None
677 683
678 684 year = int( filename[1:5] )
679 685 doy = int( filename[5:8] )
680 686 set = int( filename[8:11] )
681 687
682 688 return fullpath, foldercounter, filename, year, doy, set
683 689
684 690 def __setNextFileOffline(self):
685 691
686 692 idFile = self.fileIndex
687 693
688 694 while (True):
689 695 idFile += 1
690 696 if not(idFile < len(self.filenameList)):
691 697 self.flagNoMoreFiles = 1
692 698 # print "[Reading] No more Files"
693 699 return 0
694 700
695 701 filename = self.filenameList[idFile]
696 702
697 703 if not(self.__verifyFile(filename)):
698 704 continue
699 705
700 706 fileSize = os.path.getsize(filename)
701 707 fp = open(filename,'rb')
702 708 break
703 709
704 710 self.flagIsNewFile = 1
705 711 self.fileIndex = idFile
706 712 self.filename = filename
707 713 self.fileSize = fileSize
708 714 self.fp = fp
709 715
710 716 # print "[Reading] Setting the file: %s"%self.filename
711 717
712 718 return 1
713 719
714 720 def __setNextFileOnline(self):
715 721 """
716 722 Searches, within a specific folder, for the next file that has enough data to be read; if
717 723 no valid file is found it waits for a given time and then searches the next n possible
718 724 files.
719 725 
720 726 Affected:
721 727 self.flagIsNewFile
722 728 self.filename
723 729 self.fileSize
724 730 self.fp
725 731 self.set
726 732 self.flagNoMoreFiles
727 733 
728 734 Return:
729 735 0 : if no valid next file could be found after the search
730 736 1 : if the file was opened successfully and is ready to be read
731 737 
732 738 Exceptions:
733 739 If a given file cannot be opened
734 740 """
735 741 nFiles = 0
736 742 fileOk_flag = False
737 743 firstTime_flag = True
738 744
739 745 self.set += 1
740 746
741 747 if self.set > 999:
742 748 self.set = 0
743 749 self.foldercounter += 1
744 750
745 751 #look for the first available file
746 752 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
747 753 if fullfilename:
748 754 if self.__verifyFile(fullfilename, False):
749 755 fileOk_flag = True
750 756
751 757 #if no file is found, wait and search again
752 758 if not(fileOk_flag):
753 759 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
754 760 
755 761 if firstTime_flag: #on the first pass, retry self.nTries times
756 762 tries = self.nTries
757 763 else:
758 764 tries = 1 #on later passes, try only once
759 765
760 766 for nTries in range( tries ):
761 767 if firstTime_flag:
762 768 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
763 769 sleep( self.delay )
764 770 else:
765 771 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
766 772
767 773 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
768 774 if fullfilename:
769 775 if self.__verifyFile(fullfilename):
770 776 fileOk_flag = True
771 777 break
772 778
773 779 if fileOk_flag:
774 780 break
775 781
776 782 firstTime_flag = False
777 783
778 784 print "\t[Reading] Skipping the file \"%s\" because it does not exist" % filename
779 785 self.set += 1
780 786
781 787 if nFiles == (self.nFiles-1): #if the file is not found, move on and search the next folder
782 788 self.set = 0
783 789 self.doy += 1
784 790 self.foldercounter = 0
785 791
786 792 if fileOk_flag:
787 793 self.fileSize = os.path.getsize( fullfilename )
788 794 self.filename = fullfilename
789 795 self.flagIsNewFile = 1
790 796 if self.fp != None: self.fp.close()
791 797 self.fp = open(fullfilename, 'rb')
792 798 self.flagNoMoreFiles = 0
793 799 # print '[Reading] Setting the file: %s' % fullfilename
794 800 else:
795 801 self.fileSize = 0
796 802 self.filename = None
797 803 self.flagIsNewFile = 0
798 804 self.fp = None
799 805 self.flagNoMoreFiles = 1
800 806 # print '[Reading] No more files to read'
801 807
802 808 return fileOk_flag
803 809
804 810 def setNextFile(self):
805 811 if self.fp != None:
806 812 self.fp.close()
807 813
808 814 if self.online:
809 815 newFile = self.__setNextFileOnline()
810 816 else:
811 817 newFile = self.__setNextFileOffline()
812 818
813 819 if not(newFile):
814 820 print '[Reading] No more files to read'
815 821 return 0
816 822
817 823 print '[Reading] Setting the file: %s' % self.filename
818 824
819 825 self.__readFirstHeader()
820 826 self.nReadBlocks = 0
821 827 return 1
822 828
823 829 def __waitNewBlock(self):
824 830 """
825 831 Returns 1 if a new data block was found, 0 otherwise.
826 832 
827 833 If the reading mode is offline it always returns 0.
828 834 """
829 835 if not self.online:
830 836 return 0
831 837
832 838 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
833 839 return 0
834 840
835 841 currentPointer = self.fp.tell()
836 842
837 843 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
838 844
839 845 for nTries in range( self.nTries ):
840 846
841 847 self.fp.close()
842 848 self.fp = open( self.filename, 'rb' )
843 849 self.fp.seek( currentPointer )
844 850
845 851 self.fileSize = os.path.getsize( self.filename )
846 852 currentSize = self.fileSize - currentPointer
847 853
848 854 if ( currentSize >= neededSize ):
849 855 self.basicHeaderObj.read(self.fp)
850 856 return 1
851 857
852 858 if self.fileSize == self.fileSizeByHeader:
853 859 # self.flagEoF = True
854 860 return 0
855 861
856 862 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
857 863 sleep( self.delay )
858 864
859 865
860 866 return 0
861 867
862 868 def waitDataBlock(self,pointer_location):
863 869
864 870 currentPointer = pointer_location
865 871
866 872 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
867 873
868 874 for nTries in range( self.nTries ):
869 875 self.fp.close()
870 876 self.fp = open( self.filename, 'rb' )
871 877 self.fp.seek( currentPointer )
872 878
873 879 self.fileSize = os.path.getsize( self.filename )
874 880 currentSize = self.fileSize - currentPointer
875 881
876 882 if ( currentSize >= neededSize ):
877 883 return 1
878 884
879 885 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
880 886 sleep( self.delay )
881 887
882 888 return 0
883 889
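# A minimal sketch of the polling pattern used by waitDataBlock above:
# re-check the file size a few times, sleeping in between, until enough
# bytes are available past the current pointer (names and values are assumed).
import os
from time import sleep

def wait_for_bytes(filename, pointer, neededSize, nTries=3, delay=1):
    for nTry in range(nTries):
        currentSize = os.path.getsize(filename) - pointer
        if currentSize >= neededSize:
            return 1
        print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (delay, nTry + 1)
        sleep(delay)
    return 0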
884 890 def __jumpToLastBlock(self):
885 891
886 892 if not(self.__isFirstTimeOnline):
887 893 return
888 894
889 895 csize = self.fileSize - self.fp.tell()
890 896 blocksize = self.processingHeaderObj.blockSize
891 897
892 898 #skip the first data block
893 899 if csize > self.processingHeaderObj.blockSize:
894 900 self.fp.seek(self.fp.tell() + blocksize)
895 901 else:
896 902 return
897 903
898 904 csize = self.fileSize - self.fp.tell()
899 905 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
900 906 while True:
901 907
902 908 if self.fp.tell()<self.fileSize:
903 909 self.fp.seek(self.fp.tell() + neededsize)
904 910 else:
905 911 self.fp.seek(self.fp.tell() - neededsize)
906 912 break
907 913
908 914 # csize = self.fileSize - self.fp.tell()
909 915 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
910 916 # factor = int(csize/neededsize)
911 917 # if factor > 0:
912 918 # self.fp.seek(self.fp.tell() + factor*neededsize)
913 919
914 920 self.flagIsNewFile = 0
915 921 self.__isFirstTimeOnline = 0
916 922
917 923 def __setNewBlock(self):
918 924
919 925 if self.fp == None:
920 926 return 0
921 927
922 928 # if self.online:
923 929 # self.__jumpToLastBlock()
924 930
925 931 if self.flagIsNewFile:
926 932 self.lastUTTime = self.basicHeaderObj.utc
927 933 return 1
928 934
929 935 if self.realtime:
930 936 self.flagDiscontinuousBlock = 1
931 937 if not(self.setNextFile()):
932 938 return 0
933 939 else:
934 940 return 1
935 941
936 942 currentSize = self.fileSize - self.fp.tell()
937 943 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
938 944
939 945 if (currentSize >= neededSize):
940 946 self.basicHeaderObj.read(self.fp)
941 947 self.lastUTTime = self.basicHeaderObj.utc
942 948 return 1
943 949
944 950 if self.__waitNewBlock():
945 951 self.lastUTTime = self.basicHeaderObj.utc
946 952 return 1
947 953
948 954 if not(self.setNextFile()):
949 955 return 0
950 956
951 957 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
952 958 self.lastUTTime = self.basicHeaderObj.utc
953 959
954 960 self.flagDiscontinuousBlock = 0
955 961
956 962 if deltaTime > self.maxTimeStep:
957 963 self.flagDiscontinuousBlock = 1
958 964
959 965 return 1
960 966
961 967 def readNextBlock(self):
962 968
963 969 if not(self.__setNewBlock()):
964 970 return 0
965 971
966 972 if not(self.readBlock()):
967 973 return 0
968 974
969 975 self.getBasicHeader()
970 976
971 977 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
972 978 self.processingHeaderObj.dataBlocksPerFile,
973 979 self.dataOut.datatime.ctime())
974 980 return 1
975 981
976 982 def __readFirstHeader(self):
977 983
978 984 self.basicHeaderObj.read(self.fp)
979 985 self.systemHeaderObj.read(self.fp)
980 986 self.radarControllerHeaderObj.read(self.fp)
981 987 self.processingHeaderObj.read(self.fp)
982 988
983 989 self.firstHeaderSize = self.basicHeaderObj.size
984 990
985 991 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
986 992 if datatype == 0:
987 993 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
988 994 elif datatype == 1:
989 995 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
990 996 elif datatype == 2:
991 997 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
992 998 elif datatype == 3:
993 999 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
994 1000 elif datatype == 4:
995 1001 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
996 1002 elif datatype == 5:
997 1003 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
998 1004 else:
999 1005 raise ValueError, 'Data type was not defined'
1000 1006
1001 1007 self.dtype = datatype_str
1002 1008 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1003 1009 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1004 1010 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1005 1011 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1006 1012 self.getBlockDimension()
1007 1013
1008 1014 def __verifyFile(self, filename, msgFlag=True):
1015
1009 1016 msg = None
1017
1010 1018 try:
1011 1019 fp = open(filename, 'rb')
1012 currentPosition = fp.tell()
1013 1020 except IOError:
1014 traceback.print_exc()
1021
1015 1022 if msgFlag:
1016 print "[Reading] The file %s can't be opened" % (filename)
1023 print "[Reading] File %s can't be opened" % (filename)
1024
1017 1025 return False
1018 1026
1027 currentPosition = fp.tell()
1019 1028 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1020 1029
1021 1030 if neededSize == 0:
1022 1031 basicHeaderObj = BasicHeader(LOCALTIME)
1023 1032 systemHeaderObj = SystemHeader()
1024 1033 radarControllerHeaderObj = RadarControllerHeader()
1025 1034 processingHeaderObj = ProcessingHeader()
1026 1035
1027 try:
1028 if not( basicHeaderObj.read(fp) ): raise IOError
1029 if not( systemHeaderObj.read(fp) ): raise IOError
1030 if not( radarControllerHeaderObj.read(fp) ): raise IOError
1031 if not( processingHeaderObj.read(fp) ): raise IOError
1032 # data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1033
1034 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1036 if not( basicHeaderObj.read(fp) ):
1037 fp.close()
1038 return False
1035 1039
1036 except IOError:
1037 traceback.print_exc()
1038 # sys.exit(0)
1040 if not( systemHeaderObj.read(fp) ):
1041 fp.close()
1042 return False
1039 1043
1040 if msgFlag:
1041 print "[Reading] The file %s is empty or it hasn't enough data" % filename
1044 if not( radarControllerHeaderObj.read(fp) ):
1045 fp.close()
1046 return False
1042 1047
1048 if not( processingHeaderObj.read(fp) ):
1043 1049 fp.close()
1044 1050 return False
1051
1052 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1045 1053 else:
1046 1054 msg = "[Reading] Skipping the file %s because it does not have enough data" %filename
1047 1055
1048 1056 fp.close()
1057
1049 1058 fileSize = os.path.getsize(filename)
1050 1059 currentSize = fileSize - currentPosition
1060
1051 1061 if currentSize < neededSize:
1052 1062 if msgFlag and (msg != None):
1053 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
1063 print msg
1054 1064 return False
1055 1065
1056 1066 return True
1057 1067
1058 1068 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1059 1069
1060 1070 dateList = []
1061 1071 pathList = []
1062 1072
1063 1073 multi_path = path.split(',')
1064 1074
1065 1075 if not walk:
1066 1076
1067 1077 for single_path in multi_path:
1068 1078
1069 1079 if not os.path.isdir(single_path):
1070 1080 continue
1071 1081
1072 1082 fileList = glob.glob1(single_path, "*"+ext)
1073 1083
1074 1084 for thisFile in fileList:
1075 1085
1076 1086 if not os.path.isfile(os.path.join(single_path, thisFile)):
1077 1087 continue
1078 1088
1079 1089 if not isRadarFile(thisFile):
1080 1090 continue
1081 1091
1082 1092 if not isFileInDateRange(thisFile, startDate, endDate):
1083 1093 continue
1084 1094
1085 1095 thisDate = getDateFromRadarFile(thisFile)
1086 1096
1087 1097 if thisDate in dateList:
1088 1098 continue
1089 1099
1090 1100 dateList.append(thisDate)
1091 1101 pathList.append(single_path)
1092 1102
1093 1103 else:
1094 1104 for single_path in multi_path:
1095 1105
1096 1106 if not os.path.isdir(single_path):
1097 1107 continue
1098 1108
1099 1109 dirList = []
1100 1110
1101 1111 for thisPath in os.listdir(single_path):
1102 1112
1103 1113 if not os.path.isdir(os.path.join(single_path,thisPath)):
1104 1114 continue
1105 1115
1106 1116 if not isRadarFolder(thisPath):
1107 1117 continue
1108 1118
1109 1119 if not isFolderInDateRange(thisPath, startDate, endDate):
1110 1120 continue
1111 1121
1112 1122 dirList.append(thisPath)
1113 1123
1114 1124 if not dirList:
1115 1125 continue
1116 1126
1117 1127 for thisDir in dirList:
1118 1128
1119 1129 datapath = os.path.join(single_path, thisDir, expLabel)
1120 1130 fileList = glob.glob1(datapath, "*"+ext)
1121 1131
1122 1132 if len(fileList) < 1:
1123 1133 continue
1124 1134
1125 1135 thisDate = getDateFromRadarFolder(thisDir)
1126 1136
1127 1137 pathList.append(datapath)
1128 1138 dateList.append(thisDate)
1129 1139
1130 1140 dateList.sort()
1131 1141
1142 if walk:
1143 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1144 else:
1145 pattern_path = multi_path[0]
1146
1147 if not dateList:
1148 print "[Reading] No *%s files in %s from %s to %s" %(ext, pattern_path, startDate, endDate)
1149
1132 1150 if include_path:
1133 1151 return dateList, pathList
1134 1152
1135 1153 return dateList
1136 1154
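# Usage sketch (illustrative, not part of the original source) for findDatafiles();
# VoltageReader is assumed to be a concrete subclass of this reader and the path is hypothetical:
#
#     reader = VoltageReader()
#     dates = reader.findDatafiles('/data/jro', startDate=datetime.date(2014, 7, 1),
#                                  endDate=datetime.date(2014, 7, 3), ext='.r', walk=True)
#     # returns a sorted list of datetime.date objects that have *.r files available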
1137 1155 def setup(self,
1138 1156 path=None,
1139 1157 startDate=None,
1140 1158 endDate=None,
1141 1159 startTime=datetime.time(0,0,0),
1142 1160 endTime=datetime.time(23,59,59),
1143 1161 set=None,
1144 1162 expLabel = "",
1145 1163 ext = None,
1146 1164 online = False,
1147 1165 delay = 60,
1148 1166 walk = True,
1149 1167 getblock = False,
1150 1168 nTxs = 1,
1151 1169 realtime=False):
1152 1170
1153 1171 if path == None:
1154 1172 raise ValueError, "[Reading] The path is not valid"
1155 1173
1156 1174 if ext == None:
1157 1175 ext = self.ext
1158 1176
1159 1177 if online:
1160 1178 print "[Reading] Searching files in online mode..."
1161 1179
1162 1180 for nTries in range( self.nTries ):
1163 1181 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1164 1182
1165 1183 if fullpath:
1166 1184 break
1167 1185
1168 1186 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1169 1187 sleep( self.delay )
1170 1188
1171 1189 if not(fullpath):
1172 1190 print "[Reading] There isn't any valid file in %s" % path
1173 1191 return
1174 1192
1175 1193 self.year = year
1176 1194 self.doy = doy
1177 1195 self.set = set - 1
1178 1196 self.path = path
1179 1197 self.foldercounter = foldercounter
1180 1198 last_set = None
1181 1199
1182 1200 else:
1183 1201 print "[Reading] Searching files in offline mode ..."
1184 1202 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1185 1203 startTime=startTime, endTime=endTime,
1186 1204 set=set, expLabel=expLabel, ext=ext,
1187 1205 walk=walk)
1188 1206
1189 1207 if not(pathList):
1190 print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1191 datetime.datetime.combine(startDate,startTime).ctime(),
1192 datetime.datetime.combine(endDate,endTime).ctime())
1208 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1209 # datetime.datetime.combine(startDate,startTime).ctime(),
1210 # datetime.datetime.combine(endDate,endTime).ctime())
1193 1211
1194 1212 # sys.exit(-1)
1195 1213
1196 1214 self.fileIndex = -1
1197 1215 self.pathList = []
1198 1216 self.filenameList = []
1199 1217 return
1200 1218
1201 1219 self.fileIndex = -1
1202 1220 self.pathList = pathList
1203 1221 self.filenameList = filenameList
1204 1222 file_name = os.path.basename(filenameList[-1])
1205 1223 basename, ext = os.path.splitext(file_name)
1206 1224 last_set = int(basename[-3:])
1207 1225
1208 1226 self.online = online
1209 1227 self.realtime = realtime
1210 1228 self.delay = delay
1211 1229 ext = ext.lower()
1212 1230 self.ext = ext
1213 1231 self.getByBlock = getblock
1214 1232 self.nTxs = int(nTxs)
1215 1233
1216 1234 if not(self.setNextFile()):
1217 1235 if (startDate!=None) and (endDate!=None):
1218 1236 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1219 1237 elif startDate != None:
1220 1238 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1221 1239 else:
1222 1240 print "[Reading] No files"
1223 1241
1224 1242 self.fileIndex = -1
1225 1243 self.pathList = []
1226 1244 self.filenameList = []
1227 1245 return
1228 1246
1229 1247 # self.getBasicHeader()
1230 1248
1231 1249 if last_set != None:
1232 1250 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1233 1251 return
1234 1252
1235 1253 def getBasicHeader(self):
1236 1254
1237 1255 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1238 1256
1239 1257 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1240 1258
1241 1259 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1242 1260
1243 1261 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1244 1262
1245 1263 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1246 1264
1247 1265 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1248 1266
1249 1267 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1250 1268
1251 1269 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
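# Illustrative example (not from the original source): with
# radarControllerHeaderObj.ippSeconds = 0.001 and nTxs = 2, dataOut.ippSeconds
# becomes 0.0005 while dataOut.nProfiles doubles, so the block duration is unchanged.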
1252 1270
1253 1271
1254 1272 def getFirstHeader(self):
1255 1273
1256 raise ValueError, "This method has not been implemented"
1274 raise NotImplementedError
1257 1275
1258 1276 def getData(self):
1259 1277
1260 raise ValueError, "This method has not been implemented"
1278 raise NotImplementedError
1261 1279
1262 1280 def hasNotDataInBuffer(self):
1263 1281
1264 raise ValueError, "This method has not been implemented"
1282 raise NotImplementedError
1265 1283
1266 1284 def readBlock(self):
1267 1285
1268 raise ValueError, "This method has not been implemented"
1286 raise NotImplementedError
1269 1287
1270 1288 def isEndProcess(self):
1271 1289
1272 1290 return self.flagNoMoreFiles
1273 1291
1274 1292 def printReadBlocks(self):
1275 1293
1276 1294 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1277 1295
1278 1296 def printTotalBlocks(self):
1279 1297
1280 1298 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1281 1299
1282 1300 def printNumberOfBlock(self):
1283 1301
1284 1302 if self.flagIsNewBlock:
1285 1303 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1286 1304 self.processingHeaderObj.dataBlocksPerFile,
1287 1305 self.dataOut.datatime.ctime())
1288 1306
1289 1307 def printInfo(self):
1290 1308
1291 1309 if self.__printInfo == False:
1292 1310 return
1293 1311
1294 1312 self.basicHeaderObj.printInfo()
1295 1313 self.systemHeaderObj.printInfo()
1296 1314 self.radarControllerHeaderObj.printInfo()
1297 1315 self.processingHeaderObj.printInfo()
1298 1316
1299 1317 self.__printInfo = False
1300 1318
1301 1319
1302 1320 def run(self, **kwargs):
1303 1321
1304 1322 if not(self.isConfig):
1305 1323
1306 1324 # self.dataOut = dataOut
1307 1325 self.setup(**kwargs)
1308 1326 self.isConfig = True
1309 1327
1310 1328 self.getData()
1311 1329
1312 1330 class JRODataWriter(JRODataIO):
1313 1331
1314 1332 """
1315 1333 This class writes processed data files (.r or .pdata). The data is always
1316 1334 written in blocks.
1317 1335 """
1318 1336
1319 1337 blockIndex = 0
1320 1338
1321 1339 path = None
1322 1340
1323 1341 setFile = None
1324 1342
1325 1343 profilesPerBlock = None
1326 1344
1327 1345 blocksPerFile = None
1328 1346
1329 1347 nWriteBlocks = 0
1330 1348
1331 1349 fileDate = None
1332 1350
1333 1351 def __init__(self, dataOut=None):
1334 raise ValueError, "Not implemented"
1352 raise NotImplementedError
1335 1353
1336 1354
1337 1355 def hasAllDataInBuffer(self):
1338 raise ValueError, "Not implemented"
1356 raise NotImplementedError
1339 1357
1340 1358
1341 1359 def setBlockDimension(self):
1342 raise ValueError, "Not implemented"
1360 raise NotImplementedError
1343 1361
1344 1362
1345 1363 def writeBlock(self):
1346 raise ValueError, "No implemented"
1364 raise NotImplementedError
1347 1365
1348 1366
1349 1367 def putData(self):
1350 raise ValueError, "No implemented"
1368 raise NotImplementedError
1351 1369
1352 1370
1353 1371 def getProcessFlags(self):
1354 1372
1355 1373 processFlags = 0
1356 1374
1357 1375 dtype_index = get_dtype_index(self.dtype)
1358 1376 procflag_dtype = get_procflag_dtype(dtype_index)
1359 1377
1360 1378 processFlags += procflag_dtype
1361 1379
1362 1380 if self.dataOut.flagDecodeData:
1363 1381 processFlags += PROCFLAG.DECODE_DATA
1364 1382
1365 1383 if self.dataOut.flagDeflipData:
1366 1384 processFlags += PROCFLAG.DEFLIP_DATA
1367 1385
1368 1386 if self.dataOut.code is not None:
1369 1387 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1370 1388
1371 1389 if self.dataOut.nCohInt > 1:
1372 1390 processFlags += PROCFLAG.COHERENT_INTEGRATION
1373 1391
1374 1392 if self.dataOut.type == "Spectra":
1375 1393 if self.dataOut.nIncohInt > 1:
1376 1394 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1377 1395
1378 1396 if self.dataOut.data_dc is not None:
1379 1397 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1380 1398
1381 1399 if self.dataOut.flagShiftFFT:
1382 1400 processFlags += PROCFLAG.SHIFT_FFT_DATA
1383 1401
1384 1402 return processFlags
1385 1403
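# Illustrative example (not from the original source): for decoded, coherently
# integrated voltage data stored as float32, getProcessFlags() would return
#     get_procflag_dtype(get_dtype_index(self.dtype)) + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION
# i.e. the dtype flag combined with one flag per processing step that was applied.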
1386 1404 def setBasicHeader(self):
1387 1405
1388 1406 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1389 1407 self.basicHeaderObj.version = self.versionFile
1390 1408 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1391 1409
1392 1410 utc = numpy.floor(self.dataOut.utctime)
1393 1411 milisecond = (self.dataOut.utctime - utc)* 1000.0
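# e.g. (illustrative values) utctime = 1404345600.25 -> utc = 1404345600.0, milisecond = 250.0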
1394 1412
1395 1413 self.basicHeaderObj.utc = utc
1396 1414 self.basicHeaderObj.miliSecond = milisecond
1397 1415 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1398 1416 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1399 1417 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1400 1418
1401 1419 def setFirstHeader(self):
1402 1420 """
1403 1421 Gets a copy of the First Header
1404 1422
1405 1423 Affected:
1406 1424
1407 1425 self.basicHeaderObj
1408 1426 self.systemHeaderObj
1409 1427 self.radarControllerHeaderObj
1410 1428 self.processingHeaderObj
1411 1429
1412 1430 Return:
1413 1431 None
1414 1432 """
1415 1433
1416 raise ValueError, "No implemented"
1434 raise NotImplementedError
1417 1435
1418 1436 def __writeFirstHeader(self):
1419 1437 """
1420 1438 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1421 1439
1422 1440 Affected:
1423 1441 __dataType
1424 1442
1425 1443 Return:
1426 1444 None
1427 1445 """
1428 1446
1429 1447 # Compute parameters
1430 1448
1431 1449 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1432 1450 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1433 1451
1434 1452 self.basicHeaderObj.write(self.fp)
1435 1453 self.systemHeaderObj.write(self.fp)
1436 1454 self.radarControllerHeaderObj.write(self.fp)
1437 1455 self.processingHeaderObj.write(self.fp)
1438 1456
1439 1457 def __setNewBlock(self):
1440 1458 """
1441 1459 If this is a new file it writes the First Header; otherwise it writes only the Basic Header
1442 1460 
1443 1461 Return:
1444 1462 0 : if nothing could be written
1445 1463 1 : if the Basic or the First Header was written
1446 1464 """
1447 1465 if self.fp == None:
1448 1466 self.setNextFile()
1449 1467
1450 1468 if self.flagIsNewFile:
1451 1469 return 1
1452 1470
1453 1471 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1454 1472 self.basicHeaderObj.write(self.fp)
1455 1473 return 1
1456 1474
1457 1475 if not( self.setNextFile() ):
1458 1476 return 0
1459 1477
1460 1478 return 1
1461 1479
1462 1480
1463 1481 def writeNextBlock(self):
1464 1482 """
1465 1483 Selects the next data block and writes it to a file
1466 1484 
1467 1485 Return:
1468 1486 0 : if the data block could not be written
1469 1487 1 : if the data block was written
1470 1488 """
1471 1489 if not( self.__setNewBlock() ):
1472 1490 return 0
1473 1491
1474 1492 self.writeBlock()
1475 1493
1476 1494 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1477 1495 self.processingHeaderObj.dataBlocksPerFile)
1478 1496
1479 1497 return 1
1480 1498
1481 1499 def setNextFile(self):
1482 1500 """
1483 1501 Determines the next file to be written
1484 1502
1485 1503 Affected:
1486 1504 self.filename
1487 1505 self.subfolder
1488 1506 self.fp
1489 1507 self.setFile
1490 1508 self.flagIsNewFile
1491 1509
1492 1510 Return:
1493 1511 0 : if the file cannot be written
1494 1512 1 : if the file is ready to be written
1495 1513 """
1496 1514 ext = self.ext
1497 1515 path = self.path
1498 1516
1499 1517 if self.fp != None:
1500 1518 self.fp.close()
1501 1519
1502 1520 timeTuple = time.localtime( self.dataOut.utctime)
1503 1521 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1504 1522
1505 1523 fullpath = os.path.join( path, subfolder )
1506 1524 setFile = self.setFile
1507 1525
1508 1526 if not( os.path.exists(fullpath) ):
1509 1527 os.mkdir(fullpath)
1510 1528 setFile = -1 # initialize the set counter
1511 1529 else:
1512 1530 filesList = os.listdir( fullpath )
1513 1531 if len( filesList ) > 0:
1514 1532 filesList = sorted( filesList, key=str.lower )
1515 1533 filen = filesList[-1]
1516 1534 # the filename must have the following format
1517 1535 # 0 1234 567 89A BCDE (hex)
1518 1536 # x YYYY DDD SSS .ext
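# e.g. (illustrative) 'D2014185012.r' -> optchar 'D', year 2014, doy 185, set 012,
# so filen[8:11] picks up the three-digit set number '012'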
1519 1537 if isNumber( filen[8:11] ):
1520 1538 setFile = int( filen[8:11] ) # initialize the set counter to the set of the last file
1521 1539 else:
1522 1540 setFile = -1
1523 1541 else:
1524 1542 setFile = -1 # initialize the set counter
1525 1543
1526 1544 setFile += 1
1527 1545
1528 1546 #If this is a new day it resets some values
1529 1547 if self.dataOut.datatime.date() > self.fileDate:
1530 1548 setFile = 0
1531 1549 self.nTotalBlocks = 0
1532 1550
1533 1551 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1534 1552
1535 1553 filename = os.path.join( path, subfolder, filen )
1536 1554
1537 1555 fp = open( filename,'wb' )
1538 1556
1539 1557 self.blockIndex = 0
1540 1558
1541 1559 # saving attributes
1542 1560 self.filename = filename
1543 1561 self.subfolder = subfolder
1544 1562 self.fp = fp
1545 1563 self.setFile = setFile
1546 1564 self.flagIsNewFile = 1
1547 1565 self.fileDate = self.dataOut.datatime.date()
1548 1566
1549 1567 self.setFirstHeader()
1550 1568
1551 1569 print '[Writing] Opening file: %s'%self.filename
1552 1570
1553 1571 self.__writeFirstHeader()
1554 1572
1555 1573 return 1
1556 1574
1557 1575 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1558 1576 """
1559 1577 Sets the format in which the data will be saved and writes the First Header
1560 1578
1561 1579 Inputs:
1562 1580 path : directory where data will be saved
1563 1581 profilesPerBlock : number of profiles per block
1564 1582 set : initial file set
1565 1583 datatype : An integer number that defines data type:
1566 1584 0 : int8 (1 byte)
1567 1585 1 : int16 (2 bytes)
1568 1586 2 : int32 (4 bytes)
1569 1587 3 : int64 (8 bytes)
1570 1588 4 : float32 (4 bytes)
1571 1589 5 : float64 (8 bytes)
1572 1590
1573 1591 Return:
1574 1592 0 : if the setup was not successful
1575 1593 1 : if the setup was successful
1576 1594 """
1577 1595
1578 1596 if ext == None:
1579 1597 ext = self.ext
1580 1598
1581 1599 self.ext = ext.lower()
1582 1600
1583 1601 self.path = path
1584 1602
1585 1603 if set is None:
1586 1604 self.setFile = -1
1587 1605 else:
1588 1606 self.setFile = set - 1
1589 1607
1590 1608 self.blocksPerFile = blocksPerFile
1591 1609
1592 1610 self.profilesPerBlock = profilesPerBlock
1593 1611
1594 1612 self.dataOut = dataOut
1595 1613 self.fileDate = self.dataOut.datatime.date()
1596 1614 #By default
1597 1615 self.dtype = self.dataOut.dtype
1598 1616
1599 1617 if datatype is not None:
1600 1618 self.dtype = get_numpy_dtype(datatype)
1601 1619
1602 1620 if not(self.setNextFile()):
1603 1621 print "[Writing] There isn't a next file"
1604 1622 return 0
1605 1623
1606 1624 self.setBlockDimension()
1607 1625
1608 1626 return 1
1609 1627
1610 1628 def run(self, dataOut, **kwargs):
1611 1629
1612 1630 if not(self.isConfig):
1613 1631
1614 1632 self.setup(dataOut, **kwargs)
1615 1633 self.isConfig = True
1616 1634
1617 1635 self.putData()
1618 1636
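# Usage sketch (illustrative, not part of the original source) for a JRODataWriter
# subclass; VoltageWriter and the output path are assumptions, not repository facts:
#
#     writer = VoltageWriter()
#     writer.run(dataOut, path='/data/out', blocksPerFile=100, profilesPerBlock=64, datatype=4)
#     # run() calls setup() on the first invocation and then putData() on every call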
@@ -1,845 +1,846
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import os, sys
8 8 import time, datetime
9 9 import numpy
10 10 import fnmatch
11 11 import glob
12 12 from time import sleep
13 13
14 14 try:
15 15 import pyfits
16 16 except ImportError, e:
17 17 print "Fits data cannot be used. Install pyfits module"
18 18
19 19 from xml.etree.ElementTree import ElementTree
20 20
21 21 from jroIO_base import isRadarFolder, isNumber
22 22 from schainpy.model.data.jrodata import Fits
23 23 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
24 24
25 25 class PyFits(object):
26 26 name=None
27 27 format=None
28 28 array =None
29 29 data =None
30 30 thdulist=None
31 31 prihdr=None
32 32 hdu=None
33 33
34 34 def __init__(self):
35 35
36 36 pass
37 37
38 38 def setColF(self,name,format,array):
39 39 self.name=name
40 40 self.format=format
41 41 self.array=array
42 42 a1=numpy.array([self.array],dtype=numpy.float32)
43 43 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
44 44 return self.col1
45 45
46 46 # def setColP(self,name,format,data):
47 47 # self.name=name
48 48 # self.format=format
49 49 # self.data=data
50 50 # a2=numpy.array([self.data],dtype=numpy.float32)
51 51 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
52 52 # return self.col2
53 53
54 54
55 55 def writeData(self,name,format,data):
56 56 self.name=name
57 57 self.format=format
58 58 self.data=data
59 59 a2=numpy.array([self.data],dtype=numpy.float32)
60 60 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
61 61 return self.col2
62 62
63 63 def cFImage(self,idblock,year,month,day,hour,minute,second):
64 64 self.hdu= pyfits.PrimaryHDU(idblock)
65 65 self.hdu.header.set("Year",year)
66 66 self.hdu.header.set("Month",month)
67 67 self.hdu.header.set("Day",day)
68 68 self.hdu.header.set("Hour",hour)
69 69 self.hdu.header.set("Minute",minute)
70 70 self.hdu.header.set("Second",second)
71 71 return self.hdu
72 72
73 73
74 74 def Ctable(self,colList):
75 75 self.cols=pyfits.ColDefs(colList)
76 76 self.tbhdu = pyfits.new_table(self.cols)
77 77 return self.tbhdu
78 78
79 79
80 80 def CFile(self,hdu,tbhdu):
81 81 self.thdulist=pyfits.HDUList([hdu,tbhdu])
82 82
83 83 def wFile(self,filename):
84 84 if os.path.isfile(filename):
85 85 os.remove(filename)
86 86 self.thdulist.writeto(filename)
87 87
88 88
89 89 class ParameterConf:
90 90 ELEMENTNAME = 'Parameter'
91 91 def __init__(self):
92 92 self.name = ''
93 93 self.value = ''
94 94
95 95 def readXml(self, parmElement):
96 96 self.name = parmElement.get('name')
97 97 self.value = parmElement.get('value')
98 98
99 99 def getElementName(self):
100 100 return self.ELEMENTNAME
101 101
102 102 class Metadata(object):
103 103
104 104 def __init__(self, filename):
105 105 self.parmConfObjList = []
106 106 self.readXml(filename)
107 107
108 108 def readXml(self, filename):
109 109 self.projectElement = None
110 110 self.procUnitConfObjDict = {}
111 111 self.projectElement = ElementTree().parse(filename)
112 112 self.project = self.projectElement.tag
113 113
114 114 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
115 115
116 116 for parmElement in parmElementList:
117 117 parmConfObj = ParameterConf()
118 118 parmConfObj.readXml(parmElement)
119 119 self.parmConfObjList.append(parmConfObj)
120 120
121 121 class FitsWriter(Operation):
122 122
123 123 def __init__(self):
124 124 self.isConfig = False
125 125 self.dataBlocksPerFile = None
126 126 self.blockIndex = 0
127 127 self.flagIsNewFile = 1
128 128 self.fitsObj = None
129 129 self.optchar = 'P'
130 130 self.ext = '.fits'
131 131 self.setFile = 0
132 132
133 133 def setFitsHeader(self, dataOut, metadatafile=None):
134 134
135 135 header_data = pyfits.PrimaryHDU()
136 136
137 137 header_data.header['EXPNAME'] = "RADAR DATA"
138 138 header_data.header['DATATYPE'] = "SPECTRA"
139 139 header_data.header['COMMENT'] = ""
140 140
141 141 if metadatafile:
142 142
143 143 metadata4fits = Metadata(metadatafile)
144 144
145 145 for parameter in metadata4fits.parmConfObjList:
146 146 parm_name = parameter.name
147 147 parm_value = parameter.value
148 148
149 149 header_data.header[parm_name] = parm_value
150 150
151 151 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
152 152 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
153 153 header_data.header['NCHANNELS'] = dataOut.nChannels
154 154 #header_data.header['HEIGHTS'] = dataOut.heightList
155 155 header_data.header['NHEIGHTS'] = dataOut.nHeights
156 156
157 157 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
158 158 header_data.header['NCOHINT'] = dataOut.nCohInt
159 159 header_data.header['NINCOHINT'] = dataOut.nIncohInt
160 160 header_data.header['TIMEZONE'] = dataOut.timeZone
161 161 header_data.header['NBLOCK'] = self.blockIndex
162 162
163 163 header_data.writeto(self.filename)
164 164
165 165 self.addExtension(dataOut.heightList,'HEIGHTLIST')
166 166
167 167
168 168 def setup(self, dataOut, path, dataBlocksPerFile=100, metadatafile=None):
169 169
170 170 self.path = path
171 171 self.dataOut = dataOut
172 172 self.metadatafile = metadatafile
173 173 self.dataBlocksPerFile = dataBlocksPerFile
174 174
175 175 def open(self):
176 176 self.fitsObj = pyfits.open(self.filename, mode='update')
177 177
178 178
179 179 def addExtension(self, data, tagname):
180 180 self.open()
181 181 extension = pyfits.ImageHDU(data=data, name=tagname)
182 182 #extension.header['TAG'] = tagname
183 183 self.fitsObj.append(extension)
184 184 self.write()
185 185
186 186 def addData(self, data):
187 187 self.open()
188 188 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
189 189 extension.header['UTCTIME'] = self.dataOut.utctime
190 190 self.fitsObj.append(extension)
191 191 self.blockIndex += 1
192 192 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
193 193
194 194 self.write()
195 195
196 196 def write(self):
197 197
198 198 self.fitsObj.flush(verbose=True)
199 199 self.fitsObj.close()
200 200
201 201
202 202 def setNextFile(self):
203 203
204 204 ext = self.ext
205 205 path = self.path
206 206
207 207 timeTuple = time.localtime( self.dataOut.utctime)
208 208 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
209 209
210 210 fullpath = os.path.join( path, subfolder )
211 211 if not( os.path.exists(fullpath) ):
212 212 os.mkdir(fullpath)
213 213 self.setFile = -1 # initialize the set counter
214 214 else:
215 215 filesList = os.listdir( fullpath )
216 216 if len( filesList ) > 0:
217 217 filesList = sorted( filesList, key=str.lower )
218 218 filen = filesList[-1]
219 219
220 220 if isNumber( filen[8:11] ):
221 221 self.setFile = int( filen[8:11] ) # initialize the set counter to the set of the last file
222 222 else:
223 223 self.setFile = -1
224 224 else:
225 225 self.setFile = -1 # initialize the set counter
226 226
227 227 setFile = self.setFile
228 228 setFile += 1
229 229
230 230 thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
231 231 timeTuple.tm_year,
232 232 timeTuple.tm_yday,
233 233 setFile,
234 234 ext )
235 235
236 236 filename = os.path.join( path, subfolder, thisFile )
237 237
238 238 self.blockIndex = 0
239 239 self.filename = filename
240 240 self.setFile = setFile
241 241 self.flagIsNewFile = 1
242 242
243 243 print 'Writing the file: %s'%self.filename
244 244
245 245 self.setFitsHeader(self.dataOut, self.metadatafile)
246 246
247 247 return 1
248 248
249 249 def writeBlock(self):
250 250 self.addData(self.dataOut.data_spc)
251 251 self.flagIsNewFile = 0
252 252
253 253
254 254 def __setNewBlock(self):
255 255
256 256 if self.flagIsNewFile:
257 257 return 1
258 258
259 259 if self.blockIndex < self.dataBlocksPerFile:
260 260 return 1
261 261
262 262 if not( self.setNextFile() ):
263 263 return 0
264 264
265 265 return 1
266 266
267 267 def writeNextBlock(self):
268 268 if not( self.__setNewBlock() ):
269 269 return 0
270 270 self.writeBlock()
271 271 return 1
272 272
273 273 def putData(self):
274 274 if self.flagIsNewFile:
275 275 self.setNextFile()
276 276 self.writeNextBlock()
277 277
278 278 def run(self, dataOut, **kwargs):
279 279 if not(self.isConfig):
280 280 self.setup(dataOut, **kwargs)
281 281 self.isConfig = True
282 282 self.putData()
283 283
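# Usage sketch (illustrative, not part of the original source) for FitsWriter;
# the output path and metadata file name are hypothetical:
#
#     writer = FitsWriter()
#     writer.run(dataOut, path='/data/fits', dataBlocksPerFile=100, metadatafile='metadata.xml')
#     # each call appends one spectra block; a new FITS file is started every 100 blocks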
284 284
285 285 class FitsReader(ProcessingUnit):
286 286
287 287 # __TIMEZONE = time.timezone
288 288
289 289 expName = None
290 290 datetimestr = None
291 291 utc = None
292 292 nChannels = None
293 293 nSamples = None
294 294 dataBlocksPerFile = None
295 295 comments = None
296 296 lastUTTime = None
297 297 header_dict = None
298 298 data = None
299 299 data_header_dict = None
300 300
301 301 def __init__(self):
302 302 self.isConfig = False
303 303 self.ext = '.fits'
304 304 self.setFile = 0
305 305 self.flagNoMoreFiles = 0
306 306 self.flagIsNewFile = 1
307 307 self.flagDiscontinuousBlock = None
308 308 self.fileIndex = None
309 309 self.filename = None
310 310 self.fileSize = None
311 311 self.fitsObj = None
312 312 self.timeZone = None
313 313 self.nReadBlocks = 0
314 314 self.nTotalBlocks = 0
315 315 self.dataOut = self.createObjByDefault()
316 316 self.maxTimeStep = 10 # should be defined by the user via the setup() method
317 317 self.blockIndex = 1
318 318
319 319 def createObjByDefault(self):
320 320
321 321 dataObj = Fits()
322 322
323 323 return dataObj
324 324
325 325 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
326 326 try:
327 327 fitsObj = pyfits.open(filename,'readonly')
328 328 except:
329 raise IOError, "The file %s can't be opened" %(filename)
329 print "File %s can't be opened" %(filename)
330 return None
330 331
331 332 header = fitsObj[0].header
332 333 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
333 334 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
334 335
335 336 ltc = utc
336 337 if useLocalTime:
337 338 ltc -= time.timezone
338 339 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
339 340 thisTime = thisDatetime.time()
340 341
341 342 if not ((startTime <= thisTime) and (endTime > thisTime)):
342 343 return None
343 344
344 345 return thisDatetime
345 346
346 347 def __setNextFileOnline(self):
347 raise ValueError, "No implemented"
348 raise NotImplementedError
348 349
349 350 def __setNextFileOffline(self):
350 351 idFile = self.fileIndex
351 352
352 353 while (True):
353 354 idFile += 1
354 355 if not(idFile < len(self.filenameList)):
355 356 self.flagNoMoreFiles = 1
356 357 print "No more Files"
357 358 return 0
358 359
359 360 filename = self.filenameList[idFile]
360 361
361 362 # if not(self.__verifyFile(filename)):
362 363 # continue
363 364
364 365 fileSize = os.path.getsize(filename)
365 366 fitsObj = pyfits.open(filename,'readonly')
366 367 break
367 368
368 369 self.flagIsNewFile = 1
369 370 self.fileIndex = idFile
370 371 self.filename = filename
371 372 self.fileSize = fileSize
372 373 self.fitsObj = fitsObj
373 374 self.blockIndex = 0
374 375 print "Setting the file: %s"%self.filename
375 376
376 377 return 1
377 378
378 379 def __setValuesFromHeader(self):
379 380
380 381 self.dataOut.header = self.header_dict
381 382 self.dataOut.expName = self.expName
382 383
383 384 self.dataOut.timeZone = self.timeZone
384 385 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
385 386 self.dataOut.comments = self.comments
386 387 # self.dataOut.timeInterval = self.timeInterval
387 388 self.dataOut.channelList = self.channelList
388 389 self.dataOut.heightList = self.heightList
389 390
390 391 self.dataOut.nCohInt = self.nCohInt
391 392 self.dataOut.nIncohInt = self.nIncohInt
392 393
393 394 self.dataOut.ippSeconds = self.ippSeconds
394 395
395 396 def readHeader(self):
396 397 headerObj = self.fitsObj[0]
397 398
398 399 self.header_dict = headerObj.header
399 400 if 'EXPNAME' in headerObj.header.keys():
400 401 self.expName = headerObj.header['EXPNAME']
401 402
402 403 if 'DATATYPE' in headerObj.header.keys():
403 404 self.dataType = headerObj.header['DATATYPE']
404 405
405 406 self.datetimestr = headerObj.header['DATETIME']
406 407 channelList = headerObj.header['CHANNELLIST']
407 408 channelList = channelList.split('[')
408 409 channelList = channelList[1].split(']')
409 410 channelList = channelList[0].split(',')
410 411 channelList = [int(ch) for ch in channelList]
411 412 self.channelList = channelList
412 413 self.nChannels = headerObj.header['NCHANNELS']
413 414 self.nHeights = headerObj.header['NHEIGHTS']
414 415 self.ippSeconds = headerObj.header['IPPSECONDS']
415 416 self.nCohInt = headerObj.header['NCOHINT']
416 417 self.nIncohInt = headerObj.header['NINCOHINT']
417 418 self.dataBlocksPerFile = headerObj.header['NBLOCK']
418 419 self.timeZone = headerObj.header['TIMEZONE']
419 420
420 421 # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
421 422
422 423 if 'COMMENT' in headerObj.header.keys():
423 424 self.comments = headerObj.header['COMMENT']
424 425
425 426 self.readHeightList()
426 427
427 428 def readHeightList(self):
428 429 self.blockIndex = self.blockIndex + 1
429 430 obj = self.fitsObj[self.blockIndex]
430 431 self.heightList = obj.data
431 432 self.blockIndex = self.blockIndex + 1
432 433
433 434 def readExtension(self):
434 435 obj = self.fitsObj[self.blockIndex]
435 436 self.heightList = obj.data
436 437 self.blockIndex = self.blockIndex + 1
437 438
438 439 def setNextFile(self):
439 440
440 441 if self.online:
441 442 newFile = self.__setNextFileOnline()
442 443 else:
443 444 newFile = self.__setNextFileOffline()
444 445
445 446 if not(newFile):
446 447 return 0
447 448
448 449 self.readHeader()
449 450 self.__setValuesFromHeader()
450 451 self.nReadBlocks = 0
451 452 # self.blockIndex = 1
452 453 return 1
453 454
454 455 def __searchFilesOffLine(self,
455 456 path,
456 457 startDate,
457 458 endDate,
458 459 startTime=datetime.time(0,0,0),
459 460 endTime=datetime.time(23,59,59),
460 461 set=None,
461 462 expLabel='',
462 463 ext='.fits',
463 464 walk=True):
464 465
465 466 pathList = []
466 467
467 468 if not walk:
468 469 pathList.append(path)
469 470
470 471 else:
471 472 dirList = []
472 473 for thisPath in os.listdir(path):
473 474 if not os.path.isdir(os.path.join(path,thisPath)):
474 475 continue
475 476 if not isRadarFolder(thisPath):
476 477 continue
477 478
478 479 dirList.append(thisPath)
479 480
480 481 if not(dirList):
481 482 return None, None
482 483
483 484 thisDate = startDate
484 485
485 486 while(thisDate <= endDate):
486 487 year = thisDate.timetuple().tm_year
487 488 doy = thisDate.timetuple().tm_yday
488 489
489 490 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
490 491 if len(matchlist) == 0:
491 492 thisDate += datetime.timedelta(1)
492 493 continue
493 494 for match in matchlist:
494 495 pathList.append(os.path.join(path,match,expLabel))
495 496
496 497 thisDate += datetime.timedelta(1)
497 498
498 499 if pathList == []:
499 500 print "No folders were found for the date range: %s-%s" %(startDate, endDate)
500 501 return None, None
501 502
502 503 print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
503 504
504 505 filenameList = []
505 506 datetimeList = []
506 507
507 508 for i in range(len(pathList)):
508 509
509 510 thisPath = pathList[i]
510 511
511 512 fileList = glob.glob1(thisPath, "*%s" %ext)
512 513 fileList.sort()
513 514
514 515 for thisFile in fileList:
515 516
516 517 filename = os.path.join(thisPath,thisFile)
517 518 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
518 519
519 520 if not(thisDatetime):
520 521 continue
521 522
522 523 filenameList.append(filename)
523 524 datetimeList.append(thisDatetime)
524 525
525 526 if not(filenameList):
526 527 print "No files were found for the time range %s - %s" %(startTime, endTime)
527 528 return None, None
528 529
529 530 print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
530 531 print
531 532
532 533 for i in range(len(filenameList)):
533 534 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
534 535
535 536 self.filenameList = filenameList
536 537 self.datetimeList = datetimeList
537 538
538 539 return pathList, filenameList
539 540
540 541 def setup(self, path=None,
541 542 startDate=None,
542 543 endDate=None,
543 544 startTime=datetime.time(0,0,0),
544 545 endTime=datetime.time(23,59,59),
545 546 set=0,
546 547 expLabel = "",
547 548 ext = None,
548 549 online = False,
549 550 delay = 60,
550 551 walk = True):
551 552
552 553 if path == None:
553 554 raise ValueError, "The path is not valid"
554 555
555 556 if ext == None:
556 557 ext = self.ext
557 558
558 559 if not(online):
559 560 print "Searching files in offline mode ..."
560 561 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
561 562 startTime=startTime, endTime=endTime,
562 563 set=set, expLabel=expLabel, ext=ext,
563 564 walk=walk)
564 565
565 566 if not(pathList):
566 567 print "No *%s files in the folder %s \nfor the range: %s - %s"%(ext, path,
567 568 datetime.datetime.combine(startDate,startTime).ctime(),
568 569 datetime.datetime.combine(endDate,endTime).ctime())
569 570
570 571 sys.exit(-1)
571 572
572 573 self.fileIndex = -1
573 574 self.pathList = pathList
574 575 self.filenameList = filenameList
575 576
576 577 self.online = online
577 578 self.delay = delay
578 579 ext = ext.lower()
579 580 self.ext = ext
580 581
581 582 if not(self.setNextFile()):
582 583 if (startDate!=None) and (endDate!=None):
583 584 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
584 585 elif startDate != None:
585 586 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
586 587 else:
587 588 print "No files"
588 589
589 590 sys.exit(-1)
590 591
591 592
592 593
593 594 def readBlock(self):
594 595 dataObj = self.fitsObj[self.blockIndex]
595 596
596 597 self.data = dataObj.data
597 598 self.data_header_dict = dataObj.header
598 599 self.utc = self.data_header_dict['UTCTIME']
599 600
600 601 self.flagIsNewFile = 0
601 602 self.blockIndex += 1
602 603 self.nTotalBlocks += 1
603 604 self.nReadBlocks += 1
604 605
605 606 return 1
606 607
607 608 def __jumpToLastBlock(self):
608 raise ValueError, "No implemented"
609 raise NotImplementedError
609 610
610 611 def __waitNewBlock(self):
611 612 """
612 613 Returns 1 if a new data block was found, 0 otherwise.
613 614 
614 615 If the reading mode is offline it always returns 0
615 616 """
616 617 if not self.online:
617 618 return 0
618 619
619 620 if (self.nReadBlocks >= self.dataBlocksPerFile):
620 621 return 0
621 622
622 623 currentPointer = self.fp.tell()
623 624
624 625 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
625 626
626 627 for nTries in range( self.nTries ):
627 628
628 629 self.fp.close()
629 630 self.fp = open( self.filename, 'rb' )
630 631 self.fp.seek( currentPointer )
631 632
632 633 self.fileSize = os.path.getsize( self.filename )
633 634 currentSize = self.fileSize - currentPointer
634 635
635 636 if ( currentSize >= neededSize ):
636 637 self.__rdBasicHeader()
637 638 return 1
638 639
639 640 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
640 641 sleep( self.delay )
641 642
642 643
643 644 return 0
644 645
645 646 def __setNewBlock(self):
646 647
647 648 if self.online:
648 649 self.__jumpToLastBlock()
649 650
650 651 if self.flagIsNewFile:
651 652 return 1
652 653
653 654 self.lastUTTime = self.utc
654 655
655 656 if self.online:
656 657 if self.__waitNewBlock():
657 658 return 1
658 659
659 660 if self.nReadBlocks < self.dataBlocksPerFile:
660 661 return 1
661 662
662 663 if not(self.setNextFile()):
663 664 return 0
664 665
665 666 deltaTime = self.utc - self.lastUTTime
666 667
667 668 self.flagDiscontinuousBlock = 0
668 669
669 670 if deltaTime > self.maxTimeStep:
670 671 self.flagDiscontinuousBlock = 1
671 672
672 673 return 1
673 674
674 675
675 676 def readNextBlock(self):
676 677 if not(self.__setNewBlock()):
677 678 return 0
678 679
679 680 if not(self.readBlock()):
680 681 return 0
681 682
682 683 return 1
683 684
684 685 def printInfo(self):
685 686
686 687 pass
687 688
688 689 def getData(self):
689 690
690 691 if self.flagNoMoreFiles:
691 692 self.dataOut.flagNoData = True
692 693 print 'Process finished'
693 694 return 0
694 695
695 696 self.flagDiscontinuousBlock = 0
696 697 self.flagIsNewBlock = 0
697 698
698 699 if not(self.readNextBlock()):
699 700 return 0
700 701
701 702 if self.data is None:
702 703 self.dataOut.flagNoData = True
703 704 return 0
704 705
705 706 self.dataOut.data = self.data
706 707 self.dataOut.data_header = self.data_header_dict
707 708 self.dataOut.utctime = self.utc
708 709
709 710 # self.dataOut.header = self.header_dict
710 711 # self.dataOut.expName = self.expName
711 712 # self.dataOut.nChannels = self.nChannels
712 713 # self.dataOut.timeZone = self.timeZone
713 714 # self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
714 715 # self.dataOut.comments = self.comments
715 716 # # self.dataOut.timeInterval = self.timeInterval
716 717 # self.dataOut.channelList = self.channelList
717 718 # self.dataOut.heightList = self.heightList
718 719 self.dataOut.flagNoData = False
719 720
720 721 return self.dataOut.data
721 722
722 723 def run(self, **kwargs):
723 724
724 725 if not(self.isConfig):
725 726 self.setup(**kwargs)
726 727 self.isConfig = True
727 728
728 729 self.getData()
729 730
730 731 class SpectraHeisWriter(Operation):
731 732 # set = None
732 733 setFile = None
733 734 idblock = None
734 735 doypath = None
735 736 subfolder = None
736 737
737 738 def __init__(self):
738 739 self.wrObj = PyFits()
739 740 # self.dataOut = dataOut
740 741 self.nTotalBlocks=0
741 742 # self.set = None
742 743 self.setFile = None
743 744 self.idblock = 0
744 745 self.wrpath = None
745 746 self.doypath = None
746 747 self.subfolder = None
747 748 self.isConfig = False
748 749
749 750 def isNumber(str):
750 751 """
751 752 Checks whether the characters of a string can be converted to a number.
752 753 
753 754 Exceptions:
754 755 If a given string cannot be converted to a number
755 756 Input:
756 757 str, string to be analyzed to determine whether it can be converted to a number or not
757 758 
758 759 Return:
759 760 True : if the string is numeric
760 761 False : if the string is not numeric
761 762 """
762 763 try:
763 764 float( str )
764 765 return True
765 766 except:
766 767 return False
767 768
768 769 def setup(self, dataOut, wrpath):
769 770
770 771 if not(os.path.exists(wrpath)):
771 772 os.mkdir(wrpath)
772 773
773 774 self.wrpath = wrpath
774 775 # self.setFile = 0
775 776 self.dataOut = dataOut
776 777
777 778 def putData(self):
778 779 name= time.localtime( self.dataOut.utctime)
779 780 ext=".fits"
780 781
781 782 if self.doypath == None:
782 783 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
783 784 self.doypath = os.path.join( self.wrpath, self.subfolder )
784 785 os.mkdir(self.doypath)
785 786
786 787 if self.setFile == None:
787 788 # self.set = self.dataOut.set
788 789 self.setFile = 0
789 790 # if self.set != self.dataOut.set:
790 791 ## self.set = self.dataOut.set
791 792 # self.setFile = 0
792 793
793 794 #make the filename
794 795 thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
795 796
796 797 filename = os.path.join(self.wrpath,self.subfolder, thisFile)
797 798
798 799 idblock = numpy.array([self.idblock],dtype="int64")
799 800 header=self.wrObj.cFImage(idblock=idblock,
800 801 year=time.gmtime(self.dataOut.utctime).tm_year,
801 802 month=time.gmtime(self.dataOut.utctime).tm_mon,
802 803 day=time.gmtime(self.dataOut.utctime).tm_mday,
803 804 hour=time.gmtime(self.dataOut.utctime).tm_hour,
804 805 minute=time.gmtime(self.dataOut.utctime).tm_min,
805 806 second=time.gmtime(self.dataOut.utctime).tm_sec)
806 807
807 808 c=3E8
808 809 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
809 810 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
810 811
811 812 colList = []
812 813
813 814 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
814 815
815 816 colList.append(colFreq)
816 817
817 818 nchannel=self.dataOut.nChannels
818 819
819 820 for i in range(nchannel):
820 821 col = self.wrObj.writeData(name="PCh"+str(i+1),
821 822 format=str(self.dataOut.nFFTPoints)+'E',
822 823 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
823 824
824 825 colList.append(col)
825 826
826 827 data=self.wrObj.Ctable(colList=colList)
827 828
828 829 self.wrObj.CFile(header,data)
829 830
830 831 self.wrObj.wFile(filename)
831 832
832 833 #update the setFile
833 834 self.setFile += 1
834 835 self.idblock += 1
835 836
836 837 return 1
837 838
838 839 def run(self, dataOut, **kwargs):
839 840
840 841 if not(self.isConfig):
841 842
842 843 self.setup(dataOut, **kwargs)
843 844 self.isConfig = True
844 845
845 846 self.putData() No newline at end of file
@@ -1,591 +1,591
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import datetime
8 8 import numpy
9 9
10 10 try:
11 11 from gevent import sleep
12 12 except:
13 13 from time import sleep
14 14
15 15 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
16 16 from schainpy.model.data.jrodata import Voltage
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
18 18
19 19 try:
20 20 import digital_rf_hdf5
21 21 except:
22 22 print 'You should install "digital_rf_hdf5" module if you want to read USRP data'
23 23
24 24 class USRPReader(ProcessingUnit):
25 25 '''
26 26 classdocs
27 27 '''
28 28
29 29 def __init__(self):
30 30 '''
31 31 Constructor
32 32 '''
33 33
34 34 ProcessingUnit.__init__(self)
35 35
36 36 self.dataOut = Voltage()
37 37 self.__printInfo = True
38 38 self.__flagDiscontinuousBlock = False
39 39 self.__bufferIndex = 9999999
40 40
41 41 self.__ippKm = None
42 42 self.__codeType = 0
43 43 self.__nCode = None
44 44 self.__nBaud = None
45 45 self.__code = None
46 46
47 47 def __getCurrentSecond(self):
48 48
49 49 return self.__thisUnixSample/self.__sample_rate
50 50
51 51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 52
53 53 def __setFileHeader(self):
54 54 '''
54 54 In this method every parameter of the dataOut object (header, no data) will be initialized
56 56 '''
57 57 nProfiles = self.__sample_rate #Number of profiles per second
58 58
59 59 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
60 60 txA=0,
61 61 txB=0,
62 62 nWindows=1,
63 63 nHeights=self.__nSamples,
64 64 firstHeight=self.__firstHeigth,
65 65 deltaHeight=self.__deltaHeigth,
66 66 codeType=self.__codeType,
67 67 nCode=self.__nCode, nBaud=self.__nBaud,
68 68 code = self.__code)
69 69
70 70 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
71 71 nProfiles=nProfiles,
72 72 nChannels=len(self.__channelList),
73 73 adcResolution=14)
74 74
75 75 self.dataOut.type = "Voltage"
76 76
77 77 self.dataOut.data = None
78 78
79 79 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
80 80
81 81 # self.dataOut.nChannels = 0
82 82
83 83 # self.dataOut.nHeights = 0
84 84
85 85 self.dataOut.nProfiles = nProfiles
86 86
87 87 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
88 88
89 89 self.dataOut.channelList = self.__channelList
90 90
91 91 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
92 92
93 93 # self.dataOut.channelIndexList = None
94 94
95 95 self.dataOut.flagNoData = True
96 96
97 97 #Set to TRUE if the data is discontinuous
98 98 self.dataOut.flagDiscontinuousBlock = False
99 99
100 100 self.dataOut.utctime = None
101 101
102 102 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
103 103
104 104 self.dataOut.dstFlag = 0
105 105
106 106 self.dataOut.errorCount = 0
107 107
108 108 self.dataOut.nCohInt = 1
109 109
110 110 self.dataOut.flagDecodeData = False # assume the data is already decoded
111 111
112 112 self.dataOut.flagDeflipData = False # assume the data is not flipped
113 113
114 114 self.dataOut.flagShiftFFT = False
115 115
116 116 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
117 117
118 118 #Time interval between profiles
119 119 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
120 120
121 121 self.dataOut.frequency = self.__frequency
122 122
123 123 self.dataOut.realtime = self.__online
124 124
125 125 def findDatafiles(self, path, startDate=None, endDate=None):
126 126
127 127 if not os.path.isdir(path):
128 128 return []
129 129
130 130 try:
131 131 digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
132 132 except:
133 133 digitalReadObj = digital_rf_hdf5.read_hdf5(path)
134 134
135 135 channelNameList = digitalReadObj.get_channels()
136 136
137 137 if not channelNameList:
138 138 return []
139 139
140 140 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
141 141
142 142 sample_rate = metadata_dict['sample_rate'][0]
143 143
144 144 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
145 145
146 146 try:
147 147 timezone = this_metadata_file['timezone'].value
148 148 except:
149 149 timezone = 0
150 150
151 151 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
152 152
153 153 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
154 154 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
155 155
156 156 if not startDate:
157 157 startDate = startDatetime.date()
158 158
159 159 if not endDate:
160 160 endDate = endDatatime.date()
161 161
162 162 dateList = []
163 163
164 164 thisDatetime = startDatetime
165 165
166 166 while(thisDatetime<=endDatatime):
167 167
168 168 thisDate = thisDatetime.date()
169 169
170 170 if thisDate < startDate:
171 171 continue
172 172
173 173 if thisDate > endDate:
174 174 break
175 175
176 176 dateList.append(thisDate)
177 177 thisDatetime += datetime.timedelta(1)
178 178
179 179 return dateList
180 180
181 181 def setup(self, path = None,
182 182 startDate = None,
183 183 endDate = None,
184 184 startTime = datetime.time(0,0,0),
185 185 endTime = datetime.time(23,59,59),
186 186 channelList = None,
187 187 nSamples = None,
188 188 ippKm = 60,
189 189 online = False,
190 190 delay = 60,
191 191 buffer_size = None,
192 192 nbuffer = 1024,
193 193 **kwargs):
194 194 '''
195 195 In this method we should set all initial parameters.
196 196
197 197 Inputs:
198 198 path
199 199 startDate
200 200 endDate
201 201 startTime
202 202 endTime
203 203 set
204 204 expLabel
205 205 ext
206 206 online
207 207 delay
208 208 '''
209 209
210 210 if not buffer_size:
211 211 buffer_size = nbuffer
212 212
213 213 if not os.path.isdir(path):
214 raise ValueError, "[Reading] This path %s does not exist" %path
214 raise ValueError, "[Reading] Directory %s does not exist" %path
215 215
216 216 try:
217 217 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
218 218 except:
219 219 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)
220 220
221 221 channelNameList = self.digitalReadObj.get_channels()
222 222
223 223 if not channelNameList:
224 raise IOError, "[Reading] The path doesn,t have any files .. "
224 raise ValueError, "[Reading] Directory %s does not have any files" %path
225 225
226 226 if not channelList:
227 227 channelList = range(len(channelNameList))
228 228
229 229 ########## Reading metadata ######################
230 230
231 231 metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])
232 232
233 233 self.__sample_rate = metadata_dict['sample_rate'][0]
234 234 self.__samples_per_file = metadata_dict['samples_per_file'][0]
235 235 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
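# e.g. (illustrative) a 1 MHz sample rate gives 1e6*0.15/1e6 = 0.15 km (150 m) of range per sample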
236 236
237 237 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
238 238
239 239 self.__frequency = this_metadata_file['center_frequencies'].value
240 240 try:
241 241 self.__timezone = this_metadata_file['timezone'].value
242 242 except:
243 243 self.__timezone = 0
244 244
245 245 self.__firstHeigth = 0
246 246
247 247 try:
248 248 codeType = this_metadata_file['codeType'].value
249 249 except:
250 250 codeType = 0
251 251
252 nCode = 0
253 nBaud = 0
254 code = None
252 nCode = 1
253 nBaud = 1
254 code = [1]
255 255
256 256 if codeType:
257 257 nCode = this_metadata_file['nCode'].value
258 258 nBaud = this_metadata_file['nBaud'].value
259 259 code = this_metadata_file['code'].value
260 260
261 261 if not ippKm:
262 262 try:
263 263 #seconds to km
264 264 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
265 265 except:
266 266 ippKm = None
267 267
268 268 ####################################################
269 269 startUTCSecond = None
270 270 endUTCSecond = None
271 271
272 272 if startDate:
273 273 startDatetime = datetime.datetime.combine(startDate, startTime)
274 274 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
275 275
276 276 if endDate:
277 277 endDatetime = datetime.datetime.combine(endDate, endTime)
278 278 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
279 279
280 280 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
281 281
282 282 if not startUTCSecond:
283 283 startUTCSecond = start_index/self.__sample_rate
284 284
285 285 if start_index > startUTCSecond*self.__sample_rate:
286 286 startUTCSecond = start_index/self.__sample_rate
287 287
288 288 if not endUTCSecond:
289 289 endUTCSecond = end_index/self.__sample_rate
290 290
291 291 if end_index < endUTCSecond*self.__sample_rate:
292 292 endUTCSecond = end_index/self.__sample_rate
293 293
294 294 if not nSamples:
295 295 if not ippKm:
296 296 raise ValueError, "[Reading] nSamples or ippKm should be defined"
297 297
298 298 nSamples = ippKm / (1e6*0.15/self.__sample_rate)
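# e.g. (illustrative) ippKm = 60 with a 1 MHz sample rate (0.15 km per sample) gives nSamples = 400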
299 299
300 300 channelBoundList = []
301 301 channelNameListFiltered = []
302 302
303 303 for thisIndexChannel in channelList:
304 304 thisChannelName = channelNameList[thisIndexChannel]
305 305 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
306 306 channelBoundList.append((start_index, end_index))
307 307 channelNameListFiltered.append(thisChannelName)
308 308
309 309 self.profileIndex = 0
310 310
311 311 self.__delay = delay
312 312 self.__ippKm = ippKm
313 313 self.__codeType = codeType
314 314 self.__nCode = nCode
315 315 self.__nBaud = nBaud
316 316 self.__code = code
317 317
318 318 self.__datapath = path
319 319 self.__online = online
320 320 self.__channelList = channelList
321 321 self.__channelNameList = channelNameListFiltered
322 322 self.__channelBoundList = channelBoundList
323 323 self.__nSamples = nSamples
324 324 self.__samples_to_read = buffer_size*nSamples
325 325 self.__nChannels = len(self.__channelList)
326 326
327 327 self.__startUTCSecond = startUTCSecond
328 328 self.__endUTCSecond = endUTCSecond
329 329
330 330 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
331 331
332 332 if online:
333 333 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
334 334 startUTCSecond = numpy.floor(endUTCSecond)
335 335
336 336 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
337 337
338 338 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
339 339
340 340 self.__setFileHeader()
341 341 self.isConfig = True
342 342
343 343 print "[Reading] USRP Data was found from %s to %s " %(
344 344 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
345 345 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
346 346 )
347 347
348 348 print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
349 349 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
350 350 )
351 351
352 352 def __reload(self):
353 353
354 354 if not self.__online:
355 355 return
356 356
357 357 # print
358 358 # print "%s not in range [%s, %s]" %(
359 359 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
360 360 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
361 361 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
362 362 # )
363 363 print "[Reading] reloading metadata ..."
364 364
365 365 try:
366 366 self.digitalReadObj.reload(complete_update=True)
367 367 except:
368 368 self.digitalReadObj.reload()
369 369
370 370 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
371 371
372 372 if start_index > self.__startUTCSecond*self.__sample_rate:
373 373 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
374 374
375 375 if end_index > self.__endUTCSecond*self.__sample_rate:
376 376 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
377 377 print
378 378 print "[Reading] New timerange found [%s, %s] " %(
379 379 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
380 380 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
381 381 )
382 382
383 383 return True
384 384
385 385 return False
386 386
387 387 def __readNextBlock(self, seconds=30, volt_scale = 218776):
388 388 '''
389 389 '''
390 390
391 391 #Set the next data
392 392 self.__flagDiscontinuousBlock = False
393 393 self.__thisUnixSample += self.__samples_to_read
394 394
395 395 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
396 396 print "[Reading] There is no more data in the selected time range"
397 397
398 398 self.__reload()
399 399
400 400 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
401 401 self.__thisUnixSample -= self.__samples_to_read
402 402 return False
403 403
404 404 indexChannel = 0
405 405
406 406 dataOk = False
407 407
408 408 for thisChannelName in self.__channelNameList:
409 409
410 410 try:
411 411 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
412 412 self.__samples_to_read,
413 413 thisChannelName)
414 414
415 415 except IOError, e:
416 416 #read next profile
417 417 self.__flagDiscontinuousBlock = True
418 418 print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
419 419 break
420 420
421 421 if result.shape[0] != self.__samples_to_read:
422 422 self.__flagDiscontinuousBlock = True
423 423 print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
424 424 result.shape[0],
425 425 self.__samples_to_read)
426 426 break
427 427
428 428 self.__data_buffer[indexChannel,:] = result*volt_scale
429 429
430 430 indexChannel += 1
431 431
432 432 dataOk = True
433 433
434 434 self.__utctime = self.__thisUnixSample/self.__sample_rate
435 435
436 436 if not dataOk:
437 437 return False
438 438
439 439 print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
440 440 self.__samples_to_read,
441 441 self.__timeInterval)
442 442
443 443 self.__bufferIndex = 0
444 444
445 445 return True
446 446
447 447 def __isBufferEmpty(self):
448 448
449 449 if self.__bufferIndex <= self.__samples_to_read - self.__nSamples:
450 450 return False
451 451
452 452 return True
453 453
454 454 def getData(self, seconds=30, nTries=5):
455 455
456 456 '''
457 457 This method reads data from the files and puts it into the dataOut object.
458 458
459 459 In addition, it increases the buffer counter by one.
460 460
461 461 Return:
462 462     data : returns a voltage profile (heights x channels) copied from the
463 463         buffer. If there are no more files to read, it returns None.
464 464
465 465 Affected:
466 466 self.dataOut
467 467 self.profileIndex
468 468 self.flagDiscontinuousBlock
469 469 self.flagIsNewBlock
470 470 '''
471 471
472 472 err_counter = 0
473 473 self.dataOut.flagNoData = True
474 474
475 475 if self.__isBufferEmpty():
476 476
477 477 self.__flagDiscontinuousBlock = False
478 478
479 479 while True:
480 480 if self.__readNextBlock():
481 481 break
482 482
483 483 if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
484 484 return False
485 485
486 486 if self.__flagDiscontinuousBlock:
487 487 print '[Reading] discontinuous block found ... continue with the next block'
488 488 continue
489 489
490 490 if not self.__online:
491 491 return False
492 492
493 493 err_counter += 1
494 494 if err_counter > nTries:
495 495 return False
496 496
497 497 print '[Reading] waiting %d seconds to read a new block' %seconds
498 498 sleep(seconds)
499 499
500 500 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
501 501 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
502 502 self.dataOut.flagNoData = False
503 503 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
504 504
505 505 self.__bufferIndex += self.__nSamples
506 506 self.profileIndex += 1
507 507
508 508 return True
509 509
510 510 def printInfo(self):
511 511 '''
512 512 '''
513 513 if self.__printInfo == False:
514 514 return
515 515
516 516 # self.systemHeaderObj.printInfo()
517 517 # self.radarControllerHeaderObj.printInfo()
518 518
519 519 self.__printInfo = False
520 520
521 521 def printNumberOfBlock(self):
522 522 '''
523 523 '''
524 524
525 525 print self.profileIndex
526 526
527 527 def run(self, **kwargs):
528 528 '''
529 529 This method will be called many times so here you should put all your code
530 530 '''
531 531
532 532 if not self.isConfig:
533 533 self.setup(**kwargs)
534 534
535 535 self.getData(seconds=self.__delay)
536 536
537 537 return
538 538
539 539 class USRPWriter(Operation):
540 540 '''
541 541 classdocs
542 542 '''
543 543
544 544 def __init__(self):
545 545 '''
546 546 Constructor
547 547 '''
548 548 self.dataOut = None
549 549
550 550 def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
551 551 '''
552 552 In this method we should set all initial parameters.
553 553
554 554 Input:
555 555 dataIn : Input data will also be the output data
556 556
557 557 '''
558 558 self.dataOut = dataIn
559 559
560 560
561 561
562 562
563 563
564 564 self.isConfig = True
565 565
566 566 return
567 567
568 568 def run(self, dataIn, **kwargs):
569 569 '''
570 570 This method will be called many times so here you should put all your code
571 571
572 572 Inputs:
573 573
574 574 dataIn : object with the data
575 575
576 576 '''
577 577
578 578 if not self.isConfig:
579 579 self.setup(dataIn, **kwargs)
580 580
581 581
582 582 if __name__ == '__main__':
583 583
584 584 readObj = USRPReader()
585 585
586 586 while True:
587 587 readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
588 588 # readObj.printInfo()
589 589 readObj.printNumberOfBlock()
590 590
591 591 No newline at end of file
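The __main__ block above only calls run() in a loop; a minimal, hedged sketch of how a caller might consume the profiles that getData() copies into dataOut is shown below (the path value and the 'path' keyword are assumptions carried over from the __main__ example, not part of this changeset):

readObj = USRPReader()

while True:
    readObj.run(path='/path/to/usrp/data')    # 'path' keyword assumed, as in the __main__ example above

    if readObj.dataOut.flagNoData:            # set when no more data is available in the time range
        break

    if readObj.dataOut.flagDiscontinuousBlock:
        print "discontinuous block at %f" % readObj.dataOut.utctime

    profile = readObj.dataOut.data            # one profile: (channels x heights) complex samples
    print "profile %d read at %f" % (readObj.profileIndex, readObj.dataOut.utctime)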
@@ -1,135 +1,135
1 1 '''
2 2 Created on Jul 15, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import time
7 7 import threading
8 8 import cPickle
9 9
10 10 # try:
11 11 # from gevent import sleep
12 12 # except:
13 13 from time import sleep
14 14
15 15 SERIALIZER = cPickle
16 16
17 17 # from schainpy.serializer import DynamicSerializer
18 18 from schainpy.model.io.jroIO_usrp import USRPReader
19 19 from schainpy.serializer.DataTranslate import obj2Serial
20 20
21 21 class USRPReaderAPI(USRPReader, threading.Thread):
22 22
23 23 # __isBufferEmpty = True
24 24
25 25 __DATAKEYLIST = ['data','utctime','flagNoData']
26 26
27 27 def __init__(self, serializer='msgpack'):
28 28
29 29 threading.Thread.__init__(self)
30 30 USRPReader.__init__(self)
31 31
32 32 # self.__serializerObj = DynamicSerializer.DynamicSerializer('msgpack')
33 33 self.__mySerial = None
34 34 self.__isBufferEmpty = True
35 35
36 36 self.setSerializer(serializer)
37 37
38 38 def setSerializer(self, serializer):
39 39
40 40 self.__serializer = serializer
41 41
42 42 def getSerializer(self):
43 43
44 44 return self.__serializer
45 45
46 46 def getProfileIndex(self):
47 47
48 48 return self.profileIndex
49 49
50 50 def getSerialMetaData(self):
51 51
52 52 if self.__isBufferEmpty:
53 53 ini = time.time()
54 54
55 55 while True:
56 56
57 57 if not self.__isBufferEmpty:
58 58 break
59 59
60 60 if time.time() - ini > 20:
61 61 break
62 62
63 63 sleep(1e-12)
64 64
65 65
66 66 # if not self.getData():
67 67 # self.__isBufferEmpty = False
68 68 # return None
69 69
70 70 if self.dataOut.flagNoData:
71 71 return None
72 72
73 73 myMetadataSerial = obj2Serial(self.dataOut,
74 74 serializer = self.__serializer)
75 75
76 76 return myMetadataSerial
77 77
78 78 def getSerialData(self):
79 79
80 80 if self.__isBufferEmpty:
81 81 ini = time.time()
82 82
83 83 while True:
84 84
85 85 if not self.__isBufferEmpty:
86 86 break
87 87
88 88 if time.time() - ini > 20:
89 89 break
90 90
91 91 sleep(1e-12)
92 92
93 93
94 94 # if not self.getData():
95 95 # self.__isBufferEmpty = False
96 96 # return None
97 97
98 98 if self.dataOut.flagNoData:
99 99 return None
100 100
101 101 self.__isBufferEmpty = True
102 102
103 103 return self.__mySerial
104 104
105 105 def run(self):
106 106
107 107 '''
108 108 This method will be called once when start() is called
109 109 '''
110 110
111 111 if not self.isConfig:
112 raise IOError, 'setup() method has to be called before start()'
112 raise RuntimeError, 'setup() method has to be called before start()'
113 113
114 114 while True:
115 115
116 116 if not self.__isBufferEmpty:
117 117 sleep(1e-12)
118 118 continue
119 119
120 120 if not self.getData():
121 121 break
122 122
123 123 print ".",
124 124
125 125 self.__mySerial = obj2Serial(self.dataOut,
126 126 keyList = self.__DATAKEYLIST,
127 127 serializer = self.__serializer)
128 128 self.__isBufferEmpty = False
129 129
130 130 # print self.profileIndex
131 131 # print 'wait 1 second'
132 132
133 133 # sleep(0.1)
134 134
135 135 return No newline at end of file
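A brief, hedged sketch of how a client might drive this threaded reader: start() runs the acquisition loop defined in run(), while getSerialMetaData()/getSerialData() are polled from the consumer side. The setup() keywords are assumptions inherited from USRPReader.setup() and are not defined in this file:

api = USRPReaderAPI()                    # default serializer is 'msgpack'
api.setup(path='/path/to/usrp/data')     # assumed: setup() inherited from USRPReader
api.start()                              # the run() loop fills the internal serial buffer

meta = api.getSerialMetaData()           # serialized dataOut metadata, or None on timeout

while True:
    block = api.getSerialData()          # serialized ['data', 'utctime', 'flagNoData'] fields
    if block is None:
        break
    # deserialize / forward the block here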
@@ -1,286 +1,286
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
5 5 '''
6 6
7 7 class ProcessingUnit(object):
8 8
9 9 """
10 10 This is the base class for data processing.
11 11
12 12 It provides the "call" method to run operations. Operations can be:
13 13 - Internal methods (callMethod)
14 14 - Objects of type Operation (callObject). Before being called, these objects
15 15 have to be added with the "addOperation" method.
16 16
17 17 """
18 18 # input data object (Voltage, Spectra or Correlation)
19 19 dataIn = None
20 20 dataInList = []
21 21
22 22 # output data object (Voltage, Spectra or Correlation)
23 23 dataOut = None
24 24
25 25 operations2RunDict = None
26 26
27 27 isConfig = False
28 28
29 29
30 30 def __init__(self):
31 31
32 32 self.dataIn = None
33 33 self.dataInList = []
34 34
35 35 self.dataOut = None
36 36
37 37 self.operations2RunDict = {}
38 38
39 39 self.isConfig = False
40 40
41 41 def addOperation(self, opObj, objId):
42 42
43 43 """
44 44 Adds an object of type "Operation" (opObj) to the operations dictionary "self.operations2RunDict" and returns the
45 45 identifier associated with this object.
46 46
47 47 Input:
48 48
49 49 opObj : object of the "Operation" class
50 50
51 51 Return:
52 52
53 53 objId : identifier of the object, needed to execute the operation
54 54 """
55 55
56 56 self.operations2RunDict[objId] = opObj
57 57
58 58 return objId
59 59
60 60 def getOperationObj(self, objId):
61 61
62 62 if objId not in self.operations2RunDict.keys():
63 63 return None
64 64
65 65 return self.operations2RunDict[objId]
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
70 70 Direct operation on the data (dataOut.data). The attribute values of the
71 71 dataOut object must be updated.
72 72
73 73 Input:
74 74
75 75 **kwargs : dictionary of arguments for the function to execute
76 76 """
77 77
78 raise ValueError, "ImplementedError"
78 raise NotImplementedError
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
83 83 Executes the method named "name" of this class with the arguments **kwargs.
84 84
85 85 Input:
86 86 name : name of the method to execute
87 87
88 88 **kwargs : dictionary with the argument names and values for the function to execute.
89 89
90 90 """
91 91
92 92 #Checking the inputs
93 93 if name == 'run':
94 94
95 95 if not self.checkInputs():
96 96 self.dataOut.flagNoData = True
97 97 return False
98 98 else:
99 99 #If it is not a run method, the input is the internal dataOut itself
100 100 if self.dataOut.isEmpty():
101 101 return False
102 102
103 103 #Getting the pointer to method
104 104 methodToCall = getattr(self, name)
105 105
106 106 #Executing the self method
107 107 methodToCall(**kwargs)
108 108
109 109 #Checking the outputs
110 110
111 111 # if name == 'run':
112 112 # pass
113 113 # else:
114 114 # pass
115 115 #
116 116 # if name != 'run':
117 117 # return True
118 118
119 119 if self.dataOut is None:
120 120 return False
121 121
122 122 if self.dataOut.isEmpty():
123 123 return False
124 124
125 125 return True
126 126
127 127 def callObject(self, objId, **kwargs):
128 128
129 129 """
130 130 Executes the operation associated with the object identifier "objId"
131 131
132 132 Input:
133 133
134 134 objId : identifier of the object to execute
135 135
136 136 **kwargs : dictionary with the argument names and values for the function to execute.
137 137
138 138 Return:
139 139
140 140 None
141 141 """
142 142
143 143 if self.dataOut.isEmpty():
144 144 return False
145 145
146 146 externalProcObj = self.operations2RunDict[objId]
147 147
148 148 externalProcObj.run(self.dataOut, **kwargs)
149 149
150 150 return True
151 151
152 152 def call(self, opType, opName=None, opId=None, **kwargs):
153 153
154 154 """
155 155 Returns True if the internal operation named "opName" or the external operation
156 156 identified with the id "opId" was executed with the arguments "**kwargs".
157 157
158 158 Returns False if the operation was not executed.
159 159
160 160 Input:
161 161
162 162 opType : Can be "self" or "external"
163 163
164 164 The operation can be one of two kinds (callMethod or callObject):
165 165
166 166 1. A method of this class:
167 167
168 168 opType = "self"
169 169
170 170 2. The "run" method of an object of type Operation or of one of its subclasses:
171 171
172 172 opType = "other" or "external".
173 173
174 174 opName : If the operation is internal (opType = 'self'), then "opName" is
175 175 used to call an internal method of the Processing class
176 176
177 177 opId : If the operation is external (opType = 'other'), then "opId" is
178 178 used to call the "run" method of the Operation object registered with that id
179 179
180 180 Exception:
181 181 The Operation object must have been added beforehand with the
182 182 "addOperation" method and identified with the value "opId" = the id of the operation.
183 183 Otherwise a ValueError is raised.
184 184
185 185 """
186 186
187 187 if opType == 'self':
188 188
189 189 if not opName:
190 raise IOError, "opName parameter should be defined"
190 raise ValueError, "opName parameter should be defined"
191 191
192 192 sts = self.callMethod(opName, **kwargs)
193 193
194 194 if opType == 'other' or opType == 'external':
195 195
196 196 if not opId:
197 raise IOError, "opId parameter should be defined"
197 raise ValueError, "opId parameter should be defined"
198 198
199 199 if opId not in self.operations2RunDict.keys():
200 raise IOError, "This id operation have not been registered"
200 raise ValueError, "Id operation has not been registered"
201 201
202 202 sts = self.callObject(opId, **kwargs)
203 203
204 204 return sts
205 205
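As a hedged illustration of the two dispatch paths described in the docstring above (internal method vs. registered Operation), a caller of a concrete ProcessingUnit subclass might do something like the following; VoltageProc, CohInt and the input object are used only as plausible examples and are not defined in this file:

procUnit = VoltageProc()                     # any concrete ProcessingUnit subclass
procUnit.setInput(voltageObj)                # placeholder input data object

# 1. Internal method: opType='self' goes through callMethod()
procUnit.call(opType='self', opName='selectChannels', channelList=[0, 1])

# 2. Registered Operation: opType='external' goes through callObject()
opId = procUnit.addOperation(CohInt(), objId=1)
procUnit.call(opType='external', opId=opId, n=4)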
206 206 def setInput(self, dataIn):
207 207
208 208 self.dataIn = dataIn
209 209 self.dataInList.append(dataIn)
210 210
211 211 def getOutputObj(self):
212 212
213 213 return self.dataOut
214 214
215 215 def checkInputs(self):
216 216
217 217 for thisDataIn in self.dataInList:
218 218
219 219 if thisDataIn.isEmpty():
220 220 return False
221 221
222 222 return True
223 223
224 224 def setup(self):
225 225
226 raise ValueError, "Not implemented"
226 raise NotImplementedError
227 227
228 228 def run(self):
229 229
230 raise ValueError, "Not implemented"
230 raise NotImplementedError
231 231
232 232 def close(self):
233 233 #Close every thread, queue or any other object here if it is necessary.
234 234 return
235 235
236 236 class Operation(object):
237 237
238 238 """
239 239 Base class for defining the additional operations that can be added to the ProcessingUnit class
240 240 and that need to accumulate previous information about the data to be processed. Preferably use an
241 241 accumulation buffer inside this class.
242 242
243 243 Example: coherent integration, which needs the previous n profiles (buffer)
244 244
245 245 """
246 246
247 247 __buffer = None
248 248 isConfig = False
249 249
250 250 def __init__(self):
251 251
252 252 self.__buffer = None
253 253 self.isConfig = False
254 254
255 255 def setup(self):
256 256
257 257 self.isConfig = True
258 258
259 raise ValueError, "Not implemented"
259 raise NotImplementedError
260 260
261 261 def run(self, dataIn, **kwargs):
262 262
263 263 """
264 264 Performs the required operations on dataIn.data and updates the
265 265 attributes of the dataIn object.
266 266
267 267 Input:
268 268
269 269 dataIn : object of type JROData
270 270
271 271 Return:
272 272
273 273 None
274 274
275 275 Affected:
276 276 __buffer : data reception buffer.
277 277
278 278 """
279 279 if not self.isConfig:
280 280 self.setup(**kwargs)
281 281
282 raise ValueError, "ImplementedError"
282 raise NotImplementedError
283 283
284 284 def close(self):
285 285
286 286 pass No newline at end of file
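The intended extension point is the setup()/run() pair; a minimal, hypothetical Operation subclass following the pattern enforced by the base class (NotImplementedError until overridden, lazy setup() on the first run()) could look like this ('Scale' is an illustrative name, not part of the project):

class Scale(Operation):
    '''Hypothetical Operation subclass: multiplies the data by a constant factor.'''

    def setup(self, factor=1.0):
        self.factor = factor
        self.isConfig = True

    def run(self, dataOut, **kwargs):
        # lazy configuration on the first call, as in the other operations
        if not self.isConfig:
            self.setup(**kwargs)

        dataOut.data = dataOut.data * self.factor
        return dataOut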
@@ -1,880 +1,875
1 1 import numpy
2 2 import math
3 3
4 4 from jroproc_base import ProcessingUnit, Operation
5 5 from schainpy.model.data.jrodata import Spectra
6 6 from schainpy.model.data.jrodata import hildebrand_sekhon
7 7
8 8 class SpectraProc(ProcessingUnit):
9 9
10 10 def __init__(self):
11 11
12 12 ProcessingUnit.__init__(self)
13 13
14 14 self.buffer = None
15 15 self.firstdatatime = None
16 16 self.profIndex = 0
17 17 self.dataOut = Spectra()
18 18 self.id_min = None
19 19 self.id_max = None
20 20
21 21 def __updateSpecFromVoltage(self):
22 22
23 23 self.dataOut.timeZone = self.dataIn.timeZone
24 24 self.dataOut.dstFlag = self.dataIn.dstFlag
25 25 self.dataOut.errorCount = self.dataIn.errorCount
26 26 self.dataOut.useLocalTime = self.dataIn.useLocalTime
27 27
28 28 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
29 29 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
30 30 self.dataOut.channelList = self.dataIn.channelList
31 31 self.dataOut.heightList = self.dataIn.heightList
32 32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 33
34 34 self.dataOut.nBaud = self.dataIn.nBaud
35 35 self.dataOut.nCode = self.dataIn.nCode
36 36 self.dataOut.code = self.dataIn.code
37 37 self.dataOut.nProfiles = self.dataOut.nFFTPoints
38 38
39 39 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
40 40 self.dataOut.utctime = self.firstdatatime
41 41 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
42 42 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data has not been flipped
43 43 self.dataOut.flagShiftFFT = False
44 44
45 45 self.dataOut.nCohInt = self.dataIn.nCohInt
46 46 self.dataOut.nIncohInt = 1
47 47
48 48 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
49 49
50 50 self.dataOut.frequency = self.dataIn.frequency
51 51 self.dataOut.realtime = self.dataIn.realtime
52 52
53 53 self.dataOut.azimuth = self.dataIn.azimuth
54 54 self.dataOut.zenith = self.dataIn.zenith
55 55
56 56 self.dataOut.beam.codeList = self.dataIn.beam.codeList
57 57 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
58 58 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
59 59
60 60 def __getFft(self):
61 61 """
62 62 Convierte valores de Voltaje a Spectra
63 63
64 64 Affected:
65 65 self.dataOut.data_spc
66 66 self.dataOut.data_cspc
67 67 self.dataOut.data_dc
68 68 self.dataOut.heightList
69 69 self.profIndex
70 70 self.buffer
71 71 self.dataOut.flagNoData
72 72 """
73 73 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
74 74 fft_volt = fft_volt.astype(numpy.dtype('complex'))
75 75 dc = fft_volt[:,0,:]
76 76
77 77 #compute the self-spectra
78 78 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
79 79 spc = fft_volt * numpy.conjugate(fft_volt)
80 80 spc = spc.real
81 81
82 82 blocksize = 0
83 83 blocksize += dc.size
84 84 blocksize += spc.size
85 85
86 86 cspc = None
87 87 pairIndex = 0
88 88 if self.dataOut.pairsList != None:
89 89 #compute the cross-spectra
90 90 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
91 91 for pair in self.dataOut.pairsList:
92 92 if pair[0] not in self.dataOut.channelList:
93 93 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
94 94 if pair[1] not in self.dataOut.channelList:
95 95 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
96 96
97 97 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
98 98 pairIndex += 1
99 99 blocksize += cspc.size
100 100
101 101 self.dataOut.data_spc = spc
102 102 self.dataOut.data_cspc = cspc
103 103 self.dataOut.data_dc = dc
104 104 self.dataOut.blockSize = blocksize
105 105 self.dataOut.flagShiftFFT = True
106 106
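The spectra computation above is plain FFT algebra; a self-contained numpy sketch of the same self-spectra/cross-spectra step on a toy buffer (shapes and values are arbitrary, chosen only for illustration) is:

import numpy

nChannels, nProfiles, nHeights = 2, 8, 4
buffer = (numpy.random.randn(nChannels, nProfiles, nHeights) +
          1j*numpy.random.randn(nChannels, nProfiles, nHeights))

fft_volt = numpy.fft.fft(buffer, n=nProfiles, axis=1)          # FFT along the profile axis
dc = fft_volt[:, 0, :]                                         # zero-frequency (DC) term
fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))             # center the zero frequency

spc = (fft_volt * numpy.conjugate(fft_volt)).real              # self-spectra per channel
cspc = fft_volt[0, :, :] * numpy.conjugate(fft_volt[1, :, :])  # cross-spectrum of the pair (0, 1)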
107 107 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None):
108 108
109 109 self.dataOut.flagNoData = True
110 110
111 111 if self.dataIn.type == "Spectra":
112 112 self.dataOut.copy(self.dataIn)
113 113 return True
114 114
115 115 if self.dataIn.type == "Voltage":
116 116
117 117 if nFFTPoints == None:
118 118 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
119 119
120 120 if nProfiles == None:
121 121 nProfiles = nFFTPoints
122 # raise ValueError, "This SpectraProc.run() need nProfiles input variable"
123 122
124 123 if ippFactor == None:
125 124 ippFactor = 1
126 125
127 126 self.dataOut.ippFactor = ippFactor
128 127
129 128 self.dataOut.nFFTPoints = nFFTPoints
130 129 self.dataOut.pairsList = pairsList
131 130
132 131 if self.buffer is None:
133 132 self.buffer = numpy.zeros( (self.dataIn.nChannels,
134 133 nProfiles,
135 134 self.dataIn.nHeights),
136 135 dtype='complex')
137 136
138 137 if self.dataIn.flagDataAsBlock:
139 138
140 139 if self.dataIn.nProfiles == nProfiles:
141 140 self.buffer = self.dataIn.data.copy()
142 141 self.profIndex = nProfiles
143 142
144 143 elif self.dataIn.nProfiles < nProfiles:
145 144
146 145 if self.profIndex == 0:
147 146 self.id_min = 0
148 147 self.id_max = self.dataIn.nProfiles
149 148
150 149 self.buffer[:,self.id_min:self.id_max,:] = self.dataIn.data
151 150 self.profIndex += self.dataIn.nProfiles
152 151 self.id_min += self.dataIn.data.shape[1]
153 152 self.id_max += self.dataIn.data.shape[1]
154 153 else:
155 154 raise ValueError, "The object of type %s has %d profiles; it should have %d profiles"%(self.dataIn.type,self.dataIn.data.shape[1],nProfiles)
156 155 self.dataOut.flagNoData = True
157 156 return 0
158 157 else:
159 158 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
160 159 self.profIndex += 1
161 160
162 161 if self.firstdatatime == None:
163 162 self.firstdatatime = self.dataIn.utctime
164 163
165 164 if self.profIndex == nProfiles:
166 165 self.__updateSpecFromVoltage()
167 166 self.__getFft()
168 167
169 168 self.dataOut.flagNoData = False
170 169 self.firstdatatime = None
171 170 self.profIndex = 0
172 171
173 172 return True
174 173
175 174 raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type)
176 175
177 176 def __selectPairs(self, channelList=None):
178 177
179 178 if channelList == None:
180 179 return
181 180
182 181 pairsIndexListSelected = []
183 182 for pairIndex in self.dataOut.pairsIndexList:
184 183 #First pair
185 184 if self.dataOut.pairsList[pairIndex][0] not in channelList:
186 185 continue
187 186 #Second pair
188 187 if self.dataOut.pairsList[pairIndex][1] not in channelList:
189 188 continue
190 189
191 190 pairsIndexListSelected.append(pairIndex)
192 191
193 192 if not pairsIndexListSelected:
194 193 self.dataOut.data_cspc = None
195 194 self.dataOut.pairsList = []
196 195 return
197 196
198 197 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
199 198 self.dataOut.pairsList = [self.dataOut.pairsList[i] for i in pairsIndexListSelected]
200 199
201 200 return
202 201
203 202 def selectChannels(self, channelList):
204 203
205 204 channelIndexList = []
206 205
207 206 for channel in channelList:
208 207 if channel not in self.dataOut.channelList:
209 208 raise ValueError, "Error selecting channels: The value %d in channelList is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))
210 209
211 210 index = self.dataOut.channelList.index(channel)
212 211 channelIndexList.append(index)
213 212
214 213 self.selectChannelsByIndex(channelIndexList)
215 214
216 215 def selectChannelsByIndex(self, channelIndexList):
217 216 """
218 217 Selects a block of data by channel, according to channelIndexList
219 218
220 219 Input:
221 220 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
222 221
223 222 Affected:
224 223 self.dataOut.data_spc
225 224 self.dataOut.channelIndexList
226 225 self.dataOut.nChannels
227 226
228 227 Return:
229 228 None
230 229 """
231 230
232 231 for channelIndex in channelIndexList:
233 232 if channelIndex not in self.dataOut.channelIndexList:
234 233 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = %s" %(channelIndex, str(self.dataOut.channelIndexList))
235 234
236 235 # nChannels = len(channelIndexList)
237 236
238 237 data_spc = self.dataOut.data_spc[channelIndexList,:]
239 238 data_dc = self.dataOut.data_dc[channelIndexList,:]
240 239
241 240 self.dataOut.data_spc = data_spc
242 241 self.dataOut.data_dc = data_dc
243 242
244 243 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
245 244 # self.dataOut.nChannels = nChannels
246 245
247 246 self.__selectPairs(self.dataOut.channelList)
248 247
249 248 return 1
250 249
251 250 def selectHeights(self, minHei, maxHei):
252 251 """
253 252 Selects a block of data by height values, within the range
254 253 minHei <= height <= maxHei
255 254
256 255 Input:
257 256 minHei : minimum height to consider
258 257 maxHei : maximum height to consider
259 258
260 259 Affected:
261 260 Several values are changed indirectly through the selectHeightsByIndex method
262 261
263 262 Return:
264 263 1 if the method ran successfully, otherwise 0
265 264 """
266 265
267 266 if (minHei > maxHei):
268 267 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)
269 268
270 269 if (minHei < self.dataOut.heightList[0]):
271 270 minHei = self.dataOut.heightList[0]
272 271
273 272 if (maxHei > self.dataOut.heightList[-1]):
274 273 maxHei = self.dataOut.heightList[-1]
275 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
276 274
277 275 minIndex = 0
278 276 maxIndex = 0
279 277 heights = self.dataOut.heightList
280 278
281 279 inda = numpy.where(heights >= minHei)
282 280 indb = numpy.where(heights <= maxHei)
283 281
284 282 try:
285 283 minIndex = inda[0][0]
286 284 except:
287 285 minIndex = 0
288 286
289 287 try:
290 288 maxIndex = indb[0][-1]
291 289 except:
292 290 maxIndex = len(heights)
293 291
294 292 self.selectHeightsByIndex(minIndex, maxIndex)
295 293
296 294 return 1
297 295
298 296 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
299 297 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
300 298
301 299 if hei_ref != None:
302 300 newheis = numpy.where(self.dataOut.heightList>hei_ref)
303 301
304 302 minIndex = min(newheis[0])
305 303 maxIndex = max(newheis[0])
306 304 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
307 305 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
308 306
309 307 # determine the indices
310 308 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
311 309 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
312 310 beacon_dB = numpy.sort(avg_dB)[-nheis:]
313 311 beacon_heiIndexList = []
314 312 for val in avg_dB.tolist():
315 313 if val >= beacon_dB[0]:
316 314 beacon_heiIndexList.append(avg_dB.tolist().index(val))
317 315
318 316 #data_spc = data_spc[:,:,beacon_heiIndexList]
319 317 data_cspc = None
320 318 if self.dataOut.data_cspc is not None:
321 319 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
322 320 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
323 321
324 322 data_dc = None
325 323 if self.dataOut.data_dc is not None:
326 324 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
327 325 #data_dc = data_dc[:,beacon_heiIndexList]
328 326
329 327 self.dataOut.data_spc = data_spc
330 328 self.dataOut.data_cspc = data_cspc
331 329 self.dataOut.data_dc = data_dc
332 330 self.dataOut.heightList = heightList
333 331 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
334 332
335 333 return 1
336 334
337 335
338 336 def selectHeightsByIndex(self, minIndex, maxIndex):
339 337 """
340 338 Selects a block of data by height indexes, within the range
341 339 minIndex <= index <= maxIndex
342 340
343 341 Input:
344 342 minIndex : minimum height index to consider
345 343 maxIndex : maximum height index to consider
346 344
347 345 Affected:
348 346 self.dataOut.data_spc
349 347 self.dataOut.data_cspc
350 348 self.dataOut.data_dc
351 349 self.dataOut.heightList
352 350
353 351 Return:
354 352 1 if the method ran successfully, otherwise 0
355 353 """
356 354
357 355 if (minIndex < 0) or (minIndex > maxIndex):
358 356 raise ValueError, "Error selecting heights by index: Index range in (%d,%d) is not valid" % (minIndex, maxIndex)
359 357
360 358 if (maxIndex >= self.dataOut.nHeights):
361 359 maxIndex = self.dataOut.nHeights-1
362 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
363
364 # nHeights = maxIndex - minIndex + 1
365 360
366 361 #Spectra
367 362 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
368 363
369 364 data_cspc = None
370 365 if self.dataOut.data_cspc is not None:
371 366 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
372 367
373 368 data_dc = None
374 369 if self.dataOut.data_dc is not None:
375 370 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
376 371
377 372 self.dataOut.data_spc = data_spc
378 373 self.dataOut.data_cspc = data_cspc
379 374 self.dataOut.data_dc = data_dc
380 375
381 376 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
382 377
383 378 return 1
384 379
385 380 def removeDC(self, mode = 2):
386 381 jspectra = self.dataOut.data_spc
387 382 jcspectra = self.dataOut.data_cspc
388 383
389 384
390 385 num_chan = jspectra.shape[0]
391 386 num_hei = jspectra.shape[2]
392 387
393 388 if jcspectra is not None:
394 389 jcspectraExist = True
395 390 num_pairs = jcspectra.shape[0]
396 391 else: jcspectraExist = False
397 392
398 393 freq_dc = jspectra.shape[1]/2
399 394 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
400 395
401 396 if ind_vel[0]<0:
402 397 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
403 398
404 399 if mode == 1:
405 400 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECCION
406 401
407 402 if jcspectraExist:
408 403 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
409 404
410 405 if mode == 2:
411 406
412 407 vel = numpy.array([-2,-1,1,2])
413 408 xx = numpy.zeros([4,4])
414 409
415 410 for fil in range(4):
416 411 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
417 412
418 413 xx_inv = numpy.linalg.inv(xx)
419 414 xx_aux = xx_inv[0,:]
420 415
421 416 for ich in range(num_chan):
422 417 yy = jspectra[ich,ind_vel,:]
423 418 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
424 419
425 420 junkid = jspectra[ich,freq_dc,:]<=0
426 421 cjunkid = sum(junkid)
427 422
428 423 if cjunkid.any():
429 424 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
430 425
431 426 if jcspectraExist:
432 427 for ip in range(num_pairs):
433 428 yy = jcspectra[ip,ind_vel,:]
434 429 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
435 430
436 431
437 432 self.dataOut.data_spc = jspectra
438 433 self.dataOut.data_cspc = jcspectra
439 434
440 435 return 1
441 436
442 437 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
443 438
444 439 jspectra = self.dataOut.data_spc
445 440 jcspectra = self.dataOut.data_cspc
446 441 jnoise = self.dataOut.getNoise()
447 442 num_incoh = self.dataOut.nIncohInt
448 443
449 444 num_channel = jspectra.shape[0]
450 445 num_prof = jspectra.shape[1]
451 446 num_hei = jspectra.shape[2]
452 447
453 448 #hei_interf
454 449 if hei_interf is None:
455 450 count_hei = num_hei/2 #integer division, the remainder does not matter
456 451 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
457 452 hei_interf = numpy.asarray(hei_interf)[0]
458 453 #nhei_interf
459 454 if (nhei_interf == None):
460 455 nhei_interf = 5
461 456 if (nhei_interf < 1):
462 457 nhei_interf = 1
463 458 if (nhei_interf > count_hei):
464 459 nhei_interf = count_hei
465 460 if (offhei_interf == None):
466 461 offhei_interf = 0
467 462
468 463 ind_hei = range(num_hei)
469 464 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
470 465 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
471 466 mask_prof = numpy.asarray(range(num_prof))
472 467 num_mask_prof = mask_prof.size
473 468 comp_mask_prof = [0, num_prof/2]
474 469
475 470
476 471 #noise_exist: determines whether the variable jnoise has been defined and contains the noise information for each channel
477 472 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
478 473 jnoise = numpy.nan*numpy.ones(num_channel) #keep it indexable so jnoise[0] below does not fail
479 474 noise_exist = jnoise[0] < numpy.Inf
480 475
481 476 #Interference removal subroutine
482 477 for ich in range(num_channel):
483 478 #Sort the spectra by power (lowest to highest)
484 479 power = jspectra[ich,mask_prof,:]
485 480 power = power[:,hei_interf]
486 481 power = power.sum(axis = 0)
487 482 psort = power.ravel().argsort()
488 483
489 484 #Estimate the average interference in the power spectra using
490 485 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
491 486
492 487 if noise_exist:
493 488 # tmp_noise = jnoise[ich] / num_prof
494 489 tmp_noise = jnoise[ich]
495 490 junkspc_interf = junkspc_interf - tmp_noise
496 491 #junkspc_interf[:,comp_mask_prof] = 0
497 492
498 493 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
499 494 jspc_interf = jspc_interf.transpose()
500 495 #Compute the average interference spectrum
501 496 noiseid = numpy.where(jspc_interf <= tmp_noise/ math.sqrt(num_incoh))
502 497 noiseid = noiseid[0]
503 498 cnoiseid = noiseid.size
504 499 interfid = numpy.where(jspc_interf > tmp_noise/ math.sqrt(num_incoh))
505 500 interfid = interfid[0]
506 501 cinterfid = interfid.size
507 502
508 503 if (cnoiseid > 0): jspc_interf[noiseid] = 0
509 504
510 505 #Expand the set of profiles to clean
511 506 if (cinterfid > 0):
512 507 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
513 508 new_interfid = numpy.asarray(new_interfid)
514 509 new_interfid = {x for x in new_interfid}
515 510 new_interfid = numpy.array(list(new_interfid))
516 511 new_cinterfid = new_interfid.size
517 512 else: new_cinterfid = 0
518 513
519 514 for ip in range(new_cinterfid):
520 515 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
521 516 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
522 517
523 518
524 519 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #correct indices
525 520
526 521 #Remove the interference from the point with the strongest interference
527 522 ListAux = jspc_interf[mask_prof].tolist()
528 523 maxid = ListAux.index(max(ListAux))
529 524
530 525
531 526 if cinterfid > 0:
532 527 for ip in range(cinterfid*(interf == 2) - 1):
533 528 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/math.sqrt(num_incoh))).nonzero()
534 529 cind = len(ind)
535 530
536 531 if (cind > 0):
537 532 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/math.sqrt(num_incoh))
538 533
539 534 ind = numpy.array([-2,-1,1,2])
540 535 xx = numpy.zeros([4,4])
541 536
542 537 for id1 in range(4):
543 538 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
544 539
545 540 xx_inv = numpy.linalg.inv(xx)
546 541 xx = xx_inv[:,0]
547 542 ind = (ind + maxid + num_mask_prof)%num_mask_prof
548 543 yy = jspectra[ich,mask_prof[ind],:]
549 544 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
550 545
551 546
552 547 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/math.sqrt(num_incoh))).nonzero()
553 548 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/math.sqrt(num_incoh))
554 549
555 550 #Interference removal in the cross-spectra
556 551 if jcspectra is None: return jspectra, jcspectra
557 552 num_pairs = jcspectra.size/(num_prof*num_hei)
558 553 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
559 554
560 555 for ip in range(num_pairs):
561 556
562 557 #-------------------------------------------
563 558
564 559 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
565 560 cspower = cspower[:,hei_interf]
566 561 cspower = cspower.sum(axis = 0)
567 562
568 563 cspsort = cspower.ravel().argsort()
569 564 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
570 565 junkcspc_interf = junkcspc_interf.transpose()
571 566 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
572 567
573 568 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
574 569
575 570 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
576 571 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
577 572 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
578 573
579 574 for iprof in range(num_prof):
580 575 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
581 576 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
582 577
583 578 #Remove the interference
584 579 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
585 580
586 581 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
587 582 maxid = ListAux.index(max(ListAux))
588 583
589 584 ind = numpy.array([-2,-1,1,2])
590 585 xx = numpy.zeros([4,4])
591 586
592 587 for id1 in range(4):
593 588 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
594 589
595 590 xx_inv = numpy.linalg.inv(xx)
596 591 xx = xx_inv[:,0]
597 592
598 593 ind = (ind + maxid + num_mask_prof)%num_mask_prof
599 594 yy = jcspectra[ip,mask_prof[ind],:]
600 595 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
601 596
602 597 #Save results
603 598 self.dataOut.data_spc = jspectra
604 599 self.dataOut.data_cspc = jcspectra
605 600
606 601 return 1
607 602
608 603 def setRadarFrequency(self, frequency=None):
609 604
610 605 if frequency != None:
611 606 self.dataOut.frequency = frequency
612 607
613 608 return 1
614 609
615 610 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
616 611 #height range validation
617 612 if minHei == None:
618 613 minHei = self.dataOut.heightList[0]
619 614
620 615 if maxHei == None:
621 616 maxHei = self.dataOut.heightList[-1]
622 617
623 618 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
624 619 print 'minHei: %.2f is out of the height range'%(minHei)
625 620 print 'minHei is set to %.2f'%(self.dataOut.heightList[0])
626 621 minHei = self.dataOut.heightList[0]
627 622
628 623 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
629 624 print 'maxHei: %.2f is out of the height range'%(maxHei)
630 625 print 'maxHei is set to %.2f'%(self.dataOut.heightList[-1])
631 626 maxHei = self.dataOut.heightList[-1]
632 627
633 628 # velocity range validation
634 629 velrange = self.dataOut.getVelRange(1)
635 630
636 631 if minVel == None:
637 632 minVel = velrange[0]
638 633
639 634 if maxVel == None:
640 635 maxVel = velrange[-1]
641 636
642 637 if (minVel < velrange[0]) or (minVel > maxVel):
643 638 print 'minVel: %.2f is out of the velocity range'%(minVel)
644 639 print 'minVel is set to %.2f'%(velrange[0])
645 640 minVel = velrange[0]
646 641
647 642 if (maxVel > velrange[-1]) or (maxVel < minVel):
648 643 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
649 644 print 'maxVel is set to %.2f'%(velrange[-1])
650 645 maxVel = velrange[-1]
651 646
652 647 # select indexes for the height range
653 648 minIndex = 0
654 649 maxIndex = 0
655 650 heights = self.dataOut.heightList
656 651
657 652 inda = numpy.where(heights >= minHei)
658 653 indb = numpy.where(heights <= maxHei)
659 654
660 655 try:
661 656 minIndex = inda[0][0]
662 657 except:
663 658 minIndex = 0
664 659
665 660 try:
666 661 maxIndex = indb[0][-1]
667 662 except:
668 663 maxIndex = len(heights)
669 664
670 665 if (minIndex < 0) or (minIndex > maxIndex):
671 666 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
672 667
673 668 if (maxIndex >= self.dataOut.nHeights):
674 669 maxIndex = self.dataOut.nHeights-1
675 670
676 671 # select indexes for the velocities
677 672 indminvel = numpy.where(velrange >= minVel)
678 673 indmaxvel = numpy.where(velrange <= maxVel)
679 674 try:
680 675 minIndexVel = indminvel[0][0]
681 676 except:
682 677 minIndexVel = 0
683 678
684 679 try:
685 680 maxIndexVel = indmaxvel[0][-1]
686 681 except:
687 682 maxIndexVel = len(velrange)
688 683
689 684 #select the spectrum
690 685 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
691 686 #noise estimation
692 687 noise = numpy.zeros(self.dataOut.nChannels)
693 688
694 689 for channel in range(self.dataOut.nChannels):
695 690 daux = data_spc[channel,:,:]
696 691 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
697 692
698 693 self.dataOut.noise_estimation = noise.copy()
699 694
700 695 return 1
701 696
702 697 class IncohInt(Operation):
703 698
704 699
705 700 __profIndex = 0
706 701 __withOverapping = False
707 702
708 703 __byTime = False
709 704 __initime = None
710 705 __lastdatatime = None
711 706 __integrationtime = None
712 707
713 708 __buffer_spc = None
714 709 __buffer_cspc = None
715 710 __buffer_dc = None
716 711
717 712 __dataReady = False
718 713
719 714 __timeInterval = None
720 715
721 716 n = None
722 717
723 718
724 719
725 720 def __init__(self):
726 721
727 722 Operation.__init__(self)
728 723 # self.isConfig = False
729 724
730 725 def setup(self, n=None, timeInterval=None, overlapping=False):
731 726 """
732 727 Set the parameters of the integration class.
733 728
734 729 Inputs:
735 730
736 731 n : Number of incoherent integrations
737 732 timeInterval : Integration time. If the parameter "n" is given, this one is ignored
738 733 overlapping :
739 734
740 735 """
741 736
742 737 self.__initime = None
743 738 self.__lastdatatime = 0
744 739
745 740 self.__buffer_spc = 0
746 741 self.__buffer_cspc = 0
747 742 self.__buffer_dc = 0
748 743
749 744 self.__profIndex = 0
750 745 self.__dataReady = False
751 746 self.__byTime = False
752 747
753 748 if n is None and timeInterval is None:
754 749 raise ValueError, "n or timeInterval should be specified ..."
755 750
756 751 if n is not None:
757 752 self.n = int(n)
758 753 else:
759 754 self.__integrationtime = int(timeInterval) #if (type(timeInterval)!=integer) -> change this line
760 755 self.n = None
761 756 self.__byTime = True
762 757
763 758 def putData(self, data_spc, data_cspc, data_dc):
764 759
765 760 """
766 761 Add a profile to __buffer_spc and increase __profIndex by one
767 762
768 763 """
769 764
770 765 self.__buffer_spc += data_spc
771 766
772 767 if data_cspc is None:
773 768 self.__buffer_cspc = None
774 769 else:
775 770 self.__buffer_cspc += data_cspc
776 771
777 772 if data_dc is None:
778 773 self.__buffer_dc = None
779 774 else:
780 775 self.__buffer_dc += data_dc
781 776
782 777 self.__profIndex += 1
783 778
784 779 return
785 780
786 781 def pushData(self):
787 782 """
788 783 Return the sum of the accumulated profiles and the number of profiles used in the sum.
789 784
790 785 Affected:
791 786
792 787 self.__profileIndex
793 788
794 789 """
795 790
796 791 data_spc = self.__buffer_spc
797 792 data_cspc = self.__buffer_cspc
798 793 data_dc = self.__buffer_dc
799 794 n = self.__profIndex
800 795
801 796 self.__buffer_spc = 0
802 797 self.__buffer_cspc = 0
803 798 self.__buffer_dc = 0
804 799 self.__profIndex = 0
805 800
806 801 return data_spc, data_cspc, data_dc, n
807 802
808 803 def byProfiles(self, *args):
809 804
810 805 self.__dataReady = False
811 806 avgdata_spc = None
812 807 avgdata_cspc = None
813 808 avgdata_dc = None
814 809
815 810 self.putData(*args)
816 811
817 812 if self.__profIndex == self.n:
818 813
819 814 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
820 815 self.n = n
821 816 self.__dataReady = True
822 817
823 818 return avgdata_spc, avgdata_cspc, avgdata_dc
824 819
825 820 def byTime(self, datatime, *args):
826 821
827 822 self.__dataReady = False
828 823 avgdata_spc = None
829 824 avgdata_cspc = None
830 825 avgdata_dc = None
831 826
832 827 self.putData(*args)
833 828
834 829 if (datatime - self.__initime) >= self.__integrationtime:
835 830 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
836 831 self.n = n
837 832 self.__dataReady = True
838 833
839 834 return avgdata_spc, avgdata_cspc, avgdata_dc
840 835
841 836 def integrate(self, datatime, *args):
842 837
843 838 if self.__profIndex == 0:
844 839 self.__initime = datatime
845 840
846 841 if self.__byTime:
847 842 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
848 843 else:
849 844 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
850 845
851 846 if not self.__dataReady:
852 847 return None, None, None, None
853 848
854 849 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
855 850
856 851 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
857 852
858 853 if n==1:
859 854 return
860 855
861 856 dataOut.flagNoData = True
862 857
863 858 if not self.isConfig:
864 859 self.setup(n, timeInterval, overlapping)
865 860 self.isConfig = True
866 861
867 862 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
868 863 dataOut.data_spc,
869 864 dataOut.data_cspc,
870 865 dataOut.data_dc)
871 866
872 867 if self.__dataReady:
873 868
874 869 dataOut.data_spc = avgdata_spc
875 870 dataOut.data_cspc = avgdata_cspc
876 871 dataOut.data_dc = avgdata_dc
877 872
878 873 dataOut.nIncohInt *= self.n
879 874 dataOut.utctime = avgdatatime
880 875 dataOut.flagNoData = False
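For reference, a hedged sketch of how IncohInt is typically attached to a SpectraProc unit via the addOperation()/call() mechanism of the base class (the objId value is arbitrary):

specProc = SpectraProc()
opId = specProc.addOperation(IncohInt(), objId=12)     # objId is arbitrary

# inside the processing loop, after specProc has produced a spectrum:
specProc.call(opType='external', opId=opId, n=10)
# once 10 spectra have been accumulated, dataOut.data_spc/data_cspc/data_dc hold
# the sums, dataOut.nIncohInt is multiplied by n and flagNoData is cleared.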
@@ -1,1091 +1,1071
1 1 import numpy
2 2
3 3 from jroproc_base import ProcessingUnit, Operation
4 4 from schainpy.model.data.jrodata import Voltage
5 5
6 6 class VoltageProc(ProcessingUnit):
7 7
8 8
9 9 def __init__(self):
10 10
11 11 ProcessingUnit.__init__(self)
12 12
13 13 # self.objectDict = {}
14 14 self.dataOut = Voltage()
15 15 self.flip = 1
16 16
17 17 def run(self):
18 18 if self.dataIn.type == 'AMISR':
19 19 self.__updateObjFromAmisrInput()
20 20
21 21 if self.dataIn.type == 'Voltage':
22 22 self.dataOut.copy(self.dataIn)
23 23
24 24 # self.dataOut.copy(self.dataIn)
25 25
26 26 def __updateObjFromAmisrInput(self):
27 27
28 28 self.dataOut.timeZone = self.dataIn.timeZone
29 29 self.dataOut.dstFlag = self.dataIn.dstFlag
30 30 self.dataOut.errorCount = self.dataIn.errorCount
31 31 self.dataOut.useLocalTime = self.dataIn.useLocalTime
32 32
33 33 self.dataOut.flagNoData = self.dataIn.flagNoData
34 34 self.dataOut.data = self.dataIn.data
35 35 self.dataOut.utctime = self.dataIn.utctime
36 36 self.dataOut.channelList = self.dataIn.channelList
37 37 # self.dataOut.timeInterval = self.dataIn.timeInterval
38 38 self.dataOut.heightList = self.dataIn.heightList
39 39 self.dataOut.nProfiles = self.dataIn.nProfiles
40 40
41 41 self.dataOut.nCohInt = self.dataIn.nCohInt
42 42 self.dataOut.ippSeconds = self.dataIn.ippSeconds
43 43 self.dataOut.frequency = self.dataIn.frequency
44 44
45 45 self.dataOut.azimuth = self.dataIn.azimuth
46 46 self.dataOut.zenith = self.dataIn.zenith
47 47
48 48 self.dataOut.beam.codeList = self.dataIn.beam.codeList
49 49 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
50 50 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
51 51 #
52 52 # pass#
53 53 #
54 54 # def init(self):
55 55 #
56 56 #
57 57 # if self.dataIn.type == 'AMISR':
58 58 # self.__updateObjFromAmisrInput()
59 59 #
60 60 # if self.dataIn.type == 'Voltage':
61 61 # self.dataOut.copy(self.dataIn)
62 62 # # There is no need to copy the dataIn attributes on every init();
63 63 # # the copy should be done for each new data block
64 64
65 65 def selectChannels(self, channelList):
66 66
67 67 channelIndexList = []
68 68
69 69 for channel in channelList:
70 70 if channel not in self.dataOut.channelList:
71 71 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
72 72
73 73 index = self.dataOut.channelList.index(channel)
74 74 channelIndexList.append(index)
75 75
76 76 self.selectChannelsByIndex(channelIndexList)
77 77
78 78 def selectChannelsByIndex(self, channelIndexList):
79 79 """
80 80 Selects a block of data by channel, according to channelIndexList
81 81
82 82 Input:
83 83 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
84 84
85 85 Affected:
86 86 self.dataOut.data
87 87 self.dataOut.channelIndexList
88 88 self.dataOut.nChannels
89 89 self.dataOut.m_ProcessingHeader.totalSpectra
90 90 self.dataOut.systemHeaderObj.numChannels
91 91 self.dataOut.m_ProcessingHeader.blockSize
92 92
93 93 Return:
94 94 None
95 95 """
96 96
97 97 for channelIndex in channelIndexList:
98 98 if channelIndex not in self.dataOut.channelIndexList:
99 99 print channelIndexList
100 100 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
101 101
102 # nChannels = len(channelIndexList)
103 102 if self.dataOut.flagDataAsBlock:
104 103 """
105 104 If the data is read in blocks, its shape is [nChannels, nProfiles, nHeis]
106 105 """
107 106 data = self.dataOut.data[channelIndexList,:,:]
108 107 else:
109 108 data = self.dataOut.data[channelIndexList,:]
110 109
111 110 self.dataOut.data = data
112 111 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
113 112 # self.dataOut.nChannels = nChannels
114 113
115 114 return 1
116 115
117 116 def selectHeights(self, minHei=None, maxHei=None):
118 117 """
119 118 Selects a block of data by height values, within the range
120 119 minHei <= height <= maxHei
121 120
122 121 Input:
123 122 minHei : minimum height to consider
124 123 maxHei : maximum height to consider
125 124
126 125 Affected:
127 126 Several values are changed indirectly through the selectHeightsByIndex method
128 127
129 128 Return:
130 129 1 if the method ran successfully, otherwise 0
131 130 """
132 131
133 132 if minHei == None:
134 133 minHei = self.dataOut.heightList[0]
135 134
136 135 if maxHei == None:
137 136 maxHei = self.dataOut.heightList[-1]
138 137
139 138 if (minHei < self.dataOut.heightList[0]):
140 139 minHei = self.dataOut.heightList[0]
141 # raise ValueError, "height range [%d,%d] is not valid. Data height range is [%d, %d]" % (minHei,
142 # maxHei,
143 # self.dataOut.heightList[0],
144 # self.dataOut.heightList[-1])
145 140
146 141 if (maxHei > self.dataOut.heightList[-1]):
147 142 maxHei = self.dataOut.heightList[-1]
148 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
149 143
150 144 minIndex = 0
151 145 maxIndex = 0
152 146 heights = self.dataOut.heightList
153 147
154 148 inda = numpy.where(heights >= minHei)
155 149 indb = numpy.where(heights <= maxHei)
156 150
157 151 try:
158 152 minIndex = inda[0][0]
159 153 except:
160 154 minIndex = 0
161 155
162 156 try:
163 157 maxIndex = indb[0][-1]
164 158 except:
165 159 maxIndex = len(heights)
166 160
167 161 self.selectHeightsByIndex(minIndex, maxIndex)
168 162
169 163 return 1
170 164
171 165
172 166 def selectHeightsByIndex(self, minIndex, maxIndex):
173 167 """
174 168 Selects a block of data by height indexes, within the range
175 169 minIndex <= index <= maxIndex
176 170
177 171 Input:
178 172 minIndex : minimum height index to consider
179 173 maxIndex : maximum height index to consider
180 174
181 175 Affected:
182 176 self.dataOut.data
183 177 self.dataOut.heightList
184 178
185 179 Return:
186 180 1 if the method ran successfully, otherwise 0
187 181 """
188 182
189 183 if (minIndex < 0) or (minIndex > maxIndex):
190 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
184 raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)
191 185
192 186 if (maxIndex >= self.dataOut.nHeights):
193 187 maxIndex = self.dataOut.nHeights
194 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
195
196 # nHeights = maxIndex - minIndex + 1
197 188
198 189 #voltage
199 190 if self.dataOut.flagDataAsBlock:
200 191 """
201 192 If the data is read in blocks, its shape is [nChannels, nProfiles, nHeis]
202 193 """
203 194 data = self.dataOut.data[:,:, minIndex:maxIndex]
204 195 else:
205 196 data = self.dataOut.data[:, minIndex:maxIndex]
206 197
207 198 # firstHeight = self.dataOut.heightList[minIndex]
208 199
209 200 self.dataOut.data = data
210 201 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
211 202
212 203 if self.dataOut.nHeights <= 1:
213 204 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
214 205
215 206 return 1
216 207
217 208
218 209 def filterByHeights(self, window):
219 210
220 211 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
221 212
222 213 if window == None:
223 214 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
224 215
225 216 newdelta = deltaHeight * window
226 217 r = self.dataOut.nHeights % window
227 218 newheights = (self.dataOut.nHeights-r)/window
228 219
229 220 if newheights <= 1:
230 221 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
231 222
232 223 if self.dataOut.flagDataAsBlock:
233 224 """
235 226 If the data is read in blocks, its shape is [nChannels, nProfiles, nHeis]
235 226 """
236 227 buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
237 228 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
238 229 buffer = numpy.sum(buffer,3)
239 230
240 231 else:
241 232 buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
242 233 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
243 234 buffer = numpy.sum(buffer,2)
244 235
245 236 self.dataOut.data = buffer
246 237 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
247 238 self.dataOut.windowOfFilter = window
248 239
249 240 def setH0(self, h0, deltaHeight = None):
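filterByHeights() decimates the height axis by summing groups of `window` consecutive gates; the reshape-and-sum trick it relies on can be seen in isolation in this small, hedged numpy sketch (the sizes are arbitrary, for illustration only):

import numpy

nChannels, nHeights, window = 2, 10, 3
data = numpy.arange(nChannels * nHeights).reshape(nChannels, nHeights)

r = nHeights % window                                   # gates that do not fill a whole window
buffer = data[:, 0:nHeights - r]                        # drop the trailing remainder
buffer = buffer.reshape(nChannels, (nHeights - r) / window, window)
filtered = numpy.sum(buffer, 2)                         # one summed gate per window

print filtered.shape                                    # (2, 3)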
250 241
251 242 if not deltaHeight:
252 243 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
253 244
254 245 nHeights = self.dataOut.nHeights
255 246
256 247 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
257 248
258 249 self.dataOut.heightList = newHeiRange
259 250
260 251 def deFlip(self, channelList = []):
261 252
262 253 data = self.dataOut.data.copy()
263 254
264 255 if self.dataOut.flagDataAsBlock:
265 256 flip = self.flip
266 257 profileList = range(self.dataOut.nProfiles)
267 258
268 259 if not channelList:
269 260 for thisProfile in profileList:
270 261 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
271 262 flip *= -1.0
272 263 else:
273 264 for thisChannel in channelList:
274 265 if thisChannel not in self.dataOut.channelList:
275 266 continue
276 267
277 268 for thisProfile in profileList:
278 269 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
279 270 flip *= -1.0
280 271
281 272 self.flip = flip
282 273
283 274 else:
284 275 if not channelList:
285 276 data[:,:] = data[:,:]*self.flip
286 277 else:
287 278 for thisChannel in channelList:
288 279 if thisChannel not in self.dataOut.channelList:
289 280 continue
290 281
291 282 data[thisChannel,:] = data[thisChannel,:]*self.flip
292 283
293 284 self.flip *= -1.
294 285
295 286 self.dataOut.data = data
296 287
297 288 def setRadarFrequency(self, frequency=None):
298 289
299 290 if frequency != None:
300 291 self.dataOut.frequency = frequency
301 292
302 293 return 1
303 294
304 295 class CohInt(Operation):
305 296
306 297 isConfig = False
307 298
308 299 __profIndex = 0
309 300 __withOverapping = False
310 301
311 302 __byTime = False
312 303 __initime = None
313 304 __lastdatatime = None
314 305 __integrationtime = None
315 306
316 307 __buffer = None
317 308
318 309 __dataReady = False
319 310
320 311 n = None
321 312
322 313
323 314 def __init__(self):
324 315
325 316 Operation.__init__(self)
326 317
327 318 # self.isConfig = False
328 319
329 320 def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
330 321 """
331 322 Set the parameters of the integration class.
332 323
333 324 Inputs:
334 325
335 326 n : Number of coherent integrations
336 327 timeInterval : Integration time. If the parameter "n" is given, this one is ignored
337 328 overlapping :
338 329
339 330 """
340 331
341 332 self.__initime = None
342 333 self.__lastdatatime = 0
343 334 self.__buffer = None
344 335 self.__dataReady = False
345 336 self.byblock = byblock
346 337
347 338 if n == None and timeInterval == None:
348 339 raise ValueError, "n or timeInterval should be specified ..."
349 340
350 341 if n != None:
351 342 self.n = n
352 343 self.__byTime = False
353 344 else:
354 345 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
355 346 self.n = 9999
356 347 self.__byTime = True
357 348
358 349 if overlapping:
359 350 self.__withOverapping = True
360 351 self.__buffer = None
361 352 else:
362 353 self.__withOverapping = False
363 354 self.__buffer = 0
364 355
365 356 self.__profIndex = 0
366 357
367 358 def putData(self, data):
368 359
369 360 """
370 361 Add a profile to __buffer and increase __profIndex by one
371 362
372 363 """
373 364
374 365 if not self.__withOverapping:
375 366 self.__buffer += data.copy()
376 367 self.__profIndex += 1
377 368 return
378 369
379 370 #Overlapping data
380 371 nChannels, nHeis = data.shape
381 372 data = numpy.reshape(data, (1, nChannels, nHeis))
382 373
383 374 #If the buffer is empty then it takes the data value
384 375 if self.__buffer is None:
385 376 self.__buffer = data
386 377 self.__profIndex += 1
387 378 return
388 379
389 380 #If the buffer length is lower than n then stack the data value
390 381 if self.__profIndex < self.n:
391 382 self.__buffer = numpy.vstack((self.__buffer, data))
392 383 self.__profIndex += 1
393 384 return
394 385
395 386 #If the buffer length is equal to n then replace the last buffer value with the data value
396 387 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
397 388 self.__buffer[self.n-1] = data
398 389 self.__profIndex = self.n
399 390 return
400 391
401 392
402 393 def pushData(self):
403 394 """
404 395 Return the sum of the buffered profiles and the number of profiles used in the sum.
405 396
406 397 Affected:
407 398
408 399 self.__profIndex
409 400
410 401 """
411 402
412 403 if not self.__withOverapping:
413 404 data = self.__buffer
414 405 n = self.__profIndex
415 406
416 407 self.__buffer = 0
417 408 self.__profIndex = 0
418 409
419 410 return data, n
420 411
421 412 #Integration with Overlapping
422 413 data = numpy.sum(self.__buffer, axis=0)
423 414 n = self.__profIndex
424 415
425 416 return data, n
426 417
427 418 def byProfiles(self, data):
428 419
429 420 self.__dataReady = False
430 421 avgdata = None
431 422 # n = None
432 423
433 424 self.putData(data)
434 425
435 426 if self.__profIndex == self.n:
436 427
437 428 avgdata, n = self.pushData()
438 429 self.__dataReady = True
439 430
440 431 return avgdata
441 432
442 433 def byTime(self, data, datatime):
443 434
444 435 self.__dataReady = False
445 436 avgdata = None
446 437 n = None
447 438
448 439 self.putData(data)
449 440
450 441 if (datatime - self.__initime) >= self.__integrationtime:
451 442 avgdata, n = self.pushData()
452 443 self.n = n
453 444 self.__dataReady = True
454 445
455 446 return avgdata
456 447
457 448 def integrate(self, data, datatime=None):
458 449
459 450 if self.__initime == None:
460 451 self.__initime = datatime
461 452
462 453 if self.__byTime:
463 454 avgdata = self.byTime(data, datatime)
464 455 else:
465 456 avgdata = self.byProfiles(data)
466 457
467 458
468 459 self.__lastdatatime = datatime
469 460
470 461 if avgdata is None:
471 462 return None, None
472 463
473 464 avgdatatime = self.__initime
474 465
475 466 deltatime = datatime -self.__lastdatatime
476 467
477 468 if not self.__withOverapping:
478 469 self.__initime = datatime
479 470 else:
480 471 self.__initime += deltatime
481 472
482 473 return avgdata, avgdatatime
483 474
484 475 def integrateByBlock(self, dataOut):
485 476
486 477 times = int(dataOut.data.shape[1]/self.n)
487 478 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
488 479
489 480 id_min = 0
490 481 id_max = self.n
491 482
492 483 for i in range(times):
493 484 junk = dataOut.data[:,id_min:id_max,:]
494 485 avgdata[:,i,:] = junk.sum(axis=1)
495 486 id_min += self.n
496 487 id_max += self.n
497 488
498 489 timeInterval = dataOut.ippSeconds*self.n
499 490 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
500 491 self.__dataReady = True
501 492 return avgdata, avgdatatime
502 493
503 494 def run(self, dataOut, **kwargs):
504 495
505 496 if not self.isConfig:
506 497 self.setup(**kwargs)
507 498 self.isConfig = True
508 499
509 500 if dataOut.flagDataAsBlock:
510 501 """
511 502 If the data was read as a block, its dimensions are [nChannels, nProfiles, nHeis]
512 503 """
513 504 avgdata, avgdatatime = self.integrateByBlock(dataOut)
514 505 else:
515 506 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
516 507
517 508 # dataOut.timeInterval *= n
518 509 dataOut.flagNoData = True
519 510
520 511 if self.__dataReady:
521 512 dataOut.data = avgdata
522 513 dataOut.nCohInt *= self.n
523 514 dataOut.utctime = avgdatatime
524 515 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
525 516 dataOut.flagNoData = False
526 517
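# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): non-overlapping coherent
# integration as performed by CohInt / integrateByBlock, shown for a single
# channel. Summing n consecutive complex profiles adds the signal coherently
# while the noise adds incoherently, so the power SNR improves by roughly a
# factor of n. "profiles" is assumed to be a (nProfiles, nHeights) complex
# array; the helper name and shapes are illustrative assumptions.
def _sketch_coherent_integration(profiles, n):

    nProfiles, nHeights = profiles.shape
    nGroups = nProfiles // n

    # drop the tail that does not complete a group of n profiles
    usable = profiles[:nGroups * n, :]

    # sum every n consecutive profiles (axis 1 after the reshape)
    integrated = usable.reshape(nGroups, n, nHeights).sum(axis=1)

    return integrated
# ---------------------------------------------------------------------------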
527 518 class Decoder(Operation):
528 519
529 520 isConfig = False
530 521 __profIndex = 0
531 522
532 523 code = None
533 524
534 525 nCode = None
535 526 nBaud = None
536 527
537 528
538 529 def __init__(self):
539 530
540 531 Operation.__init__(self)
541 532
542 533 self.times = None
543 534 self.osamp = None
544 535 # self.__setValues = False
545 536 self.isConfig = False
546 537
547 538 def setup(self, code, osamp, dataOut):
548 539
549 540 self.__profIndex = 0
550 541
551 542 self.code = code
552 543
553 544 self.nCode = len(code)
554 545 self.nBaud = len(code[0])
555 546
556 547 if (osamp != None) and (osamp >1):
557 548 self.osamp = osamp
558 549 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
559 550 self.nBaud = self.nBaud*self.osamp
560 551
561 552 self.__nChannels = dataOut.nChannels
562 553 self.__nProfiles = dataOut.nProfiles
563 554 self.__nHeis = dataOut.nHeights
564 555
565 556 if self.__nHeis < self.nBaud:
566 print 'IOError: Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
567 raise IOError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
557 raise ValueError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
568 558
569 559 #Frequency
570 560 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
571 561
572 562 __codeBuffer[:,0:self.nBaud] = self.code
573 563
574 564 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
575 565
576 566 if dataOut.flagDataAsBlock:
577 567
578 568 self.ndatadec = self.__nHeis #- self.nBaud + 1
579 569
580 570 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
581 571
582 572 else:
583 573
584 574 #Time
585 575 self.ndatadec = self.__nHeis #- self.nBaud + 1
586 576
587 577 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
588 578
589 579 def __convolutionInFreq(self, data):
590 580
591 581 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
592 582
593 583 fft_data = numpy.fft.fft(data, axis=1)
594 584
595 585 conv = fft_data*fft_code
596 586
597 587 data = numpy.fft.ifft(conv,axis=1)
598 588
599 589 return data
600 590
601 591 def __convolutionInFreqOpt(self, data):
602 592
603 593 raise NotImplementedError
604 594
605 # fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
606 #
607 # data = cfunctions.decoder(fft_code, data)
608 #
609 # datadec = data#[:,:]
610 #
611 # return datadec
612
613 595 def __convolutionInTime(self, data):
614 596
615 597 code = self.code[self.__profIndex]
616 598
617 599 for i in range(self.__nChannels):
618 600 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
619 601
620 602 return self.datadecTime
621 603
622 604 def __convolutionByBlockInTime(self, data):
623 605
624 606 repetitions = self.__nProfiles / self.nCode
625 607
626 608 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
627 609 junk = junk.flatten()
628 610 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
629 611
630 612 for i in range(self.__nChannels):
631 613 for j in range(self.__nProfiles):
632 614 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
633 615
634 616 return self.datadecTime
635 617
636 618 def __convolutionByBlockInFreq(self, data):
637 619
638 620 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
639 621
640 622 fft_data = numpy.fft.fft(data, axis=2)
641 623
642 624 conv = fft_data*fft_code
643 625
644 626 data = numpy.fft.ifft(conv,axis=2)
645 627
646 628 return data
647 629
648 630 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
649 631
650 632 if dataOut.flagDecodeData:
651 633 print "This data is already decoded, decoding it again ..."
652 634
653 635 if not self.isConfig:
654 636
655 637 if code is None:
656 638 if dataOut.code is None:
657 print "Code is not defined"
658 raise ValueError, "Code could not be read from %s object. Enter a value in Code parameter" %dataOut.type
639 raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type
659 640
660 641 code = dataOut.code
661 642 else:
662 643 code = numpy.array(code).reshape(nCode,nBaud)
663 644
664 645 self.setup(code, osamp, dataOut)
665 646
666 647 self.isConfig = True
667 648
668 649 if self.code is None:
669 650 print "Fail decoding: Code is not defined."
670 651 return
671 652
672 653 datadec = None
673 654
674 655 if dataOut.flagDataAsBlock:
675 656 """
676 657 Decoding when the data has been read as a block
677 658 """
678 659 if mode == 0:
679 660 datadec = self.__convolutionByBlockInTime(dataOut.data)
680 661 if mode == 1:
681 662 datadec = self.__convolutionByBlockInFreq(dataOut.data)
682 663 else:
683 664 """
684 665 Decoding when the data has been read profile by profile
685 666 """
686 667 if mode == 0:
687 668 datadec = self.__convolutionInTime(dataOut.data)
688 669
689 670 if mode == 1:
690 671 datadec = self.__convolutionInFreq(dataOut.data)
691 672
692 673 if mode == 2:
693 674 datadec = self.__convolutionInFreqOpt(dataOut.data)
694 675
695 676 if datadec is None:
696 677 raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode
697 678
698 679 dataOut.code = self.code
699 680 dataOut.nCode = self.nCode
700 681 dataOut.nBaud = self.nBaud
701 682
702 683 dataOut.data = datadec
703 684
704 685 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
705 686
706 687 dataOut.flagDecodeData = True # assume the data is now decoded
707 688
708 689 if self.__profIndex == self.nCode-1:
709 690 self.__profIndex = 0
710 691 return 1
711 692
712 693 self.__profIndex += 1
713 694
714 695 return 1
715 696 # dataOut.flagDeflipData = True # assume the data has not been deflipped
716 697
717 698
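# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): pulse decoding as a
# matched filter, in the same spirit as Decoder.__convolutionInTime and
# Decoder.__convolutionInFreq. The time-domain path cross-correlates one
# profile with the code; the frequency-domain path multiplies the profile
# spectrum by the conjugate spectrum of the zero-padded code. The two agree
# except for the circular wrap-around of the last (nBaud - 1) samples.
# The helper name and shapes are illustrative assumptions.
def _sketch_decode_profile(profile, code):

    nHeis = profile.shape[0]
    nBaud = code.shape[0]

    # time domain: cross-correlation, keeping the first nHeis lags
    decoded_time = numpy.correlate(profile, code, mode='full')[nBaud - 1:]

    # frequency domain: multiply by the conjugate spectrum of the zero-padded code
    code_buffer = numpy.zeros(nHeis, dtype=numpy.complex128)
    code_buffer[0:nBaud] = code
    decoded_freq = numpy.fft.ifft(numpy.fft.fft(profile) * numpy.conj(numpy.fft.fft(code_buffer)))

    return decoded_time, decoded_freq
# ---------------------------------------------------------------------------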
718 699 class ProfileConcat(Operation):
719 700
720 701 isConfig = False
721 702 buffer = None
722 703
723 704 def __init__(self):
724 705
725 706 Operation.__init__(self)
726 707 self.profileIndex = 0
727 708
728 709 def reset(self):
729 710 self.buffer = numpy.zeros_like(self.buffer)
730 711 self.start_index = 0
731 712 self.times = 1
732 713
733 714 def setup(self, data, m, n=1):
734 715 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
735 716 self.nHeights = data.shape[1] # "data" is a (nChannels, nHeights) array, so take the heights from its shape
736 717 self.start_index = 0
737 718 self.times = 1
738 719
739 720 def concat(self, data):
740 721
741 722 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
742 723 self.start_index = self.start_index + self.nHeights
743 724
744 725 def run(self, dataOut, m):
745 726
746 727 dataOut.flagNoData = True
747 728
748 729 if not self.isConfig:
749 730 self.setup(dataOut.data, m, 1)
750 731 self.isConfig = True
751 732
752 733 if dataOut.flagDataAsBlock:
753
754 734 raise ValueError, "ProfileConcat can only be used when voltage data has been read profile by profile (getBlock = False)"
755 735
756 736 else:
757 737 self.concat(dataOut.data)
758 738 self.times += 1
759 739 if self.times > m:
760 740 dataOut.data = self.buffer
761 741 self.reset()
762 742 dataOut.flagNoData = False
763 743 # more header and dataOut attributes should be updated here, for example the height list
764 744 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
765 745 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
766 746 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
767 747 dataOut.ippSeconds *= m
768 748
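# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): ProfileConcat glues m
# consecutive profiles side by side along the height axis, so each output
# profile covers m * nHeights samples, the height list grows by the same
# factor and ippSeconds is multiplied by m. The helper below shows only the
# data rearrangement on a synthetic (nChannels, nProfiles, nHeights) block;
# the function name and shapes are illustrative assumptions.
def _sketch_concat_profiles(block, m):

    nChannels, nProfiles, nHeights = block.shape
    nGroups = nProfiles // m

    # each group of m profiles becomes a single profile of m*nHeights samples
    usable = block[:, :nGroups * m, :]
    concatenated = usable.reshape(nChannels, nGroups, m * nHeights)

    return concatenated
# ---------------------------------------------------------------------------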
769 749 class ProfileSelector(Operation):
770 750
771 751 profileIndex = None
772 752 # Total number of profiles
773 753 nProfiles = None
774 754
775 755 def __init__(self):
776 756
777 757 Operation.__init__(self)
778 758 self.profileIndex = 0
779 759
780 760 def incIndex(self):
781 761
782 762 self.profileIndex += 1
783 763
784 764 if self.profileIndex >= self.nProfiles:
785 765 self.profileIndex = 0
786 766
787 767 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
788 768
789 769 if profileIndex < minIndex:
790 770 return False
791 771
792 772 if profileIndex > maxIndex:
793 773 return False
794 774
795 775 return True
796 776
797 777 def isThisProfileInList(self, profileIndex, profileList):
798 778
799 779 if profileIndex not in profileList:
800 780 return False
801 781
802 782 return True
803 783
804 784 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
805 785
806 786 """
807 787 ProfileSelector:
808 788
809 789 Inputs:
810 790 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
811 791
812 792 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
813 793
814 794 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
815 795
816 796 """
817 797
818 798 dataOut.flagNoData = True
819 799
820 800 if dataOut.flagDataAsBlock:
821 801 """
822 802 data dimension = [nChannels, nProfiles, nHeis]
823 803 """
824 804 if profileList != None:
825 805 dataOut.data = dataOut.data[:,profileList,:]
826 806 dataOut.nProfiles = len(profileList)
827 807 dataOut.profileIndex = dataOut.nProfiles - 1
828 808
829 809 if profileRangeList != None:
830 810 minIndex = profileRangeList[0]
831 811 maxIndex = profileRangeList[1]
832 812
833 813 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
834 814 dataOut.nProfiles = maxIndex - minIndex + 1
835 815 dataOut.profileIndex = dataOut.nProfiles - 1
836 816
837 817 if rangeList != None:
838 818 raise ValueError, "Profile Selector: Invalid argument rangeList. Not implemented for getByBlock yet"
839 819
840 820 dataOut.flagNoData = False
841 821
842 822 return True
843 823
844 824 """
845 825 data dimension = [nChannels, nHeis]
846 826 """
847 827
848 828 if nProfiles:
849 829 self.nProfiles = nProfiles
850 830 else:
851 831 self.nProfiles = dataOut.nProfiles
852 832
853 833 if profileList != None:
854 834
855 835 dataOut.nProfiles = len(profileList)
856 836
857 837 if self.isThisProfileInList(dataOut.profileIndex, profileList):
858 838 dataOut.flagNoData = False
859 839 dataOut.profileIndex = self.profileIndex
860 840
861 841 self.incIndex()
862 842 return True
863 843
864 844 if profileRangeList != None:
865 845
866 846 minIndex = profileRangeList[0]
867 847 maxIndex = profileRangeList[1]
868 848
869 849 dataOut.nProfiles = maxIndex - minIndex + 1
870 850
871 851 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
872 852 dataOut.flagNoData = False
873 853 dataOut.profileIndex = self.profileIndex
874 854
875 855 self.incIndex()
876 856 return True
877 857
878 858 if rangeList != None:
879 859
880 860 nProfiles = 0
881 861
882 862 for thisRange in rangeList:
883 863 minIndex = thisRange[0]
884 864 maxIndex = thisRange[1]
885 865
886 866 nProfiles += maxIndex - minIndex + 1
887 867
888 868 dataOut.nProfiles = nProfiles
889 869
890 870 for thisRange in rangeList:
891 871
892 872 minIndex = thisRange[0]
893 873 maxIndex = thisRange[1]
894 874
895 875 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
896 876
897 877 # print "profileIndex = ", dataOut.profileIndex
898 878
899 879 dataOut.flagNoData = False
900 880 dataOut.profileIndex = self.profileIndex
901 881
902 882 self.incIndex()
903 883 break
904 884 return True
905 885
906 886
907 887 if beam != None: #beam is only for AMISR data
908 888 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
909 889 dataOut.flagNoData = False
910 890 dataOut.profileIndex = self.profileIndex
911 891
912 892 self.incIndex()
913 893
914 894 return True
915 895
916 896 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
917 897
918 898 return False
919 899
920 900
921 901
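# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): the block branch of
# ProfileSelector simply keeps the requested profile indexes along axis 1 of
# a [nChannels, nProfiles, nHeights] array, either from an explicit list or
# from a (min, max) index range. The helper name and parameters are
# illustrative assumptions.
def _sketch_select_profiles(block, profileList=None, profileRange=None):

    if profileList is not None:
        # keep only the listed profile indexes
        return block[:, list(profileList), :]

    if profileRange is not None:
        # keep the inclusive [minIndex, maxIndex] range of profiles
        minIndex, maxIndex = profileRange
        return block[:, minIndex:maxIndex + 1, :]

    raise ValueError("profileList or profileRange must be given")
# ---------------------------------------------------------------------------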
922 902 class Reshaper(Operation):
923 903
924 904 def __init__(self):
925 905
926 906 Operation.__init__(self)
927 907 self.updateNewHeights = True
928 908
929 909 def run(self, dataOut, shape):
930 910
931 911 if not dataOut.flagDataAsBlock:
932 912 raise ValueError, "Reshaper can only be used when voltage data has been read as a block (getBlock = True)"
933 913
934 914 if len(shape) != 3:
935 915 raise ValueError, "len(shape) should be equal to 3: (nChannels, nProfiles, nHeis)"
936 916
937 917 shape_tuple = tuple(shape)
938 918 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
939 919 dataOut.flagNoData = False
940 920
941 921 if self.updateNewHeights:
942 922
943 923 old_nheights = dataOut.nHeights
944 924 new_nheights = dataOut.data.shape[2]
945 925 factor = 1.0*new_nheights / old_nheights
946 926
947 927 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
948 928
949 929 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * factor
950 930
951 931 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
952 932
953 933 dataOut.nProfiles = dataOut.data.shape[1]
954 934
955 935 dataOut.ippSeconds *= factor
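# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): after Reshaper changes
# the block to a new (nChannels, nProfiles, nHeights) shape, the height list
# is stretched by the ratio of new to old heights and ippSeconds is scaled by
# the same factor, keeping the height spacing constant. The helper name and
# parameters are illustrative assumptions.
def _sketch_reshape_heights(heightList, old_nheights, new_nheights, ippSeconds):

    factor = 1.0 * new_nheights / old_nheights

    # extend the height axis with the same spacing, scaled by the factor
    deltaHeight = heightList[1] - heightList[0]
    xf = heightList[0] + old_nheights * deltaHeight * factor
    newHeightList = numpy.arange(heightList[0], xf, deltaHeight)

    return newHeightList, ippSeconds * factor
# ---------------------------------------------------------------------------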
956 936 #
957 937 # import collections
958 938 # from scipy.stats import mode
959 939 #
960 940 # class Synchronize(Operation):
961 941 #
962 942 # isConfig = False
963 943 # __profIndex = 0
964 944 #
965 945 # def __init__(self):
966 946 #
967 947 # Operation.__init__(self)
968 948 # # self.isConfig = False
969 949 # self.__powBuffer = None
970 950 # self.__startIndex = 0
971 951 # self.__pulseFound = False
972 952 #
973 953 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
974 954 #
975 955 # #Read data
976 956 #
977 957 # powerdB = dataOut.getPower(channel = channel)
978 958 # noisedB = dataOut.getNoise(channel = channel)[0]
979 959 #
980 960 # self.__powBuffer.extend(powerdB.flatten())
981 961 #
982 962 # dataArray = numpy.array(self.__powBuffer)
983 963 #
984 964 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
985 965 #
986 966 # maxValue = numpy.nanmax(filteredPower)
987 967 #
988 968 # if maxValue < noisedB + 10:
989 969 # #No transmission pulse was found
990 970 # return None
991 971 #
992 972 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
993 973 #
994 974 # if len(maxValuesIndex) < 2:
995 975 # #Only a single one-baud transmission pulse was found, waiting for the next TX
996 976 # return None
997 977 #
998 978 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
999 979 #
1000 980 # #Keep only values spaced nSamples apart
1001 981 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1002 982 #
1003 983 # if len(pulseIndex) < 2:
1004 984 # #Only one transmission pulse wider than one sample was found
1005 985 # return None
1006 986 #
1007 987 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1008 988 #
1009 989 # #remove signals spaced less than 10 units or samples apart
1010 990 # #(there should be no IPP shorter than 10 units)
1011 991 #
1012 992 # realIndex = numpy.where(spacing > 10 )[0]
1013 993 #
1014 994 # if len(realIndex) < 2:
1015 995 # #Only one transmission pulse wider than one sample was found
1016 996 # return None
1017 997 #
1018 998 # #Discard wide pulses (keep only the spacing between IPPs)
1019 999 # realPulseIndex = pulseIndex[realIndex]
1020 1000 #
1021 1001 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1022 1002 #
1023 1003 # print "IPP = %d samples" %period
1024 1004 #
1025 1005 # self.__newNSamples = dataOut.nHeights #int(period)
1026 1006 # self.__startIndex = int(realPulseIndex[0])
1027 1007 #
1028 1008 # return 1
1029 1009 #
1030 1010 #
1031 1011 # def setup(self, nSamples, nChannels, buffer_size = 4):
1032 1012 #
1033 1013 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1034 1014 # maxlen = buffer_size*nSamples)
1035 1015 #
1036 1016 # bufferList = []
1037 1017 #
1038 1018 # for i in range(nChannels):
1039 1019 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1040 1020 # maxlen = buffer_size*nSamples)
1041 1021 #
1042 1022 # bufferList.append(bufferByChannel)
1043 1023 #
1044 1024 # self.__nSamples = nSamples
1045 1025 # self.__nChannels = nChannels
1046 1026 # self.__bufferList = bufferList
1047 1027 #
1048 1028 # def run(self, dataOut, channel = 0):
1049 1029 #
1050 1030 # if not self.isConfig:
1051 1031 # nSamples = dataOut.nHeights
1052 1032 # nChannels = dataOut.nChannels
1053 1033 # self.setup(nSamples, nChannels)
1054 1034 # self.isConfig = True
1055 1035 #
1056 1036 # #Append new data to internal buffer
1057 1037 # for thisChannel in range(self.__nChannels):
1058 1038 # bufferByChannel = self.__bufferList[thisChannel]
1059 1039 # bufferByChannel.extend(dataOut.data[thisChannel])
1060 1040 #
1061 1041 # if self.__pulseFound:
1062 1042 # self.__startIndex -= self.__nSamples
1063 1043 #
1064 1044 # #Finding Tx Pulse
1065 1045 # if not self.__pulseFound:
1066 1046 # indexFound = self.__findTxPulse(dataOut, channel)
1067 1047 #
1068 1048 # if indexFound == None:
1069 1049 # dataOut.flagNoData = True
1070 1050 # return
1071 1051 #
1072 1052 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1073 1053 # self.__pulseFound = True
1074 1054 # self.__startIndex = indexFound
1075 1055 #
1076 1056 # #If pulse was found ...
1077 1057 # for thisChannel in range(self.__nChannels):
1078 1058 # bufferByChannel = self.__bufferList[thisChannel]
1079 1059 # #print self.__startIndex
1080 1060 # x = numpy.array(bufferByChannel)
1081 1061 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1082 1062 #
1083 1063 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1084 1064 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1085 1065 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1086 1066 #
1087 1067 # dataOut.data = self.__arrayBuffer
1088 1068 #
1089 1069 # self.__startIndex += self.__newNSamples
1090 1070 #
1091 1071 # return No newline at end of file
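# ---------------------------------------------------------------------------
# Minimal sketch (not part of the original module): the commented-out
# Synchronize class above looks for the transmit pulse by thresholding the
# power series close to its maximum and estimating the repetition period
# from the spacing of the detected peaks. A simplified version of that idea,
# assuming "power" is a 1-D float array; the helper name and the min_spacing
# parameter are illustrative assumptions.
def _sketch_find_pulse_period(power, min_spacing=10):

    maxValue = numpy.nanmax(power)

    # samples within 10% of the maximum are pulse candidates
    candidates = numpy.where(power > maxValue - 0.1 * abs(maxValue))[0]

    if len(candidates) < 2:
        return None

    spacing = candidates[1:] - candidates[:-1]

    # discard spacings shorter than min_spacing samples (same pulse)
    spacing = spacing[spacing > min_spacing]

    if len(spacing) == 0:
        return None

    # take the most frequent spacing as the inter-pulse period, in samples
    return int(numpy.bincount(spacing).argmax())
# ---------------------------------------------------------------------------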