Bug fixed:...
Miguel Valdez - r624:bde47180e86e
@@ -1,1124 +1,1132
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import copy
8 8 import numpy
9 9 import datetime
10 10
11 11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12
13 13 def getNumpyDtype(dataTypeCode):
14 14
15 15 if dataTypeCode == 0:
16 16 numpyDtype = numpy.dtype([('real','<i1'),('imag','<i1')])
17 17 elif dataTypeCode == 1:
18 18 numpyDtype = numpy.dtype([('real','<i2'),('imag','<i2')])
19 19 elif dataTypeCode == 2:
20 20 numpyDtype = numpy.dtype([('real','<i4'),('imag','<i4')])
21 21 elif dataTypeCode == 3:
22 22 numpyDtype = numpy.dtype([('real','<i8'),('imag','<i8')])
23 23 elif dataTypeCode == 4:
24 24 numpyDtype = numpy.dtype([('real','<f4'),('imag','<f4')])
25 25 elif dataTypeCode == 5:
26 26 numpyDtype = numpy.dtype([('real','<f8'),('imag','<f8')])
27 27 else:
28 28 raise ValueError, 'dataTypeCode was not defined'
29 29
30 30 return numpyDtype
31 31
32 32 def getDataTypeCode(numpyDtype):
33 33
34 34 if numpyDtype == numpy.dtype([('real','<i1'),('imag','<i1')]):
35 35 datatype = 0
36 36 elif numpyDtype == numpy.dtype([('real','<i2'),('imag','<i2')]):
37 37 datatype = 1
38 38 elif numpyDtype == numpy.dtype([('real','<i4'),('imag','<i4')]):
39 39 datatype = 2
40 40 elif numpyDtype == numpy.dtype([('real','<i8'),('imag','<i8')]):
41 41 datatype = 3
42 42 elif numpyDtype == numpy.dtype([('real','<f4'),('imag','<f4')]):
43 43 datatype = 4
44 44 elif numpyDtype == numpy.dtype([('real','<f8'),('imag','<f8')]):
45 45 datatype = 5
46 46 else:
47 47 datatype = None
48 48
49 49 return datatype
50 50
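A minimal round-trip sketch for the two helpers above (the dtype code used here is illustrative):

import numpy

# dataTypeCode 4 corresponds to complex float32 samples in this mapping
dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
assert getNumpyDtype(4) == dtype4
assert getDataTypeCode(dtype4) == 4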
51 51 def hildebrand_sekhon(data, navg):
52 52 """
53 53 This method is for the objective determination of the noise level in Doppler spectra. This
54 54 implementation technique is based on the fact that the standard deviation of the spectral
55 55 densities is equal to the mean spectral density for white Gaussian noise
56 56
57 57 Inputs:
58 58 data : spectral density values (the array is flattened and sorted internally)
59 59 navg : number of averages
60 60
61 61 Return:
62 62 -1 : any error
63 63 anoise : noise level
64 64 """
65 65
66 66 sortdata = numpy.sort(data,axis=None)
67 67 lenOfData = len(sortdata)
68 68 nums_min = lenOfData/10
69 69
70 70 if (lenOfData/10) > 2:
71 71 nums_min = lenOfData/10
72 72 else:
73 73 nums_min = 2
74 74
75 75 sump = 0.
76 76
77 77 sumq = 0.
78 78
79 79 j = 0
80 80
81 81 cont = 1
82 82
83 83 while((cont==1)and(j<lenOfData)):
84 84
85 85 sump += sortdata[j]
86 86
87 87 sumq += sortdata[j]**2
88 88
89 89 if j > nums_min:
90 90 rtest = float(j)/(j-1) + 1.0/navg
91 91 if ((sumq*j) > (rtest*sump**2)):
92 92 j = j - 1
93 93 sump = sump - sortdata[j]
94 94 sumq = sumq - sortdata[j]**2
95 95 cont = 0
96 96
97 97 j += 1
98 98
99 99 lnoise = sump /j
100 100 stdv = numpy.sqrt((sumq - lnoise**2)/(j - 1))
101 101 return lnoise
102 102
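A quick sanity check of hildebrand_sekhon on synthetic white Gaussian noise (a sketch; the seed, array size and navg value are arbitrary):

import numpy

numpy.random.seed(0)
navg = 16
# average navg squared-magnitude spectra so the relative variance matches navg
spectra = numpy.mean(numpy.abs(numpy.random.randn(navg, 1024))**2, axis=0)
noise_level = hildebrand_sekhon(spectra, navg)
# with pure white noise the estimate should come out close to the true mean density (~1)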
103 103 class Beam:
104 104 def __init__(self):
105 105 self.codeList = []
106 106 self.azimuthList = []
107 107 self.zenithList = []
108 108
109 109 class GenericData(object):
110 110
111 111 flagNoData = True
112 112
113 113 def __init__(self):
114 114
115 115 raise ValueError, "This class has not been implemented"
116 116
117 117 def copy(self, inputObj=None):
118 118
119 119 if inputObj == None:
120 120 return copy.deepcopy(self)
121 121
122 122 for key in inputObj.__dict__.keys():
123 123 self.__dict__[key] = inputObj.__dict__[key]
124 124
125 125 def deepcopy(self):
126 126
127 127 return copy.deepcopy(self)
128 128
129 129 def isEmpty(self):
130 130
131 131 return self.flagNoData
132 132
133 133 class JROData(GenericData):
134 134
135 135 # m_BasicHeader = BasicHeader()
136 136 # m_ProcessingHeader = ProcessingHeader()
137 137
138 138 systemHeaderObj = SystemHeader()
139 139
140 140 radarControllerHeaderObj = RadarControllerHeader()
141 141
142 142 # data = None
143 143
144 144 type = None
145 145
146 146 datatype = None #dtype but in string
147 147
148 148 # dtype = None
149 149
150 150 # nChannels = None
151 151
152 152 # nHeights = None
153 153
154 154 nProfiles = None
155 155
156 156 heightList = None
157 157
158 158 channelList = None
159 159
160 160 flagDiscontinuousBlock = False
161 161
162 162 useLocalTime = False
163 163
164 164 utctime = None
165 165
166 166 timeZone = None
167 167
168 168 dstFlag = None
169 169
170 170 errorCount = None
171 171
172 172 blocksize = None
173 173
174 174 # nCode = None
175 175 #
176 176 # nBaud = None
177 177 #
178 178 # code = None
179 179
180 180 flagDecodeData = False #assume the data has not been decoded
181 181 
182 182 flagDeflipData = False #assume the data has not been deflipped
183 183
184 184 flagShiftFFT = False
185 185
186 186 # ippSeconds = None
187 187
188 188 # timeInterval = None
189 189
190 190 nCohInt = None
191 191
192 192 # noise = None
193 193
194 194 windowOfFilter = 1
195 195
196 196 #Speed of light
197 197 C = 3e8
198 198
199 199 frequency = 49.92e6
200 200
201 201 realtime = False
202 202
203 203 beacon_heiIndexList = None
204 204
205 205 last_block = None
206 206
207 207 blocknow = None
208 208
209 209 azimuth = None
210 210
211 211 zenith = None
212 212
213 213 beam = Beam()
214 214
215 215 profileIndex = None
216 216
217 217 def __init__(self):
218 218
219 219 raise ValueError, "This class has not been implemented"
220 220
221 221 def getNoise(self):
222 222
223 223 raise ValueError, "Not implemented"
224 224
225 225 def getNChannels(self):
226 226
227 227 return len(self.channelList)
228 228
229 229 def getChannelIndexList(self):
230 230
231 231 return range(self.nChannels)
232 232
233 233 def getNHeights(self):
234 234
235 235 return len(self.heightList)
236 236
237 237 def getHeiRange(self, extrapoints=0):
238 238
239 239 heis = self.heightList
240 240 # deltah = self.heightList[1] - self.heightList[0]
241 241 #
242 242 # heis.append(self.heightList[-1])
243 243
244 244 return heis
245 245
246 246 def getltctime(self):
247 247
248 248 if self.useLocalTime:
249 249 return self.utctime - self.timeZone*60
250 250
251 251 return self.utctime
252 252
253 253 def getDatatime(self):
254 254
255 255 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
256 256 return datatimeValue
257 257
258 258 def getTimeRange(self):
259 259
260 260 datatime = []
261 261
262 262 datatime.append(self.ltctime)
263 263 datatime.append(self.ltctime + self.timeInterval+60)
264 264
265 265 datatime = numpy.array(datatime)
266 266
267 267 return datatime
268 268
269 269 def getFmax(self):
270 270
271 271 PRF = 1./(self.ippSeconds * self.nCohInt)
272 272
273 273 fmax = PRF/2.
274 274
275 275 return fmax
276 276
277 277 def getVmax(self):
278 278
279 279 _lambda = self.C/self.frequency
280 280
281 281 vmax = self.getFmax() * _lambda
282 282
283 283 return vmax
284 284
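A worked example of the two expressions above, with assumed values (ippSeconds = 1 ms, nCohInt = 2, frequency = 49.92 MHz):

PRF = 1./(1e-3 * 2)        # 500 Hz effective pulse repetition frequency
fmax = PRF/2.              # 250 Hz maximum unambiguous Doppler frequency
_lambda = 3e8/49.92e6      # ~6.01 m wavelength
vmax = fmax * _lambda      # ~1502 m/s, as returned by getVmax()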
285 285 def get_ippSeconds(self):
286 286 '''
287 287 '''
288 288 return self.radarControllerHeaderObj.ippSeconds
289 289
290 290 def set_ippSeconds(self, ippSeconds):
291 291 '''
292 292 '''
293 293
294 294 self.radarControllerHeaderObj.ippSeconds = ippSeconds
295 295
296 296 return
297 297
298 298 def get_dtype(self):
299 299 '''
300 300 '''
301 301 return getNumpyDtype(self.datatype)
302 302
303 303 def set_dtype(self, numpyDtype):
304 304 '''
305 305 '''
306 306
307 307 self.datatype = getDataTypeCode(numpyDtype)
308 308
309 309 def get_code(self):
310 310 '''
311 311 '''
312 312 return self.radarControllerHeaderObj.code
313 313
314 314 def set_code(self, code):
315 315 '''
316 316 '''
317 317 self.radarControllerHeaderObj.code = code
318 318
319 319 return
320 320
321 321 def get_ncode(self):
322 322 '''
323 323 '''
324 324 return self.radarControllerHeaderObj.nCode
325 325
326 326 def set_ncode(self, nCode):
327 327 '''
328 328 '''
329 329 self.radarControllerHeaderObj.nCode = nCode
330 330
331 331 return
332 332
333 333 def get_nbaud(self):
334 334 '''
335 335 '''
336 336 return self.radarControllerHeaderObj.nBaud
337 337
338 338 def set_nbaud(self, nBaud):
339 339 '''
340 340 '''
341 341 self.radarControllerHeaderObj.nBaud = nBaud
342 342
343 343 return
344 344 # def getTimeInterval(self):
345 345 #
346 346 # raise IOError, "This method should be implemented inside each Class"
347 347
348 348 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
349 349 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
350 350 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
351 351 #noise = property(getNoise, "I'm the 'nHeights' property.")
352 352 datatime = property(getDatatime, "I'm the 'datatime' property")
353 353 ltctime = property(getltctime, "I'm the 'ltctime' property")
354 354 ippSeconds = property(get_ippSeconds, set_ippSeconds)
355 355 dtype = property(get_dtype, set_dtype)
356 356 # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
357 357 code = property(get_code, set_code)
358 358 nCode = property(get_ncode, set_ncode)
359 359 nBaud = property(get_nbaud, set_nbaud)
360 360
361 361 class Voltage(JROData):
362 362
363 363 #data is a 2-dimensional numpy array (channels, heights)
364 364 data = None
365 365
366 366 def __init__(self):
367 367 '''
368 368 Constructor
369 369 '''
370 370
371 371 self.useLocalTime = True
372 372
373 373 self.radarControllerHeaderObj = RadarControllerHeader()
374 374
375 375 self.systemHeaderObj = SystemHeader()
376 376
377 377 self.type = "Voltage"
378 378
379 379 self.data = None
380 380
381 381 # self.dtype = None
382 382
383 383 # self.nChannels = 0
384 384
385 385 # self.nHeights = 0
386 386
387 387 self.nProfiles = None
388 388
389 389 self.heightList = None
390 390
391 391 self.channelList = None
392 392
393 393 # self.channelIndexList = None
394 394
395 395 self.flagNoData = True
396 396
397 397 self.flagDiscontinuousBlock = False
398 398
399 399 self.utctime = None
400 400
401 401 self.timeZone = None
402 402
403 403 self.dstFlag = None
404 404
405 405 self.errorCount = None
406 406
407 407 self.nCohInt = None
408 408
409 409 self.blocksize = None
410 410
411 411 self.flagDecodeData = False #assume the data has not been decoded
412 412 
413 413 self.flagDeflipData = False #assume the data has not been deflipped
414 414
415 415 self.flagShiftFFT = False
416 416
417 417 self.flagDataAsBlock = False #Assume the data is read profile by profile
418 418
419 419 self.profileIndex = 0
420 420
421 421 def getNoisebyHildebrand(self, channel = None):
422 422 """
423 423 Determine the noise level using the Hildebrand-Sekhon method
424 424
425 425 Return:
426 426 noiselevel
427 427 """
428 428
429 429 if channel != None:
430 430 data = self.data[channel]
431 431 nChannels = 1
432 432 else:
433 433 data = self.data
434 434 nChannels = self.nChannels
435 435
436 436 noise = numpy.zeros(nChannels)
437 437 power = data * numpy.conjugate(data)
438 438
439 439 for thisChannel in range(nChannels):
440 440 if nChannels == 1:
441 441 daux = power[:].real
442 442 else:
443 443 daux = power[thisChannel,:].real
444 444 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
445 445
446 446 return noise
447 447
448 448 def getNoise(self, type = 1, channel = None):
449 449
450 450 if type == 1:
451 451 noise = self.getNoisebyHildebrand(channel)
452 452
453 453 return 10*numpy.log10(noise)
454 454
455 455 def getPower(self, channel = None):
456 456
457 457 if channel != None:
458 458 data = self.data[channel]
459 459 else:
460 460 data = self.data
461 461
462 462 power = data * numpy.conjugate(data)
463 463
464 464 return 10*numpy.log10(power.real)
465 465
466 466 def getTimeInterval(self):
467 467
468 468 timeInterval = self.ippSeconds * self.nCohInt
469 469
470 470 return timeInterval
471 471
472 472 noise = property(getNoise, "I'm the 'nHeights' property.")
473 473 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
474 474
475 475 class Spectra(JROData):
476 476
477 477 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
478 478 data_spc = None
479 479
480 480 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
481 481 data_cspc = None
482 482
483 483 #data_dc is a 2-dimensional numpy array (channels, heights)
484 484 data_dc = None
485 485
486 486 nFFTPoints = None
487 487
488 488 # nPairs = None
489 489
490 490 pairsList = None
491 491
492 492 nIncohInt = None
493 493
494 494 wavelength = None #Needed to compute the velocity range from the frequency
495 495
496 496 nCohInt = None #required to determine the timeInterval value
497 497
498 498 ippFactor = None
499 499
500 500 profileIndex = 0
501 501
502 502 def __init__(self):
503 503 '''
504 504 Constructor
505 505 '''
506 506
507 507 self.useLocalTime = True
508 508
509 509 self.radarControllerHeaderObj = RadarControllerHeader()
510 510
511 511 self.systemHeaderObj = SystemHeader()
512 512
513 513 self.type = "Spectra"
514 514
515 515 # self.data = None
516 516
517 517 # self.dtype = None
518 518
519 519 # self.nChannels = 0
520 520
521 521 # self.nHeights = 0
522 522
523 523 self.nProfiles = None
524 524
525 525 self.heightList = None
526 526
527 527 self.channelList = None
528 528
529 529 # self.channelIndexList = None
530 530
531 531 self.pairsList = None
532 532
533 533 self.flagNoData = True
534 534
535 535 self.flagDiscontinuousBlock = False
536 536
537 537 self.utctime = None
538 538
539 539 self.nCohInt = None
540 540
541 541 self.nIncohInt = None
542 542
543 543 self.blocksize = None
544 544
545 545 self.nFFTPoints = None
546 546
547 547 self.wavelength = None
548 548
549 549 self.flagDecodeData = False #assume the data has not been decoded
550 550 
551 551 self.flagDeflipData = False #assume the data has not been deflipped
552 552
553 553 self.flagShiftFFT = False
554 554
555 555 self.ippFactor = 1
556 556
557 557 #self.noise = None
558 558
559 559 self.beacon_heiIndexList = []
560 560
561 561 self.noise_estimation = None
562 562
563 563
564 564 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
565 565 """
566 566 Determine the noise level using the Hildebrand-Sekhon method
567 567
568 568 Return:
569 569 noiselevel
570 570 """
571 571
572 572 noise = numpy.zeros(self.nChannels)
573 573
574 574 for channel in range(self.nChannels):
575 575 daux = self.data_spc[channel,xmin_index:xmax_index,ymin_index:ymax_index]
576 576 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
577 577
578 578 return noise
579 579
580 580 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
581 581
582 582 if self.noise_estimation != None:
583 583 return self.noise_estimation #this was estimated by getNoise Operation defined in jroproc_spectra.py
584 584 else:
585 585 noise = self.getNoisebyHildebrand(xmin_index, xmax_index, ymin_index, ymax_index)
586 586 return noise
587 587
588 588
589 589 def getFreqRange(self, extrapoints=0):
590 590
591 591 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
592 592 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
593 593
594 594 return freqrange
595 595
596 596 def getVelRange(self, extrapoints=0):
597 597
598 598 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
599 599 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
600 600
601 601 return velrange
602 602
603 603 def getNPairs(self):
604 604
605 605 return len(self.pairsList)
606 606
607 607 def getPairsIndexList(self):
608 608
609 609 return range(self.nPairs)
610 610
611 611 def getNormFactor(self):
612
612 613 pwcode = 1
614
613 615 if self.flagDecodeData:
614 616 pwcode = numpy.sum(self.code[0]**2)
615 617 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
616 618 normFactor = self.nProfiles*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
617 619
618 620 return normFactor
619 621
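A worked example for the normalization factor above, with assumed processing parameters (128 profiles, 10 incoherent and 2 coherent integrations, a decoded 13-baud Barker code, window of filter 1):

pwcode = 13                               # sum of squared chips of a 13-baud Barker code
normFactor = 128 * 10 * 2 * pwcode * 1    # = 33280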
620 622 def getFlagCspc(self):
621 623
622 624 if self.data_cspc is None:
623 625 return True
624 626
625 627 return False
626 628
627 629 def getFlagDc(self):
628 630
629 631 if self.data_dc is None:
630 632 return True
631 633
632 634 return False
633 635
634 636 def getTimeInterval(self):
635 637
636 638 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles
637 639
638 640 return timeInterval
639 641
640 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
641 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
642 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
643 flag_cspc = property(getFlagCspc)
644 flag_dc = property(getFlagDc)
645 noise = property(getNoise, "I'm the 'nHeights' property.")
646 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
642 def setValue(self, value):
643
644 print "This property should not be initialized"
645
646 return
647
648 nPairs = property(getNPairs, setValue, "I'm the 'nPairs' property.")
649 pairsIndexList = property(getPairsIndexList, setValue, "I'm the 'pairsIndexList' property.")
650 normFactor = property(getNormFactor, setValue, "I'm the 'getNormFactor' property.")
651 flag_cspc = property(getFlagCspc, setValue)
652 flag_dc = property(getFlagDc, setValue)
653 noise = property(getNoise, setValue, "I'm the 'nHeights' property.")
654 timeInterval = property(getTimeInterval, setValue, "I'm the 'timeInterval' property")
647 655
648 656 class SpectraHeis(Spectra):
649 657
650 658 data_spc = None
651 659
652 660 data_cspc = None
653 661
654 662 data_dc = None
655 663
656 664 nFFTPoints = None
657 665
658 666 # nPairs = None
659 667
660 668 pairsList = None
661 669
662 670 nCohInt = None
663 671
664 672 nIncohInt = None
665 673
666 674 def __init__(self):
667 675
668 676 self.radarControllerHeaderObj = RadarControllerHeader()
669 677
670 678 self.systemHeaderObj = SystemHeader()
671 679
672 680 self.type = "SpectraHeis"
673 681
674 682 # self.dtype = None
675 683
676 684 # self.nChannels = 0
677 685
678 686 # self.nHeights = 0
679 687
680 688 self.nProfiles = None
681 689
682 690 self.heightList = None
683 691
684 692 self.channelList = None
685 693
686 694 # self.channelIndexList = None
687 695
688 696 self.flagNoData = True
689 697
690 698 self.flagDiscontinuousBlock = False
691 699
692 700 # self.nPairs = 0
693 701
694 702 self.utctime = None
695 703
696 704 self.blocksize = None
697 705
698 706 self.profileIndex = 0
699 707
700 708 self.nCohInt = 1
701 709
702 710 self.nIncohInt = 1
703 711
704 712 def getNormFactor(self):
705 713 pwcode = 1
706 714 if self.flagDecodeData:
707 715 pwcode = numpy.sum(self.code[0]**2)
708 716
709 717 normFactor = self.nIncohInt*self.nCohInt*pwcode
710 718
711 719 return normFactor
712 720
713 721 def getTimeInterval(self):
714 722
715 723 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
716 724
717 725 return timeInterval
718 726
719 727 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
720 728 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
721 729
722 730 class Fits(JROData):
723 731
724 732 heightList = None
725 733
726 734 channelList = None
727 735
728 736 flagNoData = True
729 737
730 738 flagDiscontinuousBlock = False
731 739
732 740 useLocalTime = False
733 741
734 742 utctime = None
735 743
736 744 timeZone = None
737 745
738 746 # ippSeconds = None
739 747
740 748 # timeInterval = None
741 749
742 750 nCohInt = None
743 751
744 752 nIncohInt = None
745 753
746 754 noise = None
747 755
748 756 windowOfFilter = 1
749 757
750 758 #Speed of light
751 759 C = 3e8
752 760
753 761 frequency = 49.92e6
754 762
755 763 realtime = False
756 764
757 765
758 766 def __init__(self):
759 767
760 768 self.type = "Fits"
761 769
762 770 self.nProfiles = None
763 771
764 772 self.heightList = None
765 773
766 774 self.channelList = None
767 775
768 776 # self.channelIndexList = None
769 777
770 778 self.flagNoData = True
771 779
772 780 self.utctime = None
773 781
774 782 self.nCohInt = 1
775 783
776 784 self.nIncohInt = 1
777 785
778 786 self.useLocalTime = True
779 787
780 788 self.profileIndex = 0
781 789
782 790 # self.utctime = None
783 791 # self.timeZone = None
784 792 # self.ltctime = None
785 793 # self.timeInterval = None
786 794 # self.header = None
787 795 # self.data_header = None
788 796 # self.data = None
789 797 # self.datatime = None
790 798 # self.flagNoData = False
791 799 # self.expName = ''
792 800 # self.nChannels = None
793 801 # self.nSamples = None
794 802 # self.dataBlocksPerFile = None
795 803 # self.comments = ''
796 804 #
797 805
798 806
799 807 def getltctime(self):
800 808
801 809 if self.useLocalTime:
802 810 return self.utctime - self.timeZone*60
803 811
804 812 return self.utctime
805 813
806 814 def getDatatime(self):
807 815
808 816 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
809 817 return datatime
810 818
811 819 def getTimeRange(self):
812 820
813 821 datatime = []
814 822
815 823 datatime.append(self.ltctime)
816 824 datatime.append(self.ltctime + self.timeInterval)
817 825
818 826 datatime = numpy.array(datatime)
819 827
820 828 return datatime
821 829
822 830 def getHeiRange(self):
823 831
824 832 heis = self.heightList
825 833
826 834 return heis
827 835
828 836 def isEmpty(self):
829 837
830 838 return self.flagNoData
831 839
832 840 def getNHeights(self):
833 841
834 842 return len(self.heightList)
835 843
836 844 def getNChannels(self):
837 845
838 846 return len(self.channelList)
839 847
840 848 def getChannelIndexList(self):
841 849
842 850 return range(self.nChannels)
843 851
844 852 def getNoise(self, type = 1):
845 853
846 854 #noise = numpy.zeros(self.nChannels)
847 855
848 856 if type == 1:
849 857 noise = self.getNoisebyHildebrand()
850 858
851 859 if type == 2:
852 860 noise = self.getNoisebySort()
853 861
854 862 if type == 3:
855 863 noise = self.getNoisebyWindow()
856 864
857 865 return noise
858 866
859 867 def getTimeInterval(self):
860 868
861 869 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
862 870
863 871 return timeInterval
864 872
865 873 datatime = property(getDatatime, "I'm the 'datatime' property")
866 874 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
867 875 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
868 876 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
869 877 noise = property(getNoise, "I'm the 'nHeights' property.")
870 878 datatime = property(getDatatime, "I'm the 'datatime' property")
871 879 ltctime = property(getltctime, "I'm the 'ltctime' property")
872 880 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
873 881
874 882 class Correlation(JROData):
875 883
876 884 noise = None
877 885
878 886 SNR = None
879 887
880 888 pairsAutoCorr = None #Pairs of Autocorrelation
881 889
882 890 #--------------------------------------------------
883 891
884 892 data_corr = None
885 893
886 894 data_volt = None
887 895
888 896 lagT = None # each element value is a profileIndex
889 897
890 898 lagR = None # each element value is in km
891 899
892 900 pairsList = None
893 901
894 902 calculateVelocity = None
895 903
896 904 nPoints = None
897 905
898 906 nAvg = None
899 907
900 908 bufferSize = None
901 909
902 910 def __init__(self):
903 911 '''
904 912 Constructor
905 913 '''
906 914 self.radarControllerHeaderObj = RadarControllerHeader()
907 915
908 916 self.systemHeaderObj = SystemHeader()
909 917
910 918 self.type = "Correlation"
911 919
912 920 self.data = None
913 921
914 922 self.dtype = None
915 923
916 924 self.nProfiles = None
917 925
918 926 self.heightList = None
919 927
920 928 self.channelList = None
921 929
922 930 self.flagNoData = True
923 931
924 932 self.flagDiscontinuousBlock = False
925 933
926 934 self.utctime = None
927 935
928 936 self.timeZone = None
929 937
930 938 self.dstFlag = None
931 939
932 940 self.errorCount = None
933 941
934 942 self.blocksize = None
935 943
936 944 self.flagDecodeData = False #assume the data has not been decoded
937 945 
938 946 self.flagDeflipData = False #assume the data has not been deflipped
939 947
940 948 self.pairsList = None
941 949
942 950 self.nPoints = None
943 951
944 952 def getLagTRange(self, extrapoints=0):
945 953
946 954 lagTRange = self.lagT
947 955 diff = lagTRange[1] - lagTRange[0]
948 956 extra = numpy.arange(1,extrapoints + 1)*diff + lagTRange[-1]
949 957 lagTRange = numpy.hstack((lagTRange, extra))
950 958
951 959 return lagTRange
952 960
953 961 def getLagRRange(self, extrapoints=0):
954 962
955 963 return self.lagR
956 964
957 965 def getPairsList(self):
958 966
959 967 return self.pairsList
960 968
961 969 def getCalculateVelocity(self):
962 970
963 971 return self.calculateVelocity
964 972
965 973 def getNPoints(self):
966 974
967 975 return self.nPoints
968 976
969 977 def getNAvg(self):
970 978
971 979 return self.nAvg
972 980
973 981 def getBufferSize(self):
974 982
975 983 return self.bufferSize
976 984
977 985 def getPairsAutoCorr(self):
978 986 pairsList = self.pairsList
979 987 pairsAutoCorr = numpy.zeros(self.nChannels, dtype = 'int')*numpy.nan
980 988
981 989 for l in range(len(pairsList)):
982 990 firstChannel = pairsList[l][0]
983 991 secondChannel = pairsList[l][1]
984 992
985 993 #Getting autocorrelation pairs
986 994 if firstChannel == secondChannel:
987 995 pairsAutoCorr[firstChannel] = int(l)
988 996
989 997 pairsAutoCorr = pairsAutoCorr.astype(int)
990 998
991 999 return pairsAutoCorr
992 1000
993 1001 def getNoise(self, mode = 2):
994 1002
995 1003 indR = numpy.where(self.lagR == 0)[0][0]
996 1004 indT = numpy.where(self.lagT == 0)[0][0]
997 1005
998 1006 jspectra0 = self.data_corr[:,:,indR,:]
999 1007 jspectra = copy.copy(jspectra0)
1000 1008
1001 1009 num_chan = jspectra.shape[0]
1002 1010 num_hei = jspectra.shape[2]
1003 1011
1004 1012 freq_dc = jspectra.shape[1]/2
1005 1013 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
1006 1014
1007 1015 if ind_vel[0]<0:
1008 1016 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
1009 1017
1010 1018 if mode == 1:
1011 1019 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
1012 1020
1013 1021 if mode == 2:
1014 1022
1015 1023 vel = numpy.array([-2,-1,1,2])
1016 1024 xx = numpy.zeros([4,4])
1017 1025
1018 1026 for fil in range(4):
1019 1027 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1020 1028
1021 1029 xx_inv = numpy.linalg.inv(xx)
1022 1030 xx_aux = xx_inv[0,:]
1023 1031
1024 1032 for ich in range(num_chan):
1025 1033 yy = jspectra[ich,ind_vel,:]
1026 1034 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1027 1035
1028 1036 junkid = jspectra[ich,freq_dc,:]<=0
1029 1037 cjunkid = sum(junkid)
1030 1038
1031 1039 if cjunkid.any():
1032 1040 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1033 1041
1034 1042 noise = jspectra0[:,freq_dc,:] - jspectra[:,freq_dc,:]
1035 1043
1036 1044 return noise
1037 1045
1038 1046 def getTimeInterval(self):
1039 1047
1040 1048 timeInterval = self.ippSeconds * self.nCohInt * self.nPoints
1041 1049
1042 1050 return timeInterval
1043 1051
1044 1052 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
1045 1053 # pairsList = property(getPairsList, "I'm the 'pairsList' property.")
1046 1054 # nPoints = property(getNPoints, "I'm the 'nPoints' property.")
1047 1055 calculateVelocity = property(getCalculateVelocity, "I'm the 'calculateVelocity' property.")
1048 1056 nAvg = property(getNAvg, "I'm the 'nAvg' property.")
1049 1057 bufferSize = property(getBufferSize, "I'm the 'bufferSize' property.")
1050 1058
1051 1059
1052 1060 class Parameters(JROData):
1053 1061
1054 1062 #Information from previous data
1055 1063
1056 1064 inputUnit = None #Type of data to be processed
1057 1065
1058 1066 operation = None #Type of operation to parametrize
1059 1067
1060 1068 normFactor = None #Normalization Factor
1061 1069
1062 1070 groupList = None #List of Pairs, Groups, etc
1063 1071
1064 1072 #Parameters
1065 1073
1066 1074 data_param = None #Parameters obtained
1067 1075
1068 1076 data_pre = None #Data Pre Parametrization
1069 1077
1070 1078 data_SNR = None #Signal to Noise Ratio
1071 1079
1072 1080 # heightRange = None #Heights
1073 1081
1074 1082 abscissaList = None #Abscissa, can be velocities, lags or time
1075 1083
1076 1084 noise = None #Noise power
1077 1085
1078 1086 utctimeInit = None #Initial UTC time
1079 1087
1080 1088 paramInterval = None #Time interval to calculate Parameters in seconds
1081 1089
1082 1090 #Fitting
1083 1091
1084 1092 data_error = None #Error of the estimation
1085 1093
1086 1094 constants = None
1087 1095
1088 1096 library = None
1089 1097
1090 1098 #Output signal
1091 1099
1092 1100 outputInterval = None #Time interval to calculate output signal in seconds
1093 1101
1094 1102 data_output = None #Out signal
1095 1103
1096 1104
1097 1105
1098 1106 def __init__(self):
1099 1107 '''
1100 1108 Constructor
1101 1109 '''
1102 1110 self.radarControllerHeaderObj = RadarControllerHeader()
1103 1111
1104 1112 self.systemHeaderObj = SystemHeader()
1105 1113
1106 1114 self.type = "Parameters"
1107 1115
1108 1116 def getTimeRange1(self):
1109 1117
1110 1118 datatime = []
1111 1119
1112 1120 if self.useLocalTime:
1113 1121 time1 = self.utctimeInit - self.timeZone*60
1114 1122 else:
1115 1123 time1 = self.utctimeInit
1116 1124
1117 1125 # datatime.append(self.utctimeInit)
1118 1126 # datatime.append(self.utctimeInit + self.outputInterval)
1119 1127 datatime.append(time1)
1120 1128 datatime.append(time1 + self.outputInterval)
1121 1129
1122 1130 datatime = numpy.array(datatime)
1123 1131
1124 1132 return datatime
@@ -1,679 +1,721
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import numpy
7 7 import copy
8 8 import datetime
9 9
10 10 SPEED_OF_LIGHT = 299792458
11 11 SPEED_OF_LIGHT = 3e8
12 12
13 13 BASIC_STRUCTURE = numpy.dtype([
14 14 ('nSize','<u4'),
15 15 ('nVersion','<u2'),
16 16 ('nDataBlockId','<u4'),
17 17 ('nUtime','<u4'),
18 18 ('nMilsec','<u2'),
19 19 ('nTimezone','<i2'),
20 20 ('nDstflag','<i2'),
21 21 ('nErrorCount','<u4')
22 22 ])
23 23
24 24 SYSTEM_STRUCTURE = numpy.dtype([
25 25 ('nSize','<u4'),
26 26 ('nNumSamples','<u4'),
27 27 ('nNumProfiles','<u4'),
28 28 ('nNumChannels','<u4'),
29 29 ('nADCResolution','<u4'),
30 30 ('nPCDIOBusWidth','<u4'),
31 31 ])
32 32
33 33 RADAR_STRUCTURE = numpy.dtype([
34 34 ('nSize','<u4'),
35 35 ('nExpType','<u4'),
36 36 ('nNTx','<u4'),
37 37 ('fIpp','<f4'),
38 38 ('fTxA','<f4'),
39 39 ('fTxB','<f4'),
40 40 ('nNumWindows','<u4'),
41 41 ('nNumTaus','<u4'),
42 42 ('nCodeType','<u4'),
43 43 ('nLine6Function','<u4'),
44 44 ('nLine5Function','<u4'),
45 45 ('fClock','<f4'),
46 46 ('nPrePulseBefore','<u4'),
47 47 ('nPrePulseAfter','<u4'),
48 48 ('sRangeIPP','<a20'),
49 49 ('sRangeTxA','<a20'),
50 50 ('sRangeTxB','<a20'),
51 51 ])
52 52
53 53 SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
54 54
55 55
56 56 PROCESSING_STRUCTURE = numpy.dtype([
57 57 ('nSize','<u4'),
58 58 ('nDataType','<u4'),
59 59 ('nSizeOfDataBlock','<u4'),
60 60 ('nProfilesperBlock','<u4'),
61 61 ('nDataBlocksperFile','<u4'),
62 62 ('nNumWindows','<u4'),
63 63 ('nProcessFlags','<u4'),
64 64 ('nCoherentIntegrations','<u4'),
65 65 ('nIncoherentIntegrations','<u4'),
66 66 ('nTotalSpectra','<u4')
67 67 ])
68 68
69 69 class Header(object):
70 70
71 71 def __init__(self):
72 72 raise
73 73
74 74 def copy(self):
75 75 return copy.deepcopy(self)
76 76
77 77 def read(self):
78 78
79 79 raise ValueError
80 80
81 81 def write(self):
82 82
83 83 raise ValueError
84 84
85 85 def printInfo(self):
86 86
87 87 print "#"*100
88 88 print self.__class__.__name__.upper()
89 89 print "#"*100
90 90 for key in self.__dict__.keys():
91 91 print "%s = %s" %(key, self.__dict__[key])
92 92
93 93 class BasicHeader(Header):
94 94
95 95 size = None
96 96 version = None
97 97 dataBlock = None
98 98 utc = None
99 99 ltc = None
100 100 miliSecond = None
101 101 timeZone = None
102 102 dstFlag = None
103 103 errorCount = None
104 104 datatime = None
105 105
106 106 __LOCALTIME = None
107 107
108 108 def __init__(self, useLocalTime=True):
109 109
110 110 self.size = 24
111 111 self.version = 0
112 112 self.dataBlock = 0
113 113 self.utc = 0
114 114 self.miliSecond = 0
115 115 self.timeZone = 0
116 116 self.dstFlag = 0
117 117 self.errorCount = 0
118 118
119 119 self.useLocalTime = useLocalTime
120 120
121 121 def read(self, fp):
122 122 try:
123 123
124 124 header = numpy.fromfile(fp, BASIC_STRUCTURE,1)
125 125
126 126 self.size = int(header['nSize'][0])
127 127 self.version = int(header['nVersion'][0])
128 128 self.dataBlock = int(header['nDataBlockId'][0])
129 129 self.utc = int(header['nUtime'][0])
130 130 self.miliSecond = int(header['nMilsec'][0])
131 131 self.timeZone = int(header['nTimezone'][0])
132 132 self.dstFlag = int(header['nDstflag'][0])
133 133 self.errorCount = int(header['nErrorCount'][0])
134 134
135 135 except Exception, e:
136 136 print "BasicHeader: "
137 137 print e
138 138 return 0
139 139
140 140 return 1
141 141
142 142 def write(self, fp):
143 143
144 144 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
145 145 header = numpy.array(headerTuple, BASIC_STRUCTURE)
146 146 header.tofile(fp)
147 147
148 148 return 1
149 149
150 150 def get_ltc(self):
151 151
152 152 return self.utc - self.timeZone*60
153 153
154 154 def set_ltc(self, value):
155 155
156 156 self.utc = value + self.timeZone*60
157 157
158 158 def get_datatime(self):
159 159
160 160 return datetime.datetime.utcfromtimestamp(self.ltc)
161 161
162 162 ltc = property(get_ltc, set_ltc)
163 163 datatime = property(get_datatime)
164 164
165 165 class SystemHeader(Header):
166 166
167 167 size = None
168 168 nSamples = None
169 169 nProfiles = None
170 170 nChannels = None
171 171 adcResolution = None
172 172 pciDioBusWidth = None
173 173
174 174 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
175 175
176 176 self.size = 24
177 177 self.nSamples = nSamples
178 178 self.nProfiles = nProfiles
179 179 self.nChannels = nChannels
180 180 self.adcResolution = adcResolution
181 181 self.pciDioBusWidth = pciDioBusWith
182 182
183 183 def read(self, fp):
184 184
185 startFp = fp.tell()
186
185 187 try:
186 188 header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1)
189
187 190 self.size = header['nSize'][0]
188 191 self.nSamples = header['nNumSamples'][0]
189 192 self.nProfiles = header['nNumProfiles'][0]
190 193 self.nChannels = header['nNumChannels'][0]
191 194 self.adcResolution = header['nADCResolution'][0]
192 195 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
193 196
194 197 except Exception, e:
195 198 print "SystemHeader: " + e
196 199 return 0
197 200
201 endFp = self.size + startFp
202
203 if fp.tell() != endFp:
204 raise IOError, "System Header is not consistent"
205
198 206 return 1
199 207
200 208 def write(self, fp):
201 209
202 210 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
203 211 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
204 212 header.tofile(fp)
205 213
206 214 return 1
207 215
208 216 class RadarControllerHeader(Header):
209 217
210 218 size = None
211 219 expType = None
212 220 nTx = None
213 221 ipp = None
214 222 txA = None
215 223 txB = None
216 224 nWindows = None
217 225 numTaus = None
218 226 codeType = None
219 227 line6Function = None
220 228 line5Function = None
221 229 fClock = None
222 230 prePulseBefore = None
223 231 prePulserAfter = None
224 232 rangeIpp = None
225 233 rangeTxA = None
226 234 rangeTxB = None
227 235
228 236 __size = None
229 237
230 238 def __init__(self, expType=2, nTx=1,
231 239 ippKm=None, txA=0, txB=0,
232 240 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
233 241 numTaus=0, line6Function=0, line5Function=0, fClock=None,
234 242 prePulseBefore=0, prePulseAfter=0,
235 243 codeType=0, nCode=0, nBaud=0, code=None,
236 244 flip1=0, flip2=0):
237 245
238 246 self.size = 116
239 247 self.expType = expType
240 248 self.nTx = nTx
241 249 self.ipp = ippKm
242 250 self.txA = txA
243 251 self.txB = txB
244 252 self.rangeIpp = ippKm
245 253 self.rangeTxA = txA
246 254 self.rangeTxB = txB
247 255
248 256 self.nWindows = nWindows
249 257 self.numTaus = numTaus
250 258 self.codeType = codeType
251 259 self.line6Function = line6Function
252 260 self.line5Function = line5Function
253 261 self.fClock = fClock
254 262 self.prePulseBefore = prePulseBefore
255 263 self.prePulserAfter = prePulseAfter
256 264
257 265 self.nHeights = nHeights
258 266 self.firstHeight = firstHeight
259 267 self.deltaHeight = deltaHeight
260 268 self.samplesWin = nHeights
261 269
262 270 self.nCode = nCode
263 271 self.nBaud = nBaud
264 272 self.code = code
265 273 self.flip1 = flip1
266 274 self.flip2 = flip2
267 275
268 276 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
269 277 # self.dynamic = numpy.array([],numpy.dtype('byte'))
270 278
271 279 if self.fClock is None and self.deltaHeight is not None:
272 280 self.fClock = 0.15/(deltaHeight*1e-6) #0.15Km / (height * 1u)
273 281
274 282 def read(self, fp):
275 283
276 try:
277 startFp = fp.tell()
278 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
279
280 size = int(header['nSize'][0])
281 self.expType = int(header['nExpType'][0])
282 self.nTx = int(header['nNTx'][0])
283 self.ipp = float(header['fIpp'][0])
284 self.txA = float(header['fTxA'][0])
285 self.txB = float(header['fTxB'][0])
286 self.nWindows = int(header['nNumWindows'][0])
287 self.numTaus = int(header['nNumTaus'][0])
288 self.codeType = int(header['nCodeType'][0])
289 self.line6Function = int(header['nLine6Function'][0])
290 self.line5Function = int(header['nLine5Function'][0])
291 self.fClock = float(header['fClock'][0])
292 self.prePulseBefore = int(header['nPrePulseBefore'][0])
293 self.prePulserAfter = int(header['nPrePulseAfter'][0])
294 self.rangeIpp = header['sRangeIPP'][0]
295 self.rangeTxA = header['sRangeTxA'][0]
296 self.rangeTxB = header['sRangeTxB'][0]
297 # jump Dynamic Radar Controller Header
298 # jumpFp = size - 116
299 # self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
300 #pointer backward to dynamic header and read
301 # backFp = fp.tell() - jumpFp
302 # fp.seek(backFp)
303
304 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
305
306 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
307 self.firstHeight = samplingWindow['h0']
308 self.deltaHeight = samplingWindow['dh']
309 self.samplesWin = samplingWindow['nsa']
284
285 startFp = fp.tell()
286 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
287
288 size = int(header['nSize'][0])
289 self.expType = int(header['nExpType'][0])
290 self.nTx = int(header['nNTx'][0])
291 self.ipp = float(header['fIpp'][0])
292 self.txA = float(header['fTxA'][0])
293 self.txB = float(header['fTxB'][0])
294 self.nWindows = int(header['nNumWindows'][0])
295 self.numTaus = int(header['nNumTaus'][0])
296 self.codeType = int(header['nCodeType'][0])
297 self.line6Function = int(header['nLine6Function'][0])
298 self.line5Function = int(header['nLine5Function'][0])
299 self.fClock = float(header['fClock'][0])
300 self.prePulseBefore = int(header['nPrePulseBefore'][0])
301 self.prePulserAfter = int(header['nPrePulseAfter'][0])
302 self.rangeIpp = header['sRangeIPP'][0]
303 self.rangeTxA = header['sRangeTxA'][0]
304 self.rangeTxB = header['sRangeTxB'][0]
305
306 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
307
308 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
309 self.firstHeight = samplingWindow['h0']
310 self.deltaHeight = samplingWindow['dh']
311 self.samplesWin = samplingWindow['nsa']
312
313 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
314
315 self.code_size = 0
316 if self.codeType != 0:
317 self.nCode = int(numpy.fromfile(fp,'<u4',1))
318 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
310 319
311 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
320 code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
321 for ic in range(self.nCode):
322 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
323 for ib in range(self.nBaud-1,-1,-1):
324 code[ic,ib] = temp[ib/32]%2
325 temp[ib/32] = temp[ib/32]/2
326
327 self.code = 2.0*code - 1.0
328 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
312 329
313 self.code_size = 0
314 if self.codeType != 0:
315 self.nCode = int(numpy.fromfile(fp,'<u4',1))
316 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
317 self.code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
318
319 for ic in range(self.nCode):
320 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
321 for ib in range(self.nBaud-1,-1,-1):
322 self.code[ic,ib] = temp[ib/32]%2
323 temp[ib/32] = temp[ib/32]/2
324 self.code = 2.0*self.code - 1.0
325 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
326
327 330 # if self.line5Function == RCfunction.FLIP:
328 331 # self.flip1 = numpy.fromfile(fp,'<u4',1)
329 332 #
330 333 # if self.line6Function == RCfunction.FLIP:
331 334 # self.flip2 = numpy.fromfile(fp,'<u4',1)
332
333 endFp = size + startFp
334 jumpFp = endFp - fp.tell()
335 if jumpFp > 0:
336 fp.seek(jumpFp)
337 335
338 except Exception, e:
339 print "RadarControllerHeader: " + e
340 return 0
336 endFp = size + startFp
341 337
338 if fp.tell() != endFp:
339 raise IOError, "Radar Controller Header is not consistent"
340
342 341 return 1
343 342
344 343 def write(self, fp):
345 344
346 345 headerTuple = (self.size,
347 346 self.expType,
348 347 self.nTx,
349 348 self.ipp,
350 349 self.txA,
351 350 self.txB,
352 351 self.nWindows,
353 352 self.numTaus,
354 353 self.codeType,
355 354 self.line6Function,
356 355 self.line5Function,
357 356 self.fClock,
358 357 self.prePulseBefore,
359 358 self.prePulserAfter,
360 359 self.rangeIpp,
361 360 self.rangeTxA,
362 361 self.rangeTxB)
363 362
364 363 header = numpy.array(headerTuple,RADAR_STRUCTURE)
365 364 header.tofile(fp)
366 365
367 366 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
368 367 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
369 368 samplingWindow.tofile(fp)
370 369
371 370 if self.numTaus > 0:
372 371 self.Taus.tofile(fp)
373 372
374 373 if self.codeType !=0:
375 374 nCode = numpy.array(self.nCode, '<u4')
376 375 nCode.tofile(fp)
377 376 nBaud = numpy.array(self.nBaud, '<u4')
378 377 nBaud.tofile(fp)
379 378 code1 = (self.code + 1.0)/2.
380 379
381 380 for ic in range(self.nCode):
382 381 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
383 382 start = 0
384 383 end = 32
385 384 for i in range(len(tempx)):
386 385 code_selected = code1[ic,start:end]
387 386 for j in range(len(code_selected)-1,-1,-1):
388 387 if code_selected[j] == 1:
389 388 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
390 389 start = start + 32
391 390 end = end + 32
392 391
393 392 tempx = tempx.astype('u4')
394 393 tempx.tofile(fp)
395 394
396 395 # if self.line5Function == RCfunction.FLIP:
397 396 # self.flip1.tofile(fp)
398 397 #
399 398 # if self.line6Function == RCfunction.FLIP:
400 399 # self.flip2.tofile(fp)
401 400
402 401 return 1
403 402
404 403 def get_ippSeconds(self):
405 404 '''
406 405 '''
407 406 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
408 407
409 408 return ippSeconds
410 409
411 410 def set_ippSeconds(self, ippSeconds):
412 411 '''
413 412 '''
414 413
415 414 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)
416 415
417 416 return
418 417
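A worked conversion for the ippSeconds property above, assuming an inter-pulse period of 150 km:

SPEED_OF_LIGHT = 3e8
ipp_km = 150.
ippSeconds = 2.0 * 1000 * ipp_km / SPEED_OF_LIGHT      # = 1e-3 s (two-way travel time)
ipp_back = ippSeconds * SPEED_OF_LIGHT / (2.0 * 1000)  # = 150.0 km, the inverse mapping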
419 418 def get_size(self):
420 419
421 420 self.__size = 116 + 12*self.nWindows + 4*self.numTaus
422 421
423 422 if self.codeType != 0:
424 423 self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
425 424
426 425 return self.__size
427 426
428 427 def set_size(self, value):
429 428
430 429 self.__size = value
431 430
432 431 return
433 432
434 433 ippSeconds = property(get_ippSeconds, set_ippSeconds)
435 434 size = property(get_size, set_size)
436 435
437 436 class ProcessingHeader(Header):
438 437
439 size = None
438 # size = None
440 439 dtype = None
441 440 blockSize = None
442 441 profilesPerBlock = None
443 442 dataBlocksPerFile = None
444 443 nWindows = None
445 444 processFlags = None
446 445 nCohInt = None
447 446 nIncohInt = None
448 447 totalSpectra = None
449 448
450 449 flag_dc = None
451 450 flag_cspc = None
452 451
453 452 def __init__(self):
454 453
455 self.size = 0
454 # self.size = 0
456 455 self.dtype = 0
457 456 self.blockSize = 0
458 457 self.profilesPerBlock = 0
459 458 self.dataBlocksPerFile = 0
460 459 self.nWindows = 0
461 460 self.processFlags = 0
462 461 self.nCohInt = 0
463 462 self.nIncohInt = 0
464 463 self.totalSpectra = 0
465 464
466 465 self.nHeights = 0
467 466 self.firstHeight = 0
468 467 self.deltaHeight = 0
469 468 self.samplesWin = 0
470 469 self.spectraComb = 0
471 # self.nCode = None
472 # self.code = None
473 # self.nBaud = None
470 self.nCode = None
471 self.code = None
472 self.nBaud = None
473
474 474 self.shif_fft = False
475 475 self.flag_dc = False
476 476 self.flag_cspc = False
477 self.flag_decode = False
478 self.flag_deflip = False
477 479
478 480 def read(self, fp):
479 # try:
481
482 startFp = fp.tell()
483
480 484 header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
481 self.size = int(header['nSize'][0])
485
486 size = int(header['nSize'][0])
482 487 self.dtype = int(header['nDataType'][0])
483 488 self.blockSize = int(header['nSizeOfDataBlock'][0])
484 489 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
485 490 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
486 491 self.nWindows = int(header['nNumWindows'][0])
487 492 self.processFlags = header['nProcessFlags']
488 493 self.nCohInt = int(header['nCoherentIntegrations'][0])
489 494 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
490 495 self.totalSpectra = int(header['nTotalSpectra'][0])
491 496
492 497 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
493 498
494 499 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
495 500 self.firstHeight = float(samplingWindow['h0'][0])
496 501 self.deltaHeight = float(samplingWindow['dh'][0])
497 502 self.samplesWin = samplingWindow['nsa'][0]
503
498 504 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
499 505
500 # if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
501 # self.nCode = int(numpy.fromfile(fp,'<u4',1))
502 # self.nBaud = int(numpy.fromfile(fp,'<u4',1))
503 # self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
506 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
507 self.nCode = int(numpy.fromfile(fp,'<u4',1))
508 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
509 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
504 510
511 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
512 exp_name_len = int(numpy.fromfile(fp,'<u4',1))
513 exp_name = numpy.fromfile(fp,'u1',exp_name_len+1)
514
505 515 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
506 516 self.shif_fft = True
507 517 else:
508 518 self.shif_fft = False
509
519
510 520 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
511 521 self.flag_dc = True
522 else:
523 self.flag_dc = False
524
525 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
526 self.flag_decode = True
527 else:
528 self.flag_decode = False
512 529
530 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
531 self.flag_deflip = True
532 else:
533 self.flag_deflip = False
534
513 535 nChannels = 0
514 536 nPairs = 0
515 537 pairList = []
516 538
517 539 for i in range( 0, self.totalSpectra*2, 2 ):
518 540 if self.spectraComb[i] == self.spectraComb[i+1]:
519 541 nChannels = nChannels + 1 #pair of equal channels (auto-spectrum)
520 542 else:
521 543 nPairs = nPairs + 1 #pair of different channels (cross-spectrum)
522 544 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
523 545
524 546 self.flag_cspc = False
525 547 if nPairs > 0:
526 548 self.flag_cspc = True
527
528 # except Exception, e:
529 # print "Error ProcessingHeader: "
530 # return 0
549
550 endFp = size + startFp
551
552 if fp.tell() != endFp:
553 raise IOError, "Processing Header is not consistent"
531 554
532 555 return 1
533 556
534 557 def write(self, fp):
558 #Clear DEFINE_PROCESS_CODE
559 # self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
535 560
536 561 headerTuple = (self.size,
537 562 self.dtype,
538 563 self.blockSize,
539 564 self.profilesPerBlock,
540 565 self.dataBlocksPerFile,
541 566 self.nWindows,
542 567 self.processFlags,
543 568 self.nCohInt,
544 569 self.nIncohInt,
545 570 self.totalSpectra)
546 571
547 572 header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
548 573 header.tofile(fp)
549 574
550 575 if self.nWindows != 0:
551 576 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
552 577 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
553 578 samplingWindow.tofile(fp)
554
555 579
556 580 if self.totalSpectra != 0:
557 spectraComb = numpy.array([],numpy.dtype('u1'))
581 # spectraComb = numpy.array([],numpy.dtype('u1'))
558 582 spectraComb = self.spectraComb
559 583 spectraComb.tofile(fp)
560 584
561 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
562 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
563 # nCode.tofile(fp)
564 #
565 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
566 # nBaud.tofile(fp)
567 #
568 # code = self.code.reshape(self.nCode*self.nBaud)
569 # code = code.astype(numpy.dtype('<f4'))
570 # code.tofile(fp)
585 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
586 nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Test with a data set that stores a code; this has not been verified yet
587 nCode.tofile(fp)
588
589 nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
590 nBaud.tofile(fp)
591
592 code = self.code.reshape(self.nCode*self.nBaud)
593 code = code.astype(numpy.dtype('<f4'))
594 code.tofile(fp)
571 595
572 596 return 1
573 597
598 def get_size(self):
599
600 self.__size = 40 + 12*self.nWindows + 2*self.totalSpectra
601
602 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
603 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
604 self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
605
606 return self.__size
607
608 def set_size(self, value):
609
610 self.__size = value
611
612 return
613
614 size = property(get_size, set_size)
615
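A worked example for the size computed above, with assumed values (1 sampling window, 4 total spectra, DEFINE_PROCESS_CODE not set):

size = 40 + 12*1 + 2*4    # = 60 bytes for this Processing Header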
574 616 class RCfunction:
575 617 NONE=0
576 618 FLIP=1
577 619 CODE=2
578 620 SAMPLING=3
579 621 LIN6DIV256=4
580 622 SYNCHRO=5
581 623
582 624 class nCodeType:
583 625 NONE=0
584 626 USERDEFINE=1
585 627 BARKER2=2
586 628 BARKER3=3
587 629 BARKER4=4
588 630 BARKER5=5
589 631 BARKER7=6
590 632 BARKER11=7
591 633 BARKER13=8
592 634 AC128=9
593 635 COMPLEMENTARYCODE2=10
594 636 COMPLEMENTARYCODE4=11
595 637 COMPLEMENTARYCODE8=12
596 638 COMPLEMENTARYCODE16=13
597 639 COMPLEMENTARYCODE32=14
598 640 COMPLEMENTARYCODE64=15
599 641 COMPLEMENTARYCODE128=16
600 642 CODE_BINARY28=17
601 643
602 644 class PROCFLAG:
603 645
604 646 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
605 647 DECODE_DATA = numpy.uint32(0x00000002)
606 648 SPECTRA_CALC = numpy.uint32(0x00000004)
607 649 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
608 650 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
609 651 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
610 652
611 653 DATATYPE_CHAR = numpy.uint32(0x00000040)
612 654 DATATYPE_SHORT = numpy.uint32(0x00000080)
613 655 DATATYPE_LONG = numpy.uint32(0x00000100)
614 656 DATATYPE_INT64 = numpy.uint32(0x00000200)
615 657 DATATYPE_FLOAT = numpy.uint32(0x00000400)
616 658 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
617 659
618 660 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
619 661 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
620 662 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
621 663
622 664 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
623 665 DEFLIP_DATA = numpy.uint32(0x00010000)
624 666 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
625 667
626 668 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
627 669 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
628 670 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
629 671 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
630 672 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
631 673
632 674 EXP_NAME_ESP = numpy.uint32(0x00200000)
633 675 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
634 676
635 677 OPERATION_MASK = numpy.uint32(0x0000003F)
636 678 DATATYPE_MASK = numpy.uint32(0x00000FC0)
637 679 DATAARRANGE_MASK = numpy.uint32(0x00007000)
638 680 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
639 681
640 682 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
641 683 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
642 684 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
643 685 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
644 686 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
645 687 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
646 688
647 689 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
648 690
649 691 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
650 692 PROCFLAG.DATATYPE_SHORT,
651 693 PROCFLAG.DATATYPE_LONG,
652 694 PROCFLAG.DATATYPE_INT64,
653 695 PROCFLAG.DATATYPE_FLOAT,
654 696 PROCFLAG.DATATYPE_DOUBLE]
655 697
656 698 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
657 699
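An illustration of how these bit masks are combined and tested (the flag combination is assumed, not taken from any real header):

processFlags = PROCFLAG.DECODE_DATA | PROCFLAG.DATATYPE_SHORT | PROCFLAG.SHIFT_FFT_DATA

decoded = (processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA   # True
dtype_bits = processFlags & PROCFLAG.DATATYPE_MASK                        # DATATYPE_SHORT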
658 700 def get_dtype_index(numpy_dtype):
659 701
660 702 index = None
661 703
662 704 for i in range(len(NUMPY_DTYPE_LIST)):
663 705 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
664 706 index = i
665 707 break
666 708
667 709 return index
668 710
669 711 def get_numpy_dtype(index):
670 712
671 713 return NUMPY_DTYPE_LIST[index]
672 714
673 715 def get_procflag_dtype(index):
674 716
675 717 return PROCFLAG_DTYPE_LIST[index]
676 718
677 719 def get_dtype_width(index):
678 720
679 721 return DTYPE_WIDTH[index] No newline at end of file
@@ -1,1526 +1,1529
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 22 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
23 23
24 24 LOCALTIME = True
25 25
26 26 def isNumber(cad):
27 27 """
28 28 Checks whether the characters that make up a string can be converted to a number.
29 29 
30 30 Exceptions:
31 31 If a given string cannot be converted to a number
32 32 Input:
33 33 cad : string to be analyzed to determine whether it can be converted to a number
34 34 
35 35 Return:
36 36 True : the string is numeric
37 37 False : the string is not numeric
38 38 """
39 39 try:
40 40 float( cad )
41 41 return True
42 42 except:
43 43 return False
44 44
45 45 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
46 46 """
47 47 This function determines whether or not a data file falls within the specified date range.
48 48 
49 49 Inputs:
50 50 filename : full name of the data file in Jicamarca format (.r)
51 51 
52 52 startUTSeconds : start date of the selected range, given in
53 53 seconds counted from 01/01/1970.
54 54 endUTSeconds : end date of the selected range, given in
55 55 seconds counted from 01/01/1970.
56 56 
57 57 Return:
58 58 Boolean : returns True if the data file contains data within the specified
59 59 date range, otherwise returns False.
60 60 
61 61 Exceptions:
62 62 If the file does not exist or cannot be opened
63 63 If the header cannot be read.
64 64
65 65 """
66 66 basicHeaderObj = BasicHeader(LOCALTIME)
67 67
68 68 try:
69 69 fp = open(filename,'rb')
70 70 except IOError:
71 71 traceback.print_exc()
72 72 raise IOError, "The file %s can't be opened" %(filename)
73 73
74 74 sts = basicHeaderObj.read(fp)
75 75 fp.close()
76 76
77 77 if not(sts):
78 78 print "Skipping the file %s because it has not a valid header" %(filename)
79 79 return 0
80 80
81 81 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 82 return 0
83 83
84 84 return 1
85 85
86 86 def isFileinThisTime(filename, startTime, endTime):
87 87 """
88 88 Returns the file's header datetime if the data file falls within the specified time-of-day range.
89 89
90 90 Inputs:
91 91 filename : full path of the data file in Jicamarca format (.r)
92 92
93 93 startTime : start of the selected range as a datetime.time
94 94
95 95 endTime : end of the selected range as a datetime.time
96 96
97 97 Return:
98 98 datetime : the datetime read from the basic header if the file contains
99 99 data within the specified range, otherwise None.
100 100
101 101 Exceptions:
102 102 If the file does not exist or cannot be opened
103 103 If the header cannot be read.
104 104
105 105 """
106 106
107 107
108 108 try:
109 109 fp = open(filename,'rb')
110 110 except IOError:
111 111 traceback.print_exc()
112 112 raise IOError, "The file %s can't be opened" %(filename)
113 113
114 114 basicHeaderObj = BasicHeader(LOCALTIME)
115 115 sts = basicHeaderObj.read(fp)
116 116 fp.close()
117 117
118 118 if not(sts):
119 119 print "Skipping the file %s because it does not have a valid header" %(filename)
120 120 return None
121 121
122 122 thisDatetime = basicHeaderObj.datatime
123 123 thisTime = thisDatetime.time()
124 124
125 125 if not ((startTime <= thisTime) and (endTime > thisTime)):
126 126 return None
127 127
128 128 return thisDatetime
129 129
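# --- Illustrative sketch (not part of the module) ---------------------------
# Typical use of isFileinThisTime(): keep only the files whose basic-header
# timestamp falls inside a time-of-day window. The file path is hypothetical.
import datetime

startTime = datetime.time(9, 0, 0)
endTime = datetime.time(17, 0, 0)

thisDatetime = isFileinThisTime('/data/d2014183/D2014183001.r', startTime, endTime)
if thisDatetime:
    print "File accepted, header time: %s" % thisDatetime.ctime()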
130 130 def getFileFromSet(path, ext, set):
131 131 validFilelist = []
132 132 fileList = os.listdir(path)
133 133
134 134 # 0 1234 567 89A BCDE
135 135 # H YYYY DDD SSS .ext
136 136
137 137 for thisFile in fileList:
138 138 try:
139 139 year = int(thisFile[1:5])
140 140 doy = int(thisFile[5:8])
141 141 except:
142 142 continue
143 143
144 144 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
145 145 continue
146 146
147 147 validFilelist.append(thisFile)
148 148
149 149 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
150 150
151 151 if len(myfile)!= 0:
152 152 return myfile[0]
153 153 else:
154 154 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
155 155 print 'The filename %s does not exist'%filename
156 156 print '...going to the last file available:'
157 157
158 158 if validFilelist:
159 159 validFilelist = sorted( validFilelist, key=str.lower )
160 160 return validFilelist[-1]
161 161
162 162 return None
163 163
164 164 def getlastFileFromPath(path, ext):
165 165 """
166 166 Filters the folder contents, keeping only the files that match the "PYYYYDDDSSS.ext"
167 167 naming convention, and returns the last file of the remaining list.
168 168
169 169 Input:
170 170 path : folder containing the data files
171 171 ext : extension of the files contained in the folder
172 172
173 173 Return:
174 174 The last file of the folder (filename only, without path).
175 175 """
176 176 validFilelist = []
177 177 fileList = os.listdir(path)
178 178
179 179 # 0 1234 567 89A BCDE
180 180 # H YYYY DDD SSS .ext
181 181
182 182 for thisFile in fileList:
183 183
184 184 year = thisFile[1:5]
185 185 if not isNumber(year):
186 186 continue
187 187
188 188 doy = thisFile[5:8]
189 189 if not isNumber(doy):
190 190 continue
191 191
192 192 year = int(year)
193 193 doy = int(doy)
194 194
195 195 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
196 196 continue
197 197
198 198 validFilelist.append(thisFile)
199 199
200 200 if validFilelist:
201 201 validFilelist = sorted( validFilelist, key=str.lower )
202 202 return validFilelist[-1]
203 203
204 204 return None
205 205
206 206 def checkForRealPath(path, foldercounter, year, doy, set, ext):
207 207 """
208 208 Since Linux is case sensitive, checkForRealPath finds the correct name of a path.
209 209 It tries several upper/lower-case combinations of directory and file prefixes to
210 210 determine the exact path of a given file.
211 211
212 212 Example :
213 213 the correct file name is .../.../D2009307/P2009307367.ext
214 214
215 215 The function then tries the following combinations
216 216 .../.../y2009307367.ext
217 217 .../.../Y2009307367.ext
218 218 .../.../x2009307/y2009307367.ext
219 219 .../.../x2009307/Y2009307367.ext
220 220 .../.../X2009307/y2009307367.ext
221 221 .../.../X2009307/Y2009307367.ext
222 222 in this case the last combination of letters is identical to the file searched for
223 223
224 224 Return:
225 225 If the right combination is found, the full path and the file name are returned;
226 226 otherwise None is returned as the path, and the last name combination tried
227 227 is returned as the filename
228 228 """
229 229 fullfilename = None
230 230 find_flag = False
231 231 filename = None
232 232
233 233 prefixDirList = [None,'d','D']
234 234 if ext.lower() == ".r": #voltage
235 235 prefixFileList = ['d','D']
236 236 elif ext.lower() == ".pdata": #spectra
237 237 prefixFileList = ['p','P']
238 238 else:
239 239 return None, filename
240 240
241 241 #sweep over the possible combinations
242 242 for prefixDir in prefixDirList:
243 243 thispath = path
244 244 if prefixDir != None:
245 245 #build the directory name xYYYYDDD (x=d or x=D)
246 246 if foldercounter == 0:
247 247 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
248 248 else:
249 249 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
250 250 for prefixFile in prefixFileList: #sweep over the two possible cases of "D"
251 251 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
252 252 fullfilename = os.path.join( thispath, filename ) #build the full path
253 253
254 254 if os.path.exists( fullfilename ): #check that it exists
255 255 find_flag = True
256 256 break
257 257 if find_flag:
258 258 break
259 259
260 260 if not(find_flag):
261 261 return None, filename
262 262
263 263 return fullfilename, filename
264 264
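# --- Illustrative sketch (not part of the module) ---------------------------
# Resolving the on-disk casing of a voltage file for year 2009, day 307,
# set 367 (the example used in the docstring above). The base path '/data'
# is hypothetical.
fullfilename, filename = checkForRealPath('/data', 0, 2009, 307, 367, '.r')
if fullfilename is None:
    print "No upper/lower-case combination matched; last name tried: %s" % filename
else:
    print "Found: %s" % fullfilename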
265 265 def isRadarFolder(folder):
266 266 try:
267 267 year = int(folder[1:5])
268 268 doy = int(folder[5:8])
269 269 except:
270 270 return 0
271 271
272 272 return 1
273 273
274 274 def isRadarFile(file):
275 275 try:
276 276 year = int(file[1:5])
277 277 doy = int(file[5:8])
278 278 set = int(file[8:11])
279 279 except:
280 280 return 0
281 281
282 282 return 1
283 283
284 284 def getDateFromRadarFile(file):
285 285 try:
286 286 year = int(file[1:5])
287 287 doy = int(file[5:8])
288 288 set = int(file[8:11])
289 289 except:
290 290 return None
291 291
292 292 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
293 293 return thisDate
294 294
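# --- Illustrative sketch (not part of the module) ---------------------------
# The helpers above parse the Jicamarca file naming convention
#   <prefix><YYYY><DDD><SSS><ext>     e.g.  D2014183007.r
# where YYYY is the year, DDD the day of year and SSS the file set of that day.
print isRadarFile('D2014183007.r')           # -> 1
print getDateFromRadarFile('D2014183007.r')  # -> 2014-07-02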
295 295 class JRODataIO:
296 296
297 297 c = 3E8
298 298
299 299 isConfig = False
300 300
301 301 basicHeaderObj = None
302 302
303 303 systemHeaderObj = None
304 304
305 305 radarControllerHeaderObj = None
306 306
307 307 processingHeaderObj = None
308 308
309 309 online = 0
310 310
311 311 dtype = None
312 312
313 313 pathList = []
314 314
315 315 filenameList = []
316 316
317 317 filename = None
318 318
319 319 ext = None
320 320
321 321 flagIsNewFile = 1
322 322
323 323 flagDiscontinuousBlock = 0
324 324
325 325 flagIsNewBlock = 0
326 326
327 327 fp = None
328 328
329 329 firstHeaderSize = 0
330 330
331 331 basicHeaderSize = 24
332 332
333 333 versionFile = 1103
334 334
335 335 fileSize = None
336 336
337 337 # ippSeconds = None
338 338
339 339 fileSizeByHeader = None
340 340
341 341 fileIndex = None
342 342
343 343 profileIndex = None
344 344
345 345 blockIndex = None
346 346
347 347 nTotalBlocks = None
348 348
349 349 maxTimeStep = 30
350 350
351 351 lastUTTime = None
352 352
353 353 datablock = None
354 354
355 355 dataOut = None
356 356
357 357 blocksize = None
358 358
359 359 getByBlock = False
360 360
361 361 def __init__(self):
362 362
363 363 raise ValueError, "Not implemented"
364 364
365 365 def run(self):
366 366
367 367 raise ValueError, "Not implemented"
368 368
369 369 def getDtypeWidth(self):
370 370
371 371 dtype_index = get_dtype_index(self.dtype)
372 372 dtype_width = get_dtype_width(dtype_index)
373 373
374 374 return dtype_width
375 375
376 376 class JRODataReader(JRODataIO):
377 377
378 378 nReadBlocks = 0
379 379
380 380 delay = 10 #seconds to wait for a new file
381 381
382 382 nTries = 3 #number of retries
383 383
384 384 nFiles = 3 #number of files to search ahead
385 385
386 386 path = None
387 387
388 388 foldercounter = 0
389 389
390 390 flagNoMoreFiles = 0
391 391
392 392 datetimeList = []
393 393
394 394 __isFirstTimeOnline = 1
395 395
396 396 __printInfo = True
397 397
398 398 profileIndex = None
399 399
400 400 nTxs = 1
401 401
402 402 txIndex = None
403 403
404 404 def __init__(self):
405 405
406 406 """
407 407
408 408 """
409 409
410 410 # raise NotImplementedError, "This method has not been implemented"
411 411
412 412
413 413 def createObjByDefault(self):
414 414 """
415 415
416 416 """
417 417 raise NotImplementedError, "This method has not been implemented"
418 418
419 419 def getBlockDimension(self):
420 420
421 421 raise NotImplementedError, "No implemented"
422 422
423 423 def __searchFilesOffLine(self,
424 424 path,
425 425 startDate=None,
426 426 endDate=None,
427 427 startTime=datetime.time(0,0,0),
428 428 endTime=datetime.time(23,59,59),
429 429 set=None,
430 430 expLabel='',
431 431 ext='.r',
432 432 walk=True):
433 433
434 434 self.filenameList = []
435 435 self.datetimeList = []
436 436
437 437 pathList = []
438 438
439 439 if not walk:
440 440 #pathList.append(path)
441 441 multi_path = path.split(',')
442 442 for single_path in multi_path:
443 443
444 444 if not os.path.isdir(single_path):
445 445 continue
446 446
447 447 pathList.append(single_path)
448 448
449 449 else:
450 450 #dirList = []
451 451 multi_path = path.split(',')
452 452 for single_path in multi_path:
453 453
454 454 if not os.path.isdir(single_path):
455 455 continue
456 456
457 457 dirList = []
458 458 for thisPath in os.listdir(single_path):
459 459 if not os.path.isdir(os.path.join(single_path,thisPath)):
460 460 continue
461 461 if not isRadarFolder(thisPath):
462 462 continue
463 463
464 464 dirList.append(thisPath)
465 465
466 466 if not(dirList):
467 467 return None, None
468 468
469 469 if startDate and endDate:
470 470 thisDate = startDate
471 471
472 472 while(thisDate <= endDate):
473 473 year = thisDate.timetuple().tm_year
474 474 doy = thisDate.timetuple().tm_yday
475 475
476 476 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
477 477 if len(matchlist) == 0:
478 478 thisDate += datetime.timedelta(1)
479 479 continue
480 480 for match in matchlist:
481 481 pathList.append(os.path.join(single_path,match,expLabel))
482 482
483 483 thisDate += datetime.timedelta(1)
484 484 else:
485 485 for thisDir in dirList:
486 486 pathList.append(os.path.join(single_path,thisDir,expLabel))
487 487
488 488 if pathList == []:
489 489 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
490 490 return None, None
491 491
492 492 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
493 493
494 494 filenameList = []
495 495 datetimeList = []
496 496 pathDict = {}
497 497 filenameList_to_sort = []
498 498
499 499 for i in range(len(pathList)):
500 500
501 501 thisPath = pathList[i]
502 502
503 503 fileList = glob.glob1(thisPath, "*%s" %ext)
504 504 if len(fileList) < 1:
505 505 continue
506 506 fileList.sort()
507 507 pathDict.setdefault(fileList[0])
508 508 pathDict[fileList[0]] = i
509 509 filenameList_to_sort.append(fileList[0])
510 510
511 511 filenameList_to_sort.sort()
512 512
513 513 for file in filenameList_to_sort:
514 514 thisPath = pathList[pathDict[file]]
515 515
516 516 fileList = glob.glob1(thisPath, "*%s" %ext)
517 517 fileList.sort()
518 518
519 519 for file in fileList:
520 520
521 521 filename = os.path.join(thisPath,file)
522 522 thisDatetime = isFileinThisTime(filename, startTime, endTime)
523 523
524 524 if not(thisDatetime):
525 525 continue
526 526
527 527 filenameList.append(filename)
528 528 datetimeList.append(thisDatetime)
529 529
530 530 if not(filenameList):
531 531 print "Any file was found for the time range %s - %s" %(startTime, endTime)
532 532 return None, None
533 533
534 534 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
535 535 print
536 536
537 537 for i in range(len(filenameList)):
538 538 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
539 539
540 540 self.filenameList = filenameList
541 541 self.datetimeList = datetimeList
542 542
543 543 return pathList, filenameList
544 544
545 545 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
546 546
547 547 """
548 548 Searches for the last file of the last folder (determined or not by startDateTime) and
549 549 returns the file found along with other data.
550 550
551 551 Input:
552 552 path : folder that contains the data files
553 553
554 554 expLabel : name of the sub-experiment (subfolder)
555 555
556 556 ext : extension of the files
557 557
558 558 walk : if enabled, the search descends into the day subdirectories (doypath)
559 559
560 560 Return:
561 561 directory : the directory where the file was found
562 562 filename : the last file of that folder
563 563 year : the year
564 564 doy : the day of year
565 565 set : the file set
566 566
567 567
568 568 """
569 569 dirList = []
570 570
571 571 if not walk:
572 572 fullpath = path
573 573 foldercounter = 0
574 574 else:
575 575 #Filtra solo los directorios
576 576 for thisPath in os.listdir(path):
577 577 if not os.path.isdir(os.path.join(path,thisPath)):
578 578 continue
579 579 if not isRadarFolder(thisPath):
580 580 continue
581 581
582 582 dirList.append(thisPath)
583 583
584 584 if not(dirList):
585 585 return None, None, None, None, None, None
586 586
587 587 dirList = sorted( dirList, key=str.lower )
588 588
589 589 doypath = dirList[-1]
590 590 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
591 591 fullpath = os.path.join(path, doypath, expLabel)
592 592
593 593
594 594 print "%s folder was found: " %(fullpath )
595 595
596 596 if set == None:
597 597 filename = getlastFileFromPath(fullpath, ext)
598 598 else:
599 599 filename = getFileFromSet(fullpath, ext, set)
600 600
601 601 if not(filename):
602 602 return None, None, None, None, None, None
603 603
604 604 print "%s file was found" %(filename)
605 605
606 606 if not(self.__verifyFile(os.path.join(fullpath, filename))):
607 607 return None, None, None, None, None, None
608 608
609 609 year = int( filename[1:5] )
610 610 doy = int( filename[5:8] )
611 611 set = int( filename[8:11] )
612 612
613 613 return fullpath, foldercounter, filename, year, doy, set
614 614
615 615 def __setNextFileOffline(self):
616 616
617 617 idFile = self.fileIndex
618 618
619 619 while (True):
620 620 idFile += 1
621 621 if not(idFile < len(self.filenameList)):
622 622 self.flagNoMoreFiles = 1
623 623 # print "[Reading] No more Files"
624 624 return 0
625 625
626 626 filename = self.filenameList[idFile]
627 627
628 628 if not(self.__verifyFile(filename)):
629 629 continue
630 630
631 631 fileSize = os.path.getsize(filename)
632 632 fp = open(filename,'rb')
633 633 break
634 634
635 635 self.flagIsNewFile = 1
636 636 self.fileIndex = idFile
637 637 self.filename = filename
638 638 self.fileSize = fileSize
639 639 self.fp = fp
640 640
641 641 # print "[Reading] Setting the file: %s"%self.filename
642 642
643 643 return 1
644 644
645 645 def __setNextFileOnline(self):
646 646 """
647 647 Searches, within a specific folder, for the next file with enough data to be read; if
648 648 no valid file is found it waits a given time and then tries the next n possible
649 649 files.
650 650
651 651 Affected:
652 652 self.flagIsNewFile
653 653 self.filename
654 654 self.fileSize
655 655 self.fp
656 656 self.set
657 657 self.flagNoMoreFiles
658 658
659 659 Return:
660 660 0 : if no valid next file could be found after the search
661 661 1 : if the file was opened successfully and is ready to be read
662 662
663 663 Exceptions:
664 664 If a given file cannot be opened
665 665 """
666 666 nFiles = 0
667 667 fileOk_flag = False
668 668 firstTime_flag = True
669 669
670 670 self.set += 1
671 671
672 672 if self.set > 999:
673 673 self.set = 0
674 674 self.foldercounter += 1
675 675
676 676 #search for the first available file
677 677 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
678 678 if fullfilename:
679 679 if self.__verifyFile(fullfilename, False):
680 680 fileOk_flag = True
681 681
682 682 #if no file is found, wait and search again
683 683 if not(fileOk_flag):
684 684 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
685 685
686 686 if firstTime_flag: #on the first pass, retry self.nTries times
687 687 tries = self.nTries
688 688 else:
689 689 tries = 1 #on later passes, try only once
690 690
691 691 for nTries in range( tries ):
692 692 if firstTime_flag:
693 693 print "\t[Reading] Waiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
694 694 sleep( self.delay )
695 695 else:
696 696 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
697 697
698 698 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
699 699 if fullfilename:
700 700 if self.__verifyFile(fullfilename):
701 701 fileOk_flag = True
702 702 break
703 703
704 704 if fileOk_flag:
705 705 break
706 706
707 707 firstTime_flag = False
708 708
709 709 print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
710 710 self.set += 1
711 711
712 712 if nFiles == (self.nFiles-1): #if the requested file is not found, move on and search in the next folder
713 713 self.set = 0
714 714 self.doy += 1
715 715 self.foldercounter = 0
716 716
717 717 if fileOk_flag:
718 718 self.fileSize = os.path.getsize( fullfilename )
719 719 self.filename = fullfilename
720 720 self.flagIsNewFile = 1
721 721 if self.fp != None: self.fp.close()
722 722 self.fp = open(fullfilename, 'rb')
723 723 self.flagNoMoreFiles = 0
724 724 # print '[Reading] Setting the file: %s' % fullfilename
725 725 else:
726 726 self.fileSize = 0
727 727 self.filename = None
728 728 self.flagIsNewFile = 0
729 729 self.fp = None
730 730 self.flagNoMoreFiles = 1
731 731 # print '[Reading] No more files to read'
732 732
733 733 return fileOk_flag
734 734
735 735 def setNextFile(self):
736 736 if self.fp != None:
737 737 self.fp.close()
738 738
739 739 if self.online:
740 740 newFile = self.__setNextFileOnline()
741 741 else:
742 742 newFile = self.__setNextFileOffline()
743 743
744 744 if not(newFile):
745 745 print '[Reading] No more files to read'
746 746 return 0
747 747
748 748 print '[Reading] Setting the file: %s' % self.filename
749 749
750 750 self.__readFirstHeader()
751 751 self.nReadBlocks = 0
752 752 return 1
753 753
754 754 def __waitNewBlock(self):
755 755 """
756 756 Returns 1 if a new data block was found, 0 otherwise.
757 757
758 758 If the reading mode is offline it always returns 0
759 759 """
760 760 if not self.online:
761 761 return 0
762 762
763 763 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
764 764 return 0
765 765
766 766 currentPointer = self.fp.tell()
767 767
768 768 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
769 769
770 770 for nTries in range( self.nTries ):
771 771
772 772 self.fp.close()
773 773 self.fp = open( self.filename, 'rb' )
774 774 self.fp.seek( currentPointer )
775 775
776 776 self.fileSize = os.path.getsize( self.filename )
777 777 currentSize = self.fileSize - currentPointer
778 778
779 779 if ( currentSize >= neededSize ):
780 780 self.basicHeaderObj.read(self.fp)
781 781 return 1
782 782
783 783 if self.fileSize == self.fileSizeByHeader:
784 784 # self.flagEoF = True
785 785 return 0
786 786
787 787 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
788 788 sleep( self.delay )
789 789
790 790
791 791 return 0
792 792
793 793 def waitDataBlock(self,pointer_location):
794 794
795 795 currentPointer = pointer_location
796 796
797 797 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
798 798
799 799 for nTries in range( self.nTries ):
800 800 self.fp.close()
801 801 self.fp = open( self.filename, 'rb' )
802 802 self.fp.seek( currentPointer )
803 803
804 804 self.fileSize = os.path.getsize( self.filename )
805 805 currentSize = self.fileSize - currentPointer
806 806
807 807 if ( currentSize >= neededSize ):
808 808 return 1
809 809
810 810 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
811 811 sleep( self.delay )
812 812
813 813 return 0
814 814
815 815 def __jumpToLastBlock(self):
816 816
817 817 if not(self.__isFirstTimeOnline):
818 818 return
819 819
820 820 csize = self.fileSize - self.fp.tell()
821 821 blocksize = self.processingHeaderObj.blockSize
822 822
823 823 #skip the first data block
824 824 if csize > self.processingHeaderObj.blockSize:
825 825 self.fp.seek(self.fp.tell() + blocksize)
826 826 else:
827 827 return
828 828
829 829 csize = self.fileSize - self.fp.tell()
830 830 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
831 831 while True:
832 832
833 833 if self.fp.tell()<self.fileSize:
834 834 self.fp.seek(self.fp.tell() + neededsize)
835 835 else:
836 836 self.fp.seek(self.fp.tell() - neededsize)
837 837 break
838 838
839 839 # csize = self.fileSize - self.fp.tell()
840 840 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
841 841 # factor = int(csize/neededsize)
842 842 # if factor > 0:
843 843 # self.fp.seek(self.fp.tell() + factor*neededsize)
844 844
845 845 self.flagIsNewFile = 0
846 846 self.__isFirstTimeOnline = 0
847 847
848 848 def __setNewBlock(self):
849 849
850 850 if self.fp == None:
851 851 return 0
852 852
853 853 if self.online:
854 854 self.__jumpToLastBlock()
855 855
856 856 if self.flagIsNewFile:
857 857 return 1
858 858
859 859 self.lastUTTime = self.basicHeaderObj.utc
860 860 currentSize = self.fileSize - self.fp.tell()
861 861 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
862 862
863 863 if (currentSize >= neededSize):
864 864 self.basicHeaderObj.read(self.fp)
865 865 return 1
866 866
867 867 if self.__waitNewBlock():
868 868 return 1
869 869
870 870 if not(self.setNextFile()):
871 871 return 0
872 872
873 873 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
874 874
875 875 self.flagDiscontinuousBlock = 0
876 876
877 877 if deltaTime > self.maxTimeStep:
878 878 self.flagDiscontinuousBlock = 1
879 879
880 880 return 1
881 881
882 882 def readNextBlock(self):
883 883
884 884 if not(self.__setNewBlock()):
885 885 return 0
886 886
887 887 if not(self.readBlock()):
888 888 return 0
889 889
890 890 print "[Reading] Block No. %d/%d -> %s" %(self.basicHeaderObj.dataBlock+1,
891 891 self.processingHeaderObj.dataBlocksPerFile,
892 892 self.dataOut.datatime.ctime())
893 893 return 1
894 894
895 895 def __readFirstHeader(self):
896 896
897 897 self.basicHeaderObj.read(self.fp)
898 898 self.systemHeaderObj.read(self.fp)
899 899 self.radarControllerHeaderObj.read(self.fp)
900 900 self.processingHeaderObj.read(self.fp)
901 901
902 902 self.firstHeaderSize = self.basicHeaderObj.size
903 903
904 904 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
905 905 if datatype == 0:
906 906 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
907 907 elif datatype == 1:
908 908 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
909 909 elif datatype == 2:
910 910 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
911 911 elif datatype == 3:
912 912 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
913 913 elif datatype == 4:
914 914 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
915 915 elif datatype == 5:
916 916 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
917 917 else:
918 918 raise ValueError, 'Data type was not defined'
919 919
920 920 self.dtype = datatype_str
921 921 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
922 922 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
923 923 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
924 924 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
925 925 self.getBlockDimension()
926 926
927 927 def __verifyFile(self, filename, msgFlag=True):
928 928 msg = None
929 929 try:
930 930 fp = open(filename, 'rb')
931 931 currentPosition = fp.tell()
932 932 except IOError:
933 933 traceback.print_exc()
934 934 if msgFlag:
935 935 print "[Reading] The file %s can't be opened" % (filename)
936 936 return False
937 937
938 938 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
939 939
940 940 if neededSize == 0:
941 941 basicHeaderObj = BasicHeader(LOCALTIME)
942 942 systemHeaderObj = SystemHeader()
943 943 radarControllerHeaderObj = RadarControllerHeader()
944 944 processingHeaderObj = ProcessingHeader()
945 945
946 946 try:
947 947 if not( basicHeaderObj.read(fp) ): raise IOError
948 948 if not( systemHeaderObj.read(fp) ): raise IOError
949 949 if not( radarControllerHeaderObj.read(fp) ): raise IOError
950 950 if not( processingHeaderObj.read(fp) ): raise IOError
951 951 # data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
952 952
953 953 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
954 954
955 955 except IOError:
956 956 traceback.print_exc()
957 sys.exit(0)
958
957 959 if msgFlag:
958 960 print "[Reading] The file %s is empty or it hasn't enough data" % filename
959 961
960 962 fp.close()
961 963 return False
962 964 else:
963 965 msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename
964 966
965 967 fp.close()
966 968 fileSize = os.path.getsize(filename)
967 969 currentSize = fileSize - currentPosition
968 970 if currentSize < neededSize:
969 971 if msgFlag and (msg != None):
970 972 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
971 973 return False
972 974
973 975 return True
974 976
975 977 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True):
976 978
977 979 dateList = []
978 980 pathList = []
979 981
980 982 if not walk:
981 983 #pathList.append(path)
982 984 multi_path = path.split(',')
983 985 for single_path in multi_path:
984 986
985 987 if not os.path.isdir(single_path):
986 988 continue
987 989
988 990 ok = False
989 991 fileList = glob.glob1(single_path, "*"+ext)
990 992
991 993 for thisFile in fileList:
992 994
993 995 if not os.path.isfile(os.path.join(single_path, thisFile)):
994 996 continue
995 997
996 998 if not isRadarFile(thisFile):
997 999 continue
998 1000
999 1001 ok = True
1000 1002 thisDate = getDateFromRadarFile(thisFile)
1001 1003
1002 1004 if thisDate not in dateList:
1003 1005 dateList.append(thisDate)
1004 1006
1005 1007 if ok:
1006 1008 pathList.append(single_path)
1007 1009
1008 1010 return dateList
1009 1011
1010 1012 multi_path = path.split(',')
1011 1013 for single_path in multi_path:
1012 1014
1013 1015 if not os.path.isdir(single_path):
1014 1016 continue
1015 1017
1016 1018 dirList = []
1017 1019
1018 1020 for thisPath in os.listdir(single_path):
1019 1021
1020 1022 if not os.path.isdir(os.path.join(single_path,thisPath)):
1021 1023 continue
1022 1024
1023 1025 if not isRadarFolder(thisPath):
1024 1026 continue
1025 1027
1026 1028 dirList.append(thisPath)
1027 1029
1028 1030 if not dirList:
1029 1031 return dateList
1030 1032
1031 1033 if startDate and endDate:
1032 1034 thisDate = startDate
1033 1035
1034 1036 while(thisDate <= endDate):
1035 1037 year = thisDate.timetuple().tm_year
1036 1038 doy = thisDate.timetuple().tm_yday
1037 1039
1038 1040 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
1039 1041 if len(matchlist) == 0:
1040 1042 thisDate += datetime.timedelta(1)
1041 1043 continue
1042 1044
1043 1045 for match in matchlist:
1044 1046 pathList.append(os.path.join(single_path,match,expLabel))
1045 1047 dateList.append(thisDate)
1046 1048
1047 1049 thisDate += datetime.timedelta(1)
1048 1050 else:
1049 1051 for thisDir in dirList:
1050 1052 year = int(thisDir[1:5])
1051 1053 doy = int(thisDir[5:8])
1052 1054 thisDate = datetime.date(year,1,1) + datetime.timedelta(doy-1)
1053 1055
1054 1056 pathList.append(os.path.join(single_path,thisDir,expLabel))
1055 1057 dateList.append(thisDate)
1056 1058
1057 1059 return dateList
1058 1060
1059 1061
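# --- Illustrative sketch (not part of the class) -----------------------------
# findDatafiles() can be used to list the dates that actually have data on
# disk before launching a full setup(). 'readerObj' stands for an already
# created concrete reader (e.g. SpectraReader) and the path is hypothetical.
import datetime

dateList = readerObj.findDatafiles(path='/data',
                                   startDate=datetime.date(2014, 7, 1),
                                   endDate=datetime.date(2014, 7, 31),
                                   ext='.r',
                                   walk=True)
print "%d day(s) with data found" % len(dateList)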
1060 1062 def setup(self,
1061 1063 path=None,
1062 1064 startDate=None,
1063 1065 endDate=None,
1064 1066 startTime=datetime.time(0,0,0),
1065 1067 endTime=datetime.time(23,59,59),
1066 1068 set=None,
1067 1069 expLabel = "",
1068 1070 ext = None,
1069 1071 online = False,
1070 1072 delay = 60,
1071 1073 walk = True,
1072 1074 getblock = False,
1073 1075 nTxs = 1):
1074 1076
1075 1077 if path == None:
1076 1078 raise ValueError, "[Reading] The path is not valid"
1077 1079
1078 1080 if ext == None:
1079 1081 ext = self.ext
1080 1082
1081 1083 if online:
1082 1084 print "[Reading] Searching files in online mode..."
1083 1085
1084 1086 for nTries in range( self.nTries ):
1085 1087 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1086 1088
1087 1089 if fullpath:
1088 1090 break
1089 1091
1090 1092 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1091 1093 sleep( self.delay )
1092 1094
1093 1095 if not(fullpath):
1094 1096 print "[Reading] There 'isn't any valid file in %s" % path
1095 1097 return None
1096 1098
1097 1099 self.year = year
1098 1100 self.doy = doy
1099 1101 self.set = set - 1
1100 1102 self.path = path
1101 1103 self.foldercounter = foldercounter
1102 1104 last_set = None
1103 1105
1104 1106 else:
1105 1107 print "[Reading] Searching files in offline mode ..."
1106 1108 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1107 1109 startTime=startTime, endTime=endTime,
1108 1110 set=set, expLabel=expLabel, ext=ext,
1109 1111 walk=walk)
1110 1112
1111 1113 if not(pathList):
1112 1114 print "[Reading] No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
1113 1115 datetime.datetime.combine(startDate,startTime).ctime(),
1114 1116 datetime.datetime.combine(endDate,endTime).ctime())
1115 1117
1116 1118 sys.exit(-1)
1117 1119
1118 1120
1119 1121 self.fileIndex = -1
1120 1122 self.pathList = pathList
1121 1123 self.filenameList = filenameList
1122 1124 file_name = os.path.basename(filenameList[-1])
1123 1125 basename, ext = os.path.splitext(file_name)
1124 1126 last_set = int(basename[-3:])
1125 1127
1126 1128 self.online = online
1127 1129 self.delay = delay
1128 1130 ext = ext.lower()
1129 1131 self.ext = ext
1130 1132 self.getByBlock = getblock
1131 1133 self.nTxs = int(nTxs)
1132 1134
1133 1135 if not(self.setNextFile()):
1134 1136 if (startDate!=None) and (endDate!=None):
1135 1137 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1136 1138 elif startDate != None:
1137 1139 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1138 1140 else:
1139 1141 print "[Reading] No files"
1140 1142
1141 1143 sys.exit(-1)
1142 1144
1143 1145 self.getBasicHeader()
1144 1146
1145 1147 if last_set != None:
1146 1148 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1147 1149 return
1148 1150
1149 1151 def getBasicHeader(self):
1150 1152
1151 1153 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1152 1154
1153 1155 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1154 1156
1155 1157 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1156 1158
1157 1159 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1158 1160
1159 1161 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1160 1162
1161 1163 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1162 1164
1163 1165 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1164 1166
1165 1167 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1166 1168
1167 1169
1168 1170 def getFirstHeader(self):
1169 1171
1170 1172 raise ValueError, "This method has not been implemented"
1171 1173
1172 1174 def getData(self):
1173 1175
1174 1176 raise ValueError, "This method has not been implemented"
1175 1177
1176 1178 def hasNotDataInBuffer(self):
1177 1179
1178 1180 raise ValueError, "This method has not been implemented"
1179 1181
1180 1182 def readBlock(self):
1181 1183
1182 1184 raise ValueError, "This method has not been implemented"
1183 1185
1184 1186 def isEndProcess(self):
1185 1187
1186 1188 return self.flagNoMoreFiles
1187 1189
1188 1190 def printReadBlocks(self):
1189 1191
1190 1192 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1191 1193
1192 1194 def printTotalBlocks(self):
1193 1195
1194 1196 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1195 1197
1196 1198 def printNumberOfBlock(self):
1197 1199
1198 1200 if self.flagIsNewBlock:
1199 1201 print "[Reading] Block No. %d/%d -> %s" %(self.basicHeaderObj.dataBlock+1,
1200 1202 self.processingHeaderObj.dataBlocksPerFile,
1201 1203 self.dataOut.datatime.ctime())
1202 1204
1203 1205 self.dataOut.blocknow = self.basicHeaderObj.dataBlock
1204 1206
1205 1207 def printInfo(self):
1206 1208
1207 1209 if self.__printInfo == False:
1208 1210 return
1209 1211
1210 1212 self.basicHeaderObj.printInfo()
1211 1213 self.systemHeaderObj.printInfo()
1212 1214 self.radarControllerHeaderObj.printInfo()
1213 1215 self.processingHeaderObj.printInfo()
1214 1216
1215 1217 self.__printInfo = False
1216 1218
1217 1219
1218 1220 def run(self, **kwargs):
1219 1221
1220 1222 if not(self.isConfig):
1221 1223
1222 1224 # self.dataOut = dataOut
1223 1225 self.setup(**kwargs)
1224 1226 self.isConfig = True
1225 1227
1226 1228 self.getData()
1227 1229
1228 1230 class JRODataWriter(JRODataIO):
1229 1231
1230 1232 """
1231 1233 This class writes data to processed files (.r or .pdata). Data is always
1232 1234 written in blocks.
1233 1235 """
1234 1236
1235 1237 blockIndex = 0
1236 1238
1237 1239 path = None
1238 1240
1239 1241 setFile = None
1240 1242
1241 1243 profilesPerBlock = None
1242 1244
1243 1245 blocksPerFile = None
1244 1246
1245 1247 nWriteBlocks = 0
1246 1248
1247 1249 def __init__(self, dataOut=None):
1248 1250 raise ValueError, "Not implemented"
1249 1251
1250 1252
1251 1253 def hasAllDataInBuffer(self):
1252 1254 raise ValueError, "Not implemented"
1253 1255
1254 1256
1255 1257 def setBlockDimension(self):
1256 1258 raise ValueError, "Not implemented"
1257 1259
1258 1260
1259 1261 def writeBlock(self):
1260 1262 raise ValueError, "No implemented"
1261 1263
1262 1264
1263 1265 def putData(self):
1264 1266 raise ValueError, "No implemented"
1265 1267
1266 1268
1267 1269 def getProcessFlags(self):
1268 1270
1269 1271 processFlags = 0
1270 1272
1271 1273 dtype_index = get_dtype_index(self.dtype)
1272 1274 procflag_dtype = get_procflag_dtype(dtype_index)
1273 1275
1274 1276 processFlags += procflag_dtype
1275 1277
1276 1278 if self.dataOut.flagDecodeData:
1277 1279 processFlags += PROCFLAG.DECODE_DATA
1278 1280
1279 1281 if self.dataOut.flagDeflipData:
1280 1282 processFlags += PROCFLAG.DEFLIP_DATA
1281 1283
1282 1284 if self.dataOut.code is not None:
1283 1285 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1284 1286
1285 1287 if self.dataOut.nCohInt > 1:
1286 1288 processFlags += PROCFLAG.COHERENT_INTEGRATION
1287 1289
1288 1290 if self.dataOut.type == "Spectra":
1289 1291 if self.dataOut.nIncohInt > 1:
1290 1292 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1291 1293
1292 1294 if self.dataOut.data_dc is not None:
1293 1295 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1294 1296
1297 if self.dataOut.flagShiftFFT:
1298 processFlags += PROCFLAG.SHIFT_FFT_DATA
1299
1295 1300 return processFlags
1296 1301
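# --- Illustrative sketch (not part of the class) -----------------------------
# The value built by getProcessFlags() is a bit mask of PROCFLAG constants,
# so each processing step can be tested independently when the file is read
# back. The 'flags' value below is a hypothetical example.
from schainpy.model.data.jroheaderIO import PROCFLAG

flags = PROCFLAG.DATATYPE_SHORT + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION

if flags & PROCFLAG.COHERENT_INTEGRATION:
    print "coherent integration was applied"
if flags & PROCFLAG.DECODE_DATA:
    print "the data was decoded"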
1297 1302 def setBasicHeader(self):
1298 1303
1299 1304 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1300 1305 self.basicHeaderObj.version = self.versionFile
1301 1306 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1302 1307
1303 1308 utc = numpy.floor(self.dataOut.utctime)
1304 1309 milisecond = (self.dataOut.utctime - utc)* 1000.0
1305 1310
1306 1311 self.basicHeaderObj.utc = utc
1307 1312 self.basicHeaderObj.miliSecond = milisecond
1308 1313 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1309 1314 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1310 1315 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1311 1316
1312 1317 def setFirstHeader(self):
1313 1318 """
1314 1319 Gets a copy of the First Header
1315 1320
1316 1321 Affected:
1317 1322
1318 1323 self.basicHeaderObj
1319 1324 self.systemHeaderObj
1320 1325 self.radarControllerHeaderObj
1321 1326 self.processingHeaderObj
1322 1327
1323 1328 Return:
1324 1329 None
1325 1330 """
1326 1331
1327 1332 raise ValueError, "No implemented"
1328 1333
1329 1334 def __writeFirstHeader(self):
1330 1335 """
1331 1336 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1332 1337
1333 1338 Affected:
1334 1339 __dataType
1335 1340
1336 1341 Return:
1337 1342 None
1338 1343 """
1339 1344
1340 1345 # COMPUTE PARAMETERS
1341 1346
1342 1347 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1343 1348 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1344 1349
1345 1350 self.basicHeaderObj.write(self.fp)
1346 1351 self.systemHeaderObj.write(self.fp)
1347 1352 self.radarControllerHeaderObj.write(self.fp)
1348 1353 self.processingHeaderObj.write(self.fp)
1349 1354
1350
1351
1352 1355 def __setNewBlock(self):
1353 1356 """
1354 1357 If it is a new file, the First Header is written; otherwise only the Basic Header is written
1355 1358
1356 1359 Return:
1357 1360 0 : if nothing could be written
1358 1361 1 : if the Basic or the First Header was written
1359 1362 """
1360 1363 if self.fp == None:
1361 1364 self.setNextFile()
1362 1365
1363 1366 if self.flagIsNewFile:
1364 1367 return 1
1365 1368
1366 1369 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1367 1370 self.basicHeaderObj.write(self.fp)
1368 1371 return 1
1369 1372
1370 1373 if not( self.setNextFile() ):
1371 1374 return 0
1372 1375
1373 1376 return 1
1374 1377
1375 1378
1376 1379 def writeNextBlock(self):
1377 1380 """
1378 1381 Selects the next data block and writes it to a file
1379 1382
1380 1383 Return:
1381 1384 0 : if the data block could not be written
1382 1385 1 : if the data block was written successfully
1383 1386 """
1384 1387 if not( self.__setNewBlock() ):
1385 1388 return 0
1386 1389
1387 1390 self.writeBlock()
1388 1391
1389 1392 print "[Writing] Block No. %d/%d" %(self.blockIndex, self.processingHeaderObj.dataBlocksPerFile)
1390 1393
1391 1394 return 1
1392 1395
1393 1396 def setNextFile(self):
1394 1397 """
1395 1398 Determines the next file to be written
1396 1399
1397 1400 Affected:
1398 1401 self.filename
1399 1402 self.subfolder
1400 1403 self.fp
1401 1404 self.setFile
1402 1405 self.flagIsNewFile
1403 1406
1404 1407 Return:
1405 1408 0 : if the file cannot be written
1406 1409 1 : if the file is ready to be written
1407 1410 """
1408 1411 ext = self.ext
1409 1412 path = self.path
1410 1413
1411 1414 if self.fp != None:
1412 1415 self.fp.close()
1413 1416
1414 1417 timeTuple = time.localtime( self.dataOut.utctime)
1415 1418 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1416 1419
1417 1420 fullpath = os.path.join( path, subfolder )
1418 1421 if not( os.path.exists(fullpath) ):
1419 1422 os.mkdir(fullpath)
1420 1423 self.setFile = -1 #initialize the set counter
1421 1424 else:
1422 1425 filesList = os.listdir( fullpath )
1423 1426 if len( filesList ) > 0:
1424 1427 filesList = sorted( filesList, key=str.lower )
1425 1428 filen = filesList[-1]
1426 1429 # the filename must have the following format
1427 1430 # 0 1234 567 89A BCDE (hex)
1428 1431 # x YYYY DDD SSS .ext
1429 1432 if isNumber( filen[8:11] ):
1430 1433 self.setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
1431 1434 else:
1432 1435 self.setFile = -1
1433 1436 else:
1434 1437 self.setFile = -1 #initialize the set counter
1435 1438
1436 1439 setFile = self.setFile
1437 1440 setFile += 1
1438 1441
1439 1442 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1440 1443 timeTuple.tm_year,
1441 1444 timeTuple.tm_yday,
1442 1445 setFile,
1443 1446 ext )
1444 1447
1445 1448 filename = os.path.join( path, subfolder, filen )
1446 1449
1447 1450 fp = open( filename,'wb' )
1448 1451
1449 1452 self.blockIndex = 0
1450 1453
1451 1454 #store attributes
1452 1455 self.filename = filename
1453 1456 self.subfolder = subfolder
1454 1457 self.fp = fp
1455 1458 self.setFile = setFile
1456 1459 self.flagIsNewFile = 1
1457 1460
1458 1461 self.setFirstHeader()
1459 1462
1460 1463 print '[Writing] Opening file: %s'%self.filename
1461 1464
1462 1465 self.__writeFirstHeader()
1463 1466
1464 1467 return 1
1465 1468
1466 1469 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None, datatype=2):
1467 1470 """
1468 1471 Sets the format in which the data will be saved and writes the First Header
1469 1472
1470 1473 Inputs:
1471 1474 path : directory where data will be saved
1472 1475 profilesPerBlock : number of profiles per block
1473 1476 set : file set
1474 1477 datatype : An integer number that defines data type:
1475 1478 0 : int8 (1 byte)
1476 1479 1 : int16 (2 bytes)
1477 1480 2 : int32 (4 bytes)
1478 1481 3 : int64 (8 bytes)
1479 1482 4 : float (4 bytes)
1480 1483 5 : double (8 bytes)
1481 1484
1482 1485 Return:
1483 1486 0 : if the setup was not successful
1484 1487 1 : if the setup was successful
1485 1488 """
1486 1489
1487 1490 if ext == None:
1488 1491 ext = self.ext
1489 1492
1490 1493 ext = ext.lower()
1491 1494
1492 1495 self.ext = ext
1493 1496
1494 1497 self.path = path
1495 1498
1496 1499 self.setFile = set - 1
1497 1500
1498 1501 self.blocksPerFile = blocksPerFile
1499 1502
1500 1503 self.profilesPerBlock = profilesPerBlock
1501 1504
1502 1505 self.dataOut = dataOut
1503 1506
1504 1507 #By default
1505 1508 self.dtype = self.dataOut.dtype
1506 1509
1507 1510 if datatype is not None:
1508 1511 self.dtype = get_numpy_dtype(datatype)
1509 1512
1510 1513 if not(self.setNextFile()):
1511 1514 print "[Writing] There isn't a next file"
1512 1515 return 0
1513 1516
1514 1517 self.setBlockDimension()
1515 1518
1516 1519 return 1
1517 1520
1518 1521 def run(self, dataOut, **kwargs):
1519 1522
1520 1523 if not(self.isConfig):
1521 1524
1522 1525 self.setup(dataOut, **kwargs)
1523 1526 self.isConfig = True
1524 1527
1525 1528 self.putData()
1526 1529
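# --- Illustrative sketch (not part of jroIO_base.py) -------------------------
# Reading loop for a concrete JRODataReader subclass (e.g. the SpectraReader
# defined elsewhere in this changeset). run() configures the reader on the
# first call and then delivers one block per call through getData(). The
# import path and the data path below are hypothetical.
import datetime
from schainpy.model.io.jroIO_spectra import SpectraReader

readerObj = SpectraReader()

while True:
    readerObj.run(path='/data',
                  startDate=datetime.date(2014, 7, 2),
                  endDate=datetime.date(2014, 7, 3),
                  startTime=datetime.time(0, 0, 0),
                  endTime=datetime.time(23, 59, 59),
                  walk=True)

    if readerObj.flagNoMoreFiles:
        break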
@@ -1,104 +1,175
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import os
import numpy
8 8
9 9 from schainpy.model.data.jrodata import Voltage
from schainpy.model.data.jroheaderIO import SystemHeader, RadarControllerHeader
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11
12 12 class Reader(ProcessingUnit):
13 13 '''
14 14 classdocs
15 15 '''
16 16
17 17 def __init__(self):
18 18 '''
19 19 Constructor
20 20 '''
21 21
22 22 ProcessingUnit.__init__(self)
23 23
24 24 #It is really necessary to create the output object in the initializer
25 25 self.dataOut = Voltage()
26
27 def fillJROHeader(self, *args):
28
29 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=10e5,
30 txA=0,
31 txB=0,
32 nWindows=1,
33 nHeights=self.__nSamples,
34 firstHeight=self.__firstHeigth,
35 deltaHeight=self.__deltaHeigth,
36 codeType=self.__codeType,
37 nCode=self.__nCode, nBaud=self.__nBaud,
38 code = self.__code)
39
40 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
41 nProfiles=nProfiles,
42 nChannels=len(self.__channelList),
43 adcResolution=14)
44
45 self.dataOut.data = None
46
47 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
48
49 self.dataOut.nProfiles = nProfiles
50
51 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
52
53 self.dataOut.channelList = self.__channelList
54
55 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
56
57 # self.dataOut.channelIndexList = None
58
59 self.dataOut.flagNoData = True
60
61 #Set to TRUE if the data is discontinuous
62 self.dataOut.flagDiscontinuousBlock = False
63
64 self.dataOut.utctime = None
65
66 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
67
68 self.dataOut.dstFlag = 0
69
70 self.dataOut.errorCount = 0
71
72 self.dataOut.nCohInt = 1
73
74 self.dataOut.flagDecodeData = False #assume the data has not been decoded
75
76 self.dataOut.flagDeflipData = False #assume the data has not been flipped
77
78 self.dataOut.flagShiftFFT = False
79
80 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
81
82 #Time interval between profiles
83 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
84
85 self.dataOut.frequency = self.__frequency
86
87 self.dataOut.realtime = self.__online
26 88
27 89 def setup(self, path = None,
28 90 startDate = None,
29 91 endDate = None,
30 92 startTime = None,
31 93 endTime = None,
32 94 set = None,
33 95 expLabel = "",
34 96 ext = None,
35 97 online = False,
36 98 delay = 60,
37 99 walk = True):
38 100 '''
39 101 In this method we should set all initial parameters.
40 102
41 103 '''
42 104
43 105 '''
44 106 Add code
45 107 '''
46 108
47 109 self.isConfig = True
48 110
111 self.readUSRPHeader()
112 self.fillJROHeader()
113
114 def getData(self):
115
116 pass
117
49 118 def run(self, **kwargs):
50 119 '''
51 120 This method will be called many times so here you should put all your code
52 121 '''
53 122
54 123 if not self.isConfig:
55 124 self.setup(**kwargs)
56 125
126 self.getData()
127
57 128 '''
58 129 Add code
59 130 '''
60 131
61 132 class Writer(Operation):
62 133 '''
63 134 classdocs
64 135 '''
65 136
66 137 def __init__(self):
67 138 '''
68 139 Constructor
69 140 '''
70 141 self.dataOut = None
71 142
72 143 self.isConfig = False
73 144
74 145 def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
75 146 '''
76 147 In this method we should set all initial parameters.
77 148
78 149 Input:
79 150 dataIn : input data will also be the output data
80 151
81 152 '''
82 153 self.dataOut = dataIn
83 154
84 155
85 156
86 157
87 158
88 159 self.isConfig = True
89 160
90 161 return
91 162
92 163 def run(self, dataIn, **kwargs):
93 164 '''
94 165 This method will be called many times so here you should put all your code
95 166
96 167 Inputs:
97 168
98 169 dataIn : object with the data
99 170
100 171 '''
101 172
102 173 if not self.isConfig:
103 174 self.setup(dataIn, **kwargs)
104 175 No newline at end of file
@@ -1,705 +1,676
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12
13 13 class SpectraReader(JRODataReader, ProcessingUnit):
14 14 """
15 15 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
16 16 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
17 17 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
18 18
19 19 paresCanalesIguales * alturas * perfiles (Self Spectra)
20 20 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
21 21 canales * alturas (DC Channels)
22 22
23 23 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
24 24 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
25 25 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
26 26 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
27 27
28 28 Example:
29 29 dpath = "/home/myuser/data"
30 30
31 31 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
32 32
33 33 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
34 34
35 35 readerObj = SpectraReader()
36 36
37 37 readerObj.setup(dpath, startTime, endTime)
38 38
39 39 while(True):
40 40
41 41 readerObj.getData()
42 42
43 43 print readerObj.data_spc
44 44
45 45 print readerObj.data_cspc
46 46
47 47 print readerObj.data_dc
48 48
49 49 if readerObj.flagNoMoreFiles:
50 50 break
51 51
52 52 """
53 53
54 54 pts2read_SelfSpectra = 0
55 55
56 56 pts2read_CrossSpectra = 0
57 57
58 58 pts2read_DCchannels = 0
59 59
60 60 ext = ".pdata"
61 61
62 62 optchar = "P"
63 63
64 64 dataOut = None
65 65
66 66 nRdChannels = None
67 67
68 68 nRdPairs = None
69 69
70 70 rdPairList = []
71 71
72 72 def __init__(self):
73 73 """
74 74 Initializer of the SpectraReader class for reading spectra data.
75 75
76 76 Inputs:
77 77 dataOut : Object of the Spectra class. This object will be used to
78 78 store one data profile every time a request is made
79 79 (getData). The profile is taken from the data buffer;
80 80 if the buffer is empty a new read of a data block
81 81 is performed.
82 82 If this parameter is not passed, one is created internally.
83 83
84 84 Affected:
85 85 self.dataOut
86 86
87 87 Return : None
88 88 """
89 89
90 90 #Remove the inheritance from the base class
91 91 ProcessingUnit.__init__(self)
92 92
93 93 # self.isConfig = False
94 94
95 95 self.pts2read_SelfSpectra = 0
96 96
97 97 self.pts2read_CrossSpectra = 0
98 98
99 99 self.pts2read_DCchannels = 0
100 100
101 101 self.datablock = None
102 102
103 103 self.utc = None
104 104
105 105 self.ext = ".pdata"
106 106
107 107 self.optchar = "P"
108 108
109 109 self.basicHeaderObj = BasicHeader(LOCALTIME)
110 110
111 111 self.systemHeaderObj = SystemHeader()
112 112
113 113 self.radarControllerHeaderObj = RadarControllerHeader()
114 114
115 115 self.processingHeaderObj = ProcessingHeader()
116 116
117 117 self.online = 0
118 118
119 119 self.fp = None
120 120
121 121 self.idFile = None
122 122
123 123 self.dtype = None
124 124
125 125 self.fileSizeByHeader = None
126 126
127 127 self.filenameList = []
128 128
129 129 self.filename = None
130 130
131 131 self.fileSize = None
132 132
133 133 self.firstHeaderSize = 0
134 134
135 135 self.basicHeaderSize = 24
136 136
137 137 self.pathList = []
138 138
139 139 self.lastUTTime = 0
140 140
141 141 self.maxTimeStep = 30
142 142
143 143 self.flagNoMoreFiles = 0
144 144
145 145 self.set = 0
146 146
147 147 self.path = None
148 148
149 149 self.delay = 60 #seconds
150 150
151 151 self.nTries = 3 #number of retries
152 152
153 153 self.nFiles = 3 #number of files to search ahead
154 154
155 155 self.nReadBlocks = 0
156 156
157 157 self.flagIsNewFile = 1
158 158
159 159 self.__isFirstTimeOnline = 1
160 160
161 161 # self.ippSeconds = 0
162 162
163 163 self.flagDiscontinuousBlock = 0
164 164
165 165 self.flagIsNewBlock = 0
166 166
167 167 self.nTotalBlocks = 0
168 168
169 169 self.blocksize = 0
170 170
171 171 self.dataOut = self.createObjByDefault()
172 172
173 173 self.profileIndex = 1 #Always
174 174
175 175
176 176 def createObjByDefault(self):
177 177
178 178 dataObj = Spectra()
179 179
180 180 return dataObj
181 181
182 182 def __hasNotDataInBuffer(self):
183 183 return 1
184 184
185 185
186 186 def getBlockDimension(self):
187 187 """
188 188 Gets the number of points to read for each data block
189 189
190 190 Affected:
191 191 self.nRdChannels
192 192 self.nRdPairs
193 193 self.pts2read_SelfSpectra
194 194 self.pts2read_CrossSpectra
195 195 self.pts2read_DCchannels
196 196 self.blocksize
197 197 self.dataOut.nChannels
198 198 self.dataOut.nPairs
199 199
200 200 Return:
201 201 None
202 202 """
203 203 self.nRdChannels = 0
204 204 self.nRdPairs = 0
205 205 self.rdPairList = []
206 206
207 207 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
208 208 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
209 209 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectra)
210 210 else:
211 211 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectra)
212 212 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
213 213
214 214 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
215 215
216 216 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
217 217 self.blocksize = self.pts2read_SelfSpectra
218 218
219 219 if self.processingHeaderObj.flag_cspc:
220 220 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
221 221 self.blocksize += self.pts2read_CrossSpectra
222 222
223 223 if self.processingHeaderObj.flag_dc:
224 224 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
225 225 self.blocksize += self.pts2read_DCchannels
226 226
227 227 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
228 228
229 229
230 230 def readBlock(self):
231 231 """
232 232 Reads the data block from the current file pointer position (self.fp) and
233 233 updates all the parameters related to the data block (metadata + data).
234 234 The data read is stored in the buffer and the buffer counter
235 235 is reset to 0
236 236
237 237 Return: None
238 238
239 239 Affected variables:
240 240
241 241 self.flagIsNewFile
242 242 self.flagIsNewBlock
243 243 self.nTotalBlocks
244 244 self.data_spc
245 245 self.data_cspc
246 246 self.data_dc
247 247
248 248 Exceptions:
249 249 If a block read is not a valid block
250 250 """
251 251 blockOk_flag = False
252 252 fpointer = self.fp.tell()
253 253
254 254 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
255 255 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array
256 256
257 257 if self.processingHeaderObj.flag_cspc:
258 258 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
259 259 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array
260 260
261 261 if self.processingHeaderObj.flag_dc:
262 262 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
263 263 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape to a 2D array
264 264
265 265
266 266 if not(self.processingHeaderObj.shif_fft):
267 267 #shift positions to the right along axis 2
268 268 shift = int(self.processingHeaderObj.profilesPerBlock/2)
269 269 spc = numpy.roll( spc, shift , axis=2 )
270 270
271 271 if self.processingHeaderObj.flag_cspc:
272 272 #shift positions to the right along axis 2
273 273 cspc = numpy.roll( cspc, shift, axis=2 )
274
275 # self.processingHeaderObj.shif_fft = True
276 274
277 275 #Dimensions : nChannels, nProfiles, nSamples
278 276 spc = numpy.transpose( spc, (0,2,1) )
279 277 self.data_spc = spc
280 278
281 279 if self.processingHeaderObj.flag_cspc:
282 280 cspc = numpy.transpose( cspc, (0,2,1) )
283 281 self.data_cspc = cspc['real'] + cspc['imag']*1j
284 282 else:
285 283 self.data_cspc = None
286 284
287 285 if self.processingHeaderObj.flag_dc:
288 286 self.data_dc = dc['real'] + dc['imag']*1j
289 287 else:
290 288 self.data_dc = None
291 289
292 290 self.flagIsNewFile = 0
293 291 self.flagIsNewBlock = 1
294 292
295 293 self.nTotalBlocks += 1
296 294 self.nReadBlocks += 1
297 295
298 296 return 1
299 297
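# Illustrative note (assumed example, not from the changeset): when shif_fft is not set,
# the numpy.roll by profilesPerBlock/2 above centers the zero frequency, which is
# equivalent to numpy.fft.fftshift along that axis for an even number of profiles:
#
#   import numpy
#   spc = numpy.arange(8).reshape(1, 1, 8)              # (channels, heights, profiles)
#   rolled = numpy.roll(spc, 8/2, axis=2)
#   # numpy.array_equal(rolled, numpy.fft.fftshift(spc, axes=2)) -> True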
300 298 def getFirstHeader(self):
301 299
302 300 self.getBasicHeader()
303 301
304 302 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
305 303
306 304 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
307 305
308 306 # self.dataOut.ippSeconds = self.ippSeconds
309 307
310 308 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock
311 309
312 310 self.dataOut.dtype = self.dtype
313 311
314 312 # self.dataOut.nPairs = self.nPairs
315 313
316 314 self.dataOut.pairsList = self.rdPairList
317 315
318 316 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
319 317
320 318 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
321 319
322 320 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
323 321
324 322 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
325 323
326 324 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
327 325
328 326 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
329 327
330 328 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
331 329
332 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
330 self.dataOut.flagShiftFFT = True #Data is always shifted
333 331
334 self.dataOut.flagDecodeData = False #assume the data is not decoded
332 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #decode flag is read from the processing header
335 333
336 self.dataOut.flagDeflipData = False #assume the data is not flipped
337
338 if self.radarControllerHeaderObj.code is not None:
339
340 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
341 #
342 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
343 #
344 # self.dataOut.code = self.radarControllerHeaderObj.code
345
346 self.dataOut.flagDecodeData = True
334 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #deflip flag is read from the processing header
347 335
348 336 def getData(self):
349 337 """
350 338 First method to execute before "RUN" is called.
351 339
352 340 Copies the read buffer to the "Spectra" class,
353 341 with all the parameters associated to it (metadata). When there is no data in the read
354 342 buffer, a new read of the data blocks must be done using "readNextBlock"
355 343
356 344 Return:
357 345 0 : If there are no more files available
358 346 1 : If the buffer was copied successfully
359 347
360 348 Affected:
361 349 self.dataOut
362 350
363 351 self.flagDiscontinuousBlock
364 352 self.flagIsNewBlock
365 353 """
366 354
367 355 if self.flagNoMoreFiles:
368 356 self.dataOut.flagNoData = True
369 357 print 'Process finished'
370 358 return 0
371 359
372 360 self.flagDiscontinuousBlock = 0
373 361 self.flagIsNewBlock = 0
374 362
375 363 if self.__hasNotDataInBuffer():
376 364
377 365 if not( self.readNextBlock() ):
378 366 self.dataOut.flagNoData = True
379 367 return 0
380 368
381 369 #data is a 3-dimensional numpy array (profiles, heights and channels)
382 370
383 if self.data_dc is None:
371 if self.data_spc is None:
384 372 self.dataOut.flagNoData = True
385 373 return 0
386 374
387 375 self.getBasicHeader()
388 376
389 377 self.getFirstHeader()
390 378
391 379 self.dataOut.data_spc = self.data_spc
392 380
393 381 self.dataOut.data_cspc = self.data_cspc
394 382
395 383 self.dataOut.data_dc = self.data_dc
396 384
397 385 self.dataOut.flagNoData = False
398 386
399 387 self.dataOut.realtime = self.online
400 388
401 389 return self.dataOut.data_spc
402 390
403 391 class SpectraWriter(JRODataWriter, Operation):
404 392
405 393 """
406 394 This class writes spectra data to processed files (.pdata). The data is always
407 395 written by blocks.
408 396 """
409 397
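# Illustrative usage sketch (not part of the original changeset; assumed workflow): after
# the writer has been configured with a destination path and blocks-per-file value,
# putData() is called once per processed spectra object and writes one block at a time:
#
#   writerObj = SpectraWriter()
#   # writerObj.putData()   # called repeatedly by the processing chain, one block per call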
410 398 ext = ".pdata"
411 399
412 400 optchar = "P"
413 401
414 402 shape_spc_Buffer = None
415 403
416 404 shape_cspc_Buffer = None
417 405
418 406 shape_dc_Buffer = None
419 407
420 408 data_spc = None
421 409
422 410 data_cspc = None
423 411
424 412 data_dc = None
425 413
426 414 # dataOut = None
427 415
428 416 def __init__(self):
429 417 """
430 418 Constructor of the SpectraWriter class for writing spectra data.
431 419
432 420 Affected:
433 421 self.dataOut
434 422 self.basicHeaderObj
435 423 self.systemHeaderObj
436 424 self.radarControllerHeaderObj
437 425 self.processingHeaderObj
438 426
439 427 Return: None
440 428 """
441 429
442 430 Operation.__init__(self)
443 431
444 432 self.isConfig = False
445 433
446 434 self.nTotalBlocks = 0
447 435
448 436 self.data_spc = None
449 437
450 438 self.data_cspc = None
451 439
452 440 self.data_dc = None
453 441
454 442 self.fp = None
455 443
456 444 self.flagIsNewFile = 1
457 445
458 446 self.nTotalBlocks = 0
459 447
460 448 self.flagIsNewBlock = 0
461 449
462 450 self.setFile = None
463 451
464 452 self.dtype = None
465 453
466 454 self.path = None
467 455
468 456 self.noMoreFiles = 0
469 457
470 458 self.filename = None
471 459
472 460 self.basicHeaderObj = BasicHeader(LOCALTIME)
473 461
474 462 self.systemHeaderObj = SystemHeader()
475 463
476 464 self.radarControllerHeaderObj = RadarControllerHeader()
477 465
478 466 self.processingHeaderObj = ProcessingHeader()
479 467
480 468
481 469 def hasAllDataInBuffer(self):
482 470 return 1
483 471
484 472
485 473 def setBlockDimension(self):
486 474 """
487 475 Gets the dimensional shapes of the data sub-blocks that make up a block
488 476
489 477 Affected:
490 478 self.shape_spc_Buffer
491 479 self.shape_cspc_Buffer
492 480 self.shape_dc_Buffer
493 481
494 482 Return: None
495 483 """
496 484 self.shape_spc_Buffer = (self.dataOut.nChannels,
497 485 self.processingHeaderObj.nHeights,
498 486 self.processingHeaderObj.profilesPerBlock)
499 487
500 488 self.shape_cspc_Buffer = (self.dataOut.nPairs,
501 489 self.processingHeaderObj.nHeights,
502 490 self.processingHeaderObj.profilesPerBlock)
503 491
504 492 self.shape_dc_Buffer = (self.dataOut.nChannels,
505 493 self.processingHeaderObj.nHeights)
506 494
507 495
508 496 def writeBlock(self):
509 497 """
510 498 Writes the buffer to the designated file
511 499
512 500 Affected:
513 501 self.data_spc
514 502 self.data_cspc
515 503 self.data_dc
516 504 self.flagIsNewFile
517 505 self.flagIsNewBlock
518 506 self.nTotalBlocks
519 507 self.nWriteBlocks
520 508
521 509 Return: None
522 510 """
523 511
524 512 spc = numpy.transpose( self.data_spc, (0,2,1) )
525 513 if not( self.processingHeaderObj.shif_fft ):
526 514 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 (centering the zero frequency)
527 515 data = spc.reshape((-1))
528 516 data = data.astype(self.dtype[0])
529 517 data.tofile(self.fp)
530 518
531 519 if self.data_cspc is not None:
532 520 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
533 521 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
534 522 if not( self.processingHeaderObj.shif_fft ):
535 523 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 (centering the zero frequency)
536 524 data['real'] = cspc.real
537 525 data['imag'] = cspc.imag
538 526 data = data.reshape((-1))
539 527 data.tofile(self.fp)
540 528
541 529 if self.data_dc is not None:
542 530 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
543 531 dc = self.data_dc
544 532 data['real'] = dc.real
545 533 data['imag'] = dc.imag
546 534 data = data.reshape((-1))
547 535 data.tofile(self.fp)
548 536
549 537 self.data_spc.fill(0)
550 538
551 539 if self.data_dc is not None:
552 540 self.data_dc.fill(0)
553 541
554 542 if self.data_cspc is not None:
555 543 self.data_cspc.fill(0)
556 544
557 545 self.flagIsNewFile = 0
558 546 self.flagIsNewBlock = 1
559 547 self.nTotalBlocks += 1
560 548 self.nWriteBlocks += 1
561 549 self.blockIndex += 1
562 550
563 551 # print "[Writing] Block = %d04" %self.blockIndex
564 552
565 553 def putData(self):
566 554 """
567 555 Sets a data block and then writes it to a file
568 556
569 557 Affected:
570 558 self.data_spc
571 559 self.data_cspc
572 560 self.data_dc
573 561
574 562 Return:
575 563 0 : If there is no data or no more files can be written
576 564 1 : If a data block was written to a file
577 565 """
578 566
579 567 if self.dataOut.flagNoData:
580 568 return 0
581 569
582 570 self.flagIsNewBlock = 0
583 571
584 572 if self.dataOut.flagDiscontinuousBlock:
585 573 self.data_spc.fill(0)
586 574 self.data_cspc.fill(0)
587 575 self.data_dc.fill(0)
588 576 self.setNextFile()
589 577
590 578 if self.flagIsNewFile == 0:
591 579 self.setBasicHeader()
592 580
593 581 self.data_spc = self.dataOut.data_spc.copy()
582
594 583 if self.dataOut.data_cspc is not None:
595 584 self.data_cspc = self.dataOut.data_cspc.copy()
585
596 586 self.data_dc = self.dataOut.data_dc.copy()
597 587
598 588 # #self.processingHeaderObj.dataBlocksPerFile)
599 589 if self.hasAllDataInBuffer():
600 590 # self.setFirstHeader()
601 591 self.writeNextBlock()
602 592
603 593 return 1
604 594
605 595 def __getBlockSize(self):
606 596 '''
607 597 This method determines the number of bytes for a Spectra data block
608 598 '''
609 599
610 600 dtype_width = self.getDtypeWidth()
611 601
612 602 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
613 603
614 604 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
615 605 blocksize = (pts2write_SelfSpectra*dtype_width)
616 606
617 607 if self.dataOut.data_cspc is not None:
618 608 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
619 609 blocksize += (pts2write_CrossSpectra*dtype_width*2)
620 610
621 611 if self.dataOut.data_dc is not None:
622 612 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
623 613 blocksize += (pts2write_DCchannels*dtype_width*2)
624 614
625 615 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
626 616
627 617 return blocksize
628 618
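# Illustrative calculation (assumed values, not from this changeset): for float32 samples
# (dtype_width = 4 bytes), 4 channels, 2 pairs, 128 FFT points and 100 heights:
#
#   pts2write = 100 * 128                  # nHeights * nFFTPoints
#   blocksize = 4 * pts2write * 4          # self-spectra, real values only: 204800 bytes
#   blocksize += 2 * pts2write * 4 * 2     # cross-spectra, real + imaginary: +204800 bytes
#   blocksize += 4 * 100 * 4 * 2           # DC channels, real + imaginary: +3200 bytes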
629 619 def setFirstHeader(self):
630 620
631 621 """
632 622 Gets a copy of the First Header
633 623
634 624 Affected:
635 625 self.systemHeaderObj
636 626 self.radarControllerHeaderObj
637 627 self.dtype
638 628
639 629 Return:
640 630 None
641 631 """
642 632
643 633 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
644 634 self.systemHeaderObj.nChannels = self.dataOut.nChannels
645 635 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
646 old_code_size = self.dataOut.radarControllerHeaderObj.code_size
647 new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
648 self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size
649 636
650 self.setBasicHeader()
651
652 processingHeaderSize = 40 # bytes
653 637 self.processingHeaderObj.dtype = 1 # Spectra
654 638 self.processingHeaderObj.blockSize = self.__getBlockSize()
655 639 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
656 640 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
657 641 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
658 self.processingHeaderObj.processFlags = self.getProcessFlags()
659 642 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # Required to determine the timeInterval value
660 643 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
661 644 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
662 645 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
663 646
664 647 if self.processingHeaderObj.totalSpectra > 0:
665 648 channelList = []
666 649 for channel in range(self.dataOut.nChannels):
667 650 channelList.append(channel)
668 651 channelList.append(channel)
669 652
670 653 pairsList = []
671 654 if self.dataOut.nPairs > 0:
672 655 for pair in self.dataOut.pairsList:
673 656 pairsList.append(pair[0])
674 657 pairsList.append(pair[1])
675 658
676 659 spectraComb = channelList + pairsList
677 spectraComb = numpy.array(spectraComb,dtype="u1")
660 spectraComb = numpy.array(spectraComb, dtype="u1")
678 661 self.processingHeaderObj.spectraComb = spectraComb
679 sizeOfSpcComb = len(spectraComb)
680 processingHeaderSize += sizeOfSpcComb
681
682 # The processing header should not have information about code
683 # if self.dataOut.code is not None:
684 # self.processingHeaderObj.code = self.dataOut.code
685 # self.processingHeaderObj.nCode = self.dataOut.nCode
686 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
687 # nCodeSize = 4 # bytes
688 # nBaudSize = 4 # bytes
689 # codeSize = 4 # bytes
690 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
691 # processingHeaderSize += sizeOfCode
662
663 if self.dataOut.code is not None:
664 self.processingHeaderObj.code = self.dataOut.code
665 self.processingHeaderObj.nCode = self.dataOut.nCode
666 self.processingHeaderObj.nBaud = self.dataOut.nBaud
692 667
693 668 if self.processingHeaderObj.nWindows != 0:
694 669 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
695 670 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
696 671 self.processingHeaderObj.nHeights = self.dataOut.nHeights
697 672 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
698 sizeOfFirstHeight = 4
699 sizeOfdeltaHeight = 4
700 sizeOfnHeights = 4
701 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
702 processingHeaderSize += sizeOfWindows
703 673
704 self.processingHeaderObj.size = processingHeaderSize
674 self.processingHeaderObj.processFlags = self.getProcessFlags()
705 675
676 self.setBasicHeader()
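# Illustrative note (assumed example, not from the changeset): spectraComb lists the
# self-spectra channels in duplicated pairs followed by the cross-spectra pairs, which is
# what the reader's spectraComb loop above expects when it compares spectraComb[i] with
# spectraComb[i+1]. For 3 channels and pairsList = [(0, 1), (1, 2)]:
#
#   spectraComb = [0,0, 1,1, 2,2, 0,1, 1,2]    # totalSpectra = nChannels + nPairs = 5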
@@ -1,599 +1,594
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13
14 14 class VoltageReader(JRODataReader, ProcessingUnit):
15 15 """
16 16 This class reads voltage data from files in rawdata format (.r). The data is
17 17 always read by blocks. The data read (3-dimensional array:
18 18 profiles*heights*channels) is stored in the variable "buffer".
19 19
20 20 profiles * heights * channels
21 21
22 22 This class contains instances (objects) of the BasicHeader, SystemHeader,
23 23 RadarControllerHeader and Voltage classes. The first three are used to store data
24 24 header information (metadata), and the fourth (Voltage) to get and store a data
25 25 profile from the "buffer" every time the "getData" method is executed.
26 26
27 27 Example:
28 28
29 29 dpath = "/home/myuser/data"
30 30
31 31 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
32 32
33 33 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
34 34
35 35 readerObj = VoltageReader()
36 36
37 37 readerObj.setup(dpath, startTime, endTime)
38 38
39 39 while(True):
40 40
41 41 #to get one profile
42 42 profile = readerObj.getData()
43 43
44 44 #print the profile
45 45 print profile
46 46
47 47 #If you want to see all datablock
48 48 print readerObj.datablock
49 49
50 50 if readerObj.flagNoMoreFiles:
51 51 break
52 52
53 53 """
54 54
55 55 ext = ".r"
56 56
57 57 optchar = "D"
58 58 dataOut = None
59 59
60 60
61 61 def __init__(self):
62 62 """
63 63 Constructor of the VoltageReader class for reading voltage data.
64 64
65 65 Input:
66 66 dataOut : Object of the Voltage class. This object will be used to
67 67 store a data profile every time a request is made
68 68 (getData). The profile will be obtained from the data buffer;
69 69 if the buffer is empty, a new read process of a data
70 70 block will be carried out.
71 71 If this parameter is not passed, one will be created internally.
72 72
73 73 Affected:
74 74 self.dataOut
75 75
76 76 Return:
77 77 None
78 78 """
79 79
80 80 ProcessingUnit.__init__(self)
81 81
82 82 self.isConfig = False
83 83
84 84 self.datablock = None
85 85
86 86 self.utc = 0
87 87
88 88 self.ext = ".r"
89 89
90 90 self.optchar = "D"
91 91
92 92 self.basicHeaderObj = BasicHeader(LOCALTIME)
93 93
94 94 self.systemHeaderObj = SystemHeader()
95 95
96 96 self.radarControllerHeaderObj = RadarControllerHeader()
97 97
98 98 self.processingHeaderObj = ProcessingHeader()
99 99
100 100 self.online = 0
101 101
102 102 self.fp = None
103 103
104 104 self.idFile = None
105 105
106 106 self.dtype = None
107 107
108 108 self.fileSizeByHeader = None
109 109
110 110 self.filenameList = []
111 111
112 112 self.filename = None
113 113
114 114 self.fileSize = None
115 115
116 116 self.firstHeaderSize = 0
117 117
118 118 self.basicHeaderSize = 24
119 119
120 120 self.pathList = []
121 121
122 122 self.filenameList = []
123 123
124 124 self.lastUTTime = 0
125 125
126 126 self.maxTimeStep = 30
127 127
128 128 self.flagNoMoreFiles = 0
129 129
130 130 self.set = 0
131 131
132 132 self.path = None
133 133
134 134 self.profileIndex = 2**32-1
135 135
136 136 self.delay = 3 #seconds
137 137
138 138 self.nTries = 3 #number of retries
139 139
140 140 self.nFiles = 3 #number of files to search for
141 141
142 142 self.nReadBlocks = 0
143 143
144 144 self.flagIsNewFile = 1
145 145
146 146 self.__isFirstTimeOnline = 1
147 147
148 148 # self.ippSeconds = 0
149 149
150 150 self.flagDiscontinuousBlock = 0
151 151
152 152 self.flagIsNewBlock = 0
153 153
154 154 self.nTotalBlocks = 0
155 155
156 156 self.blocksize = 0
157 157
158 158 self.dataOut = self.createObjByDefault()
159 159
160 160 self.nTxs = 1
161 161
162 162 self.txIndex = 0
163 163
164 164 def createObjByDefault(self):
165 165
166 166 dataObj = Voltage()
167 167
168 168 return dataObj
169 169
170 170 def __hasNotDataInBuffer(self):
171 171
172 172 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
173 173 return 1
174 174
175 175 return 0
176 176
177 177
178 178 def getBlockDimension(self):
179 179 """
180 180 Gets the number of points to read per data block
181 181
182 182 Affected:
183 183 self.blocksize
184 184
185 185 Return:
186 186 None
187 187 """
188 188 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
189 189 self.blocksize = pts2read
190 190
191 191
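# Illustrative calculation (assumed values, not from this changeset): for 128 profiles
# per block, 200 heights and 2 channels, one raw voltage block holds
#
#   pts2read = 128 * 200 * 2    # = 51200 complex samples (real/imag pairs on disk)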
192 192 def readBlock(self):
193 193 """
194 194 readBlock reads the data block from the current position of the file pointer
195 195 (self.fp) and updates every parameter related to the data block
196 196 (metadata + data). The data read is stored in the buffer and the buffer
197 197 counter is reset to 0
198 198
199 199 Inputs:
200 200 None
201 201
202 202 Return:
203 203 None
204 204
205 205 Affected:
206 206 self.profileIndex
207 207 self.datablock
208 208 self.flagIsNewFile
209 209 self.flagIsNewBlock
210 210 self.nTotalBlocks
211 211
212 212 Exceptions:
213 213 If a block read is not a valid block
214 214 """
215 215 current_pointer_location = self.fp.tell()
216 216 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
217 217
218 218 try:
219 219 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
220 220 except:
221 221 #print "The read block (%3d) does not have enough data" %self.nReadBlocks
222 222
223 223 if self.waitDataBlock(pointer_location=current_pointer_location):
224 224 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
225 225 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
226 226 # return 0
227 227
228 228 #Dimensions : nChannels, nProfiles, nSamples
229 229
230 230 junk = numpy.transpose(junk, (2,0,1))
231 231 self.datablock = junk['real'] + junk['imag']*1j
232 232
233 233 self.profileIndex = 0
234 234
235 235 self.flagIsNewFile = 0
236 236 self.flagIsNewBlock = 1
237 237
238 238 self.nTotalBlocks += 1
239 239 self.nReadBlocks += 1
240 240
241 241 return 1
242 242
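# Illustrative note (assumed dtype, not from this changeset): the raw samples are read as
# a structured real/imag array and converted to complex voltages, as done above:
#
#   import numpy
#   raw_dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
#   junk = numpy.zeros(4, dtype=raw_dtype)        # stand-in for numpy.fromfile(fp, ...)
#   volts = junk['real'] + junk['imag'] * 1j      # complex voltage samples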
243 243 def getFirstHeader(self):
244 244
245 245 self.getBasicHeader()
246 246
247 247 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
248 248
249 249 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
250 250
251 251 if self.nTxs > 1:
252 252 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
253 253
254 #Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.
255
254 256 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
255 257 #
256 258 # if self.radarControllerHeaderObj.code is not None:
257 259 #
258 260 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
259 261 #
260 262 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
261 263 #
262 264 # self.dataOut.code = self.radarControllerHeaderObj.code
263 265
264 266 self.dataOut.dtype = self.dtype
265 267
266 268 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
267 269
268 270 if self.processingHeaderObj.nHeights % self.nTxs != 0:
269 271 raise ValueError, "nTxs (%d) should be a multiple of nHeights (%d)" %(self.nTxs, self.processingHeaderObj.nHeights)
270 272
271 273 xf = self.processingHeaderObj.firstHeight + int(self.processingHeaderObj.nHeights/self.nTxs)*self.processingHeaderObj.deltaHeight
272 274
273 275 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
274 276
275 277 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
276 278
277 279 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
278 280
279 self.dataOut.flagShiftFFT = False
280
281 self.dataOut.flagDecodeData = False #assume the data is not decoded
281 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #decode flag is read from the processing header
282 282
283 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
283 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #deflip flag is read from the processing header
284 284
285 self.dataOut.flagShiftFFT = False
285 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
286 286
287 287 def getData(self):
288 288 """
289 289 getData gets one data unit from the read buffer, a profile, and copies it to the self.dataOut
290 290 object of type "Voltage" with all the parameters associated to it (metadata). When there is no data
291 291 in the read buffer, a new read of the data blocks must be done using
292 292 "readNextBlock"
293 293
294 294 It also increments the buffer counter "self.profileIndex" by 1.
295 295
296 296 Return:
297 297
298 298 If the flag self.getByBlock has been set, the whole block is copied to self.dataOut and self.profileIndex
299 299 equals the total number of profiles read from the file.
300 300
301 301 If self.getByBlock == False:
302 302
303 303 self.dataOut.data = buffer[:, thisProfile, :]
304 304
305 305 shape = [nChannels, nHeis]
306 306
307 307 If self.getByBlock == True:
308 308
309 309 self.dataOut.data = buffer[:, :, :]
310 310
311 311 shape = [nChannels, nProfiles, nHeis]
312 312
313 313 Affected:
314 314 self.dataOut
315 315 self.profileIndex
316 316
317 317 Affected:
318 318 self.dataOut
319 319 self.profileIndex
320 320 self.flagDiscontinuousBlock
321 321 self.flagIsNewBlock
322 322 """
323 323
324 324 if self.flagNoMoreFiles:
325 325 self.dataOut.flagNoData = True
326 326 print 'Process finished'
327 327 return 0
328 328
329 329 self.flagDiscontinuousBlock = 0
330 330 self.flagIsNewBlock = 0
331 331
332 332 if self.__hasNotDataInBuffer():
333 333
334 334 if not( self.readNextBlock() ):
335 335 return 0
336 336
337 337 self.getFirstHeader()
338 338
339 339 if self.datablock is None:
340 340 self.dataOut.flagNoData = True
341 341 return 0
342 342
343 343 if not self.getByBlock:
344 344
345 345 """
346 346 Return profile by profile
347 347
348 348 If nTxs > 1 then one profile is divided by nTxs and number of total
349 349 blocks is increased by nTxs (nProfiles *= nTxs)
350 350 """
351 351 self.dataOut.flagDataAsBlock = False
352 352
353 353 if self.nTxs == 1:
354 354 self.dataOut.data = self.datablock[:,self.profileIndex,:]
355 355 self.dataOut.profileIndex = self.profileIndex
356 356
357 357 self.profileIndex += 1
358 358
359 359 else:
360 360 iniHei_ForThisTx = (self.txIndex)*int(self.processingHeaderObj.nHeights/self.nTxs)
361 361 endHei_ForThisTx = (self.txIndex+1)*int(self.processingHeaderObj.nHeights/self.nTxs)
362 362
363 363 # print iniHei_ForThisTx, endHei_ForThisTx
364 364
365 365 self.dataOut.data = self.datablock[:, self.profileIndex, iniHei_ForThisTx:endHei_ForThisTx]
366 366 self.dataOut.profileIndex = self.profileIndex*self.nTxs + self.txIndex
367 367
368 368 self.txIndex += 1
369 369
370 370 if self.txIndex == self.nTxs:
371 371 self.txIndex = 0
372 372 self.profileIndex += 1
373 373
374 374 else:
375 375 """
376 376 Return all block
377 377 """
378 378 self.dataOut.flagDataAsBlock = True
379 379 self.dataOut.data = self.datablock
380 380 self.dataOut.profileIndex = self.processingHeaderObj.profilesPerBlock
381 381
382 382 self.profileIndex = self.processingHeaderObj.profilesPerBlock
383 383
384 384 self.dataOut.flagNoData = False
385 385
386 386 self.getBasicHeader()
387 387
388 388 self.dataOut.realtime = self.online
389 389
390 390 return self.dataOut.data
391 391
392 392 class VoltageWriter(JRODataWriter, Operation):
393 393 """
394 394 This class writes voltage data to processed files (.r). The data is always
395 395 written by blocks.
396 396 """
397 397
398 398 ext = ".r"
399 399
400 400 optchar = "D"
401 401
402 402 shapeBuffer = None
403 403
404 404
405 405 def __init__(self):
406 406 """
407 407 Constructor of the VoltageWriter class for writing voltage data.
408 408
409 409 Affected:
410 410 self.dataOut
411 411
412 412 Return: None
413 413 """
414 414 Operation.__init__(self)
415 415
416 416 self.nTotalBlocks = 0
417 417
418 418 self.profileIndex = 0
419 419
420 420 self.isConfig = False
421 421
422 422 self.fp = None
423 423
424 424 self.flagIsNewFile = 1
425 425
426 426 self.blockIndex = 0
427 427
428 428 self.flagIsNewBlock = 0
429 429
430 430 self.setFile = None
431 431
432 432 self.dtype = None
433 433
434 434 self.path = None
435 435
436 436 self.filename = None
437 437
438 438 self.basicHeaderObj = BasicHeader(LOCALTIME)
439 439
440 440 self.systemHeaderObj = SystemHeader()
441 441
442 442 self.radarControllerHeaderObj = RadarControllerHeader()
443 443
444 444 self.processingHeaderObj = ProcessingHeader()
445 445
446 446 def hasAllDataInBuffer(self):
447 447 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
448 448 return 1
449 449 return 0
450 450
451 451
452 452 def setBlockDimension(self):
453 453 """
454 454 Gets the dimensional shapes of the data sub-blocks that make up a block
455 455
456 456 Affected:
457 457 self.shapeBuffer
458 458 self.datablock
460 460
461 461 Return: None
462 462 """
463 463 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
464 464 self.processingHeaderObj.nHeights,
465 465 self.systemHeaderObj.nChannels)
466 466
467 467 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
468 468 self.processingHeaderObj.profilesPerBlock,
469 469 self.processingHeaderObj.nHeights),
470 470 dtype=numpy.dtype('complex64'))
471 471
472 472 def writeBlock(self):
473 473 """
474 474 Writes the buffer to the designated file
475 475
476 476 Affected:
477 477 self.profileIndex
478 478 self.flagIsNewFile
479 479 self.flagIsNewBlock
480 480 self.nTotalBlocks
481 481 self.blockIndex
482 482
483 483 Return: None
484 484 """
485 485 data = numpy.zeros( self.shapeBuffer, self.dtype )
486 486
487 487 junk = numpy.transpose(self.datablock, (1,2,0))
488 488
489 489 data['real'] = junk.real
490 490 data['imag'] = junk.imag
491 491
492 492 data = data.reshape( (-1) )
493 493
494 494 data.tofile( self.fp )
495 495
496 496 self.datablock.fill(0)
497 497
498 498 self.profileIndex = 0
499 499 self.flagIsNewFile = 0
500 500 self.flagIsNewBlock = 1
501 501
502 502 self.blockIndex += 1
503 503 self.nTotalBlocks += 1
504 504
505 505 # print "[Writing] Block = %04d" %self.blockIndex
506 506
507 507 def putData(self):
508 508 """
509 509 Sets a data block and then writes it to a file
510 510
511 511 Affected:
512 512 self.flagIsNewBlock
513 513 self.profileIndex
514 514
515 515 Return:
516 516 0 : If there is no data or no more files can be written
517 517 1 : If a data block was written to a file
518 518 """
519 519 if self.dataOut.flagNoData:
520 520 return 0
521 521
522 522 self.flagIsNewBlock = 0
523 523
524 524 if self.dataOut.flagDiscontinuousBlock:
525 525 self.datablock.fill(0)
526 526 self.profileIndex = 0
527 527 self.setNextFile()
528 528
529 529 if self.profileIndex == 0:
530 530 self.setBasicHeader()
531 531
532 532 self.datablock[:,self.profileIndex,:] = self.dataOut.data
533 533
534 534 self.profileIndex += 1
535 535
536 536 if self.hasAllDataInBuffer():
537 537 #if self.flagIsNewFile:
538 538 self.writeNextBlock()
539 539 # self.setFirstHeader()
540 540
541 541 return 1
542 542
543 543 def __getBlockSize(self):
544 544 '''
545 545 This method determines the number of bytes for a Voltage data block
546 546 '''
547 547
548 548 dtype_width = self.getDtypeWidth()
549 549
550 550 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)
551 551
552 552 return blocksize
553 553
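# Illustrative calculation (assumed values, not from this changeset): with 200 heights,
# 2 channels, 128 profiles per block and int16 samples (dtype_width = 2 bytes), one
# voltage block occupies
#
#   blocksize = 200 * 2 * 128 * 2 * 2    # = 204800 bytes (real + imaginary parts)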
554 554 def setFirstHeader(self):
555 555
556 556 """
557 557 Gets a copy of the First Header
558 558
559 559 Affected:
560 560 self.systemHeaderObj
561 561 self.radarControllerHeaderObj
562 562 self.dtype
563 563
564 564 Return:
565 565 None
566 566 """
567 567
568 568 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
569 569 self.systemHeaderObj.nChannels = self.dataOut.nChannels
570 570 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
571 571
572 self.setBasicHeader()
573
574 processingHeaderSize = 40 # bytes
575 572 self.processingHeaderObj.dtype = 0 # Voltage
576 573 self.processingHeaderObj.blockSize = self.__getBlockSize()
577 574 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
578 575 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
579 576 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
580 self.processingHeaderObj.processFlags = self.getProcessFlags()
581 577 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
582 578 self.processingHeaderObj.nIncohInt = 1 # When the source data is of type Voltage
583 579 self.processingHeaderObj.totalSpectra = 0 # When the source data is of type Voltage
584 580
585 # if self.dataOut.code is not None:
586 # self.processingHeaderObj.code = self.dataOut.code
587 # self.processingHeaderObj.nCode = self.dataOut.nCode
588 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
589 # codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
590 # processingHeaderSize += codesize
581 if self.dataOut.code is not None:
582 self.processingHeaderObj.code = self.dataOut.code
583 self.processingHeaderObj.nCode = self.dataOut.nCode
584 self.processingHeaderObj.nBaud = self.dataOut.nBaud
591 585
592 586 if self.processingHeaderObj.nWindows != 0:
593 587 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
594 588 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
595 589 self.processingHeaderObj.nHeights = self.dataOut.nHeights
596 590 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
597 processingHeaderSize += 12
598 591
599 self.processingHeaderObj.size = processingHeaderSize No newline at end of file
592 self.processingHeaderObj.processFlags = self.getProcessFlags()
593
594 self.setBasicHeader() No newline at end of file
@@ -1,873 +1,880
1 1 import numpy
2 2 import math
3 3
4 4 from jroproc_base import ProcessingUnit, Operation
5 5 from schainpy.model.data.jrodata import Spectra
6 6 from schainpy.model.data.jrodata import hildebrand_sekhon
7 7
8 8 class SpectraProc(ProcessingUnit):
9 9
10 10 def __init__(self):
11 11
12 12 ProcessingUnit.__init__(self)
13 13
14 14 self.buffer = None
15 15 self.firstdatatime = None
16 16 self.profIndex = 0
17 17 self.dataOut = Spectra()
18 18 self.id_min = None
19 19 self.id_max = None
20 20
21 21 def __updateSpecFromVoltage(self):
22 22
23 23 self.dataOut.timeZone = self.dataIn.timeZone
24 24 self.dataOut.dstFlag = self.dataIn.dstFlag
25 25 self.dataOut.errorCount = self.dataIn.errorCount
26 26 self.dataOut.useLocalTime = self.dataIn.useLocalTime
27 27
28 28 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
29 29 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
30 30 self.dataOut.channelList = self.dataIn.channelList
31 31 self.dataOut.heightList = self.dataIn.heightList
32 32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 33
34 34 self.dataOut.nBaud = self.dataIn.nBaud
35 35 self.dataOut.nCode = self.dataIn.nCode
36 36 self.dataOut.code = self.dataIn.code
37 37 self.dataOut.nProfiles = self.dataOut.nFFTPoints
38 38
39 39 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
40 40 self.dataOut.utctime = self.firstdatatime
41 41 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
42 42 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
43 43 self.dataOut.flagShiftFFT = False
44 44
45 45 self.dataOut.nCohInt = self.dataIn.nCohInt
46 46 self.dataOut.nIncohInt = 1
47 47
48 48 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
49 49
50 50 self.dataOut.frequency = self.dataIn.frequency
51 51 self.dataOut.realtime = self.dataIn.realtime
52 52
53 53 self.dataOut.azimuth = self.dataIn.azimuth
54 54 self.dataOut.zenith = self.dataIn.zenith
55 55
56 56 self.dataOut.beam.codeList = self.dataIn.beam.codeList
57 57 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
58 58 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
59 59
60 60 def __getFft(self):
61 61 """
62 62 Converts Voltage values to Spectra
63 63
64 64 Affected:
65 65 self.dataOut.data_spc
66 66 self.dataOut.data_cspc
67 67 self.dataOut.data_dc
68 68 self.dataOut.heightList
69 69 self.profIndex
70 70 self.buffer
71 71 self.dataOut.flagNoData
72 72 """
73 73 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
74 74 fft_volt = fft_volt.astype(numpy.dtype('complex'))
75 75 dc = fft_volt[:,0,:]
76 76
77 77 #self-spectra computation
78 78 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
79 79 spc = fft_volt * numpy.conjugate(fft_volt)
80 80 spc = spc.real
81 81
82 82 blocksize = 0
83 83 blocksize += dc.size
84 84 blocksize += spc.size
85 85
86 86 cspc = None
87 87 pairIndex = 0
88 88 if self.dataOut.pairsList != None:
89 89 #cross-spectra computation
90 90 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
91 91 for pair in self.dataOut.pairsList:
92 92 if pair[0] not in self.dataOut.channelList:
93 93 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
94 94 if pair[1] not in self.dataOut.channelList:
95 95 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
96 96
97 97 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
98 98 pairIndex += 1
99 99 blocksize += cspc.size
100 100
101 101 self.dataOut.data_spc = spc
102 102 self.dataOut.data_cspc = cspc
103 103 self.dataOut.data_dc = dc
104 104 self.dataOut.blockSize = blocksize
105 105 self.dataOut.flagShiftFFT = True
106 106
107 107 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None):
108 108
109 109 self.dataOut.flagNoData = True
110 110
111 111 if self.dataIn.type == "Spectra":
112 112 self.dataOut.copy(self.dataIn)
113 113 return True
114 114
115 115 if self.dataIn.type == "Voltage":
116 116
117 117 if nFFTPoints == None:
118 118 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
119 119
120 120 if nProfiles == None:
121 121 nProfiles = nFFTPoints
122 122 # raise ValueError, "This SpectraProc.run() need nProfiles input variable"
123 123
124 124 if ippFactor == None:
125 125 ippFactor = 1
126 126
127 127 self.dataOut.ippFactor = ippFactor
128 128
129 129 self.dataOut.nFFTPoints = nFFTPoints
130 130 self.dataOut.pairsList = pairsList
131 131
132 132 if self.buffer is None:
133 133 self.buffer = numpy.zeros( (self.dataIn.nChannels,
134 134 nProfiles,
135 135 self.dataIn.nHeights),
136 136 dtype='complex')
137 137
138 138 if self.dataIn.flagDataAsBlock:
139 139
140 140 if self.dataIn.nProfiles == nProfiles:
141 141 self.buffer = self.dataIn.data.copy()
142 142 self.profIndex = nProfiles
143 143
144 144 elif self.dataIn.nProfiles < nProfiles:
145 145
146 146 if self.profIndex == 0:
147 147 self.id_min = 0
148 148 self.id_max = self.dataIn.nProfiles
149 149
150 150 self.buffer[:,self.id_min:self.id_max,:] = self.dataIn.data
151 151 self.profIndex += self.dataIn.nProfiles
152 152 self.id_min += self.dataIn.data.shape[1]
153 153 self.id_max += self.dataIn.data.shape[1]
154 154 else:
155 155 raise ValueError, "The type object %s has %d profiles, it should be equal to %d profiles"%(self.dataIn.type,self.dataIn.data.shape[1],nProfiles)
156 156 self.dataOut.flagNoData = True
157 157 return 0
158 158 else:
159 159 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
160 160 self.profIndex += 1
161 161
162 162 if self.firstdatatime == None:
163 163 self.firstdatatime = self.dataIn.utctime
164 164
165 165 if self.profIndex == nProfiles:
166 166 self.__updateSpecFromVoltage()
167 167 self.__getFft()
168 168
169 169 self.dataOut.flagNoData = False
170 170 self.firstdatatime = None
171 171 self.profIndex = 0
172 172
173 173 return True
174 174
175 175 raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type)
176 176
177 177 def __selectPairs(self, channelList=None):
178 178
179 179 if channelList == None:
180 180 return
181 181
182 182 pairsIndexListSelected = []
183 183 for pairIndex in self.dataOut.pairsIndexList:
184 184 #First pair
185 185 if self.dataOut.pairsList[pairIndex][0] not in channelList:
186 186 continue
187 187 #Second pair
188 188 if self.dataOut.pairsList[pairIndex][1] not in channelList:
189 189 continue
190 190
191 191 pairsIndexListSelected.append(pairIndex)
192 192
193 193 if not pairsIndexListSelected:
194 194 self.dataOut.data_cspc = None
195 195 self.dataOut.pairsList = []
196 196 return
197 197
198 198 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
199 199 self.dataOut.pairsList = [self.dataOut.pairsList[i] for i in pairsIndexListSelected]
200 200
201 201 return
202 202
203 203 def selectChannels(self, channelList):
204 204
205 205 channelIndexList = []
206 206
207 207 for channel in channelList:
208 208 if channel not in self.dataOut.channelList:
209 209 raise ValueError, "Error selecting channels: The value %d in channelList is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))
210 210
211 211 index = self.dataOut.channelList.index(channel)
212 212 channelIndexList.append(index)
213 213
214 214 self.selectChannelsByIndex(channelIndexList)
215 215
216 216 def selectChannelsByIndex(self, channelIndexList):
217 217 """
218 218 Selects a data block based on channels according to channelIndexList
219 219
220 220 Input:
221 221 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
222 222
223 223 Affected:
224 224 self.dataOut.data_spc
225 225 self.dataOut.channelIndexList
226 226 self.dataOut.nChannels
227 227
228 228 Return:
229 229 None
230 230 """
231 231
232 232 for channelIndex in channelIndexList:
233 233 if channelIndex not in self.dataOut.channelIndexList:
234 234 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = %s" %(channelIndex, str(self.dataOut.channelIndexList))
235 235
236 236 # nChannels = len(channelIndexList)
237 237
238 238 data_spc = self.dataOut.data_spc[channelIndexList,:]
239 239 data_dc = self.dataOut.data_dc[channelIndexList,:]
240 240
241 241 self.dataOut.data_spc = data_spc
242 242 self.dataOut.data_dc = data_dc
243 243
244 244 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
245 245 # self.dataOut.nChannels = nChannels
246 246
247 247 self.__selectPairs(self.dataOut.channelList)
248 248
249 249 return 1
250 250
251 251 def selectHeights(self, minHei, maxHei):
252 252 """
253 253 Selects a data block based on a group of height values within the range
254 254 minHei <= height <= maxHei
255 255
256 256 Input:
257 257 minHei : minimum height value to consider
258 258 maxHei : maximum height value to consider
259 259
260 260 Affected:
261 261 Several values are changed indirectly through the selectHeightsByIndex method
262 262
263 263 Return:
264 264 1 if the method executed successfully, otherwise returns 0
265 265 """
266 266
267 267 if (minHei > maxHei):
268 268 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)
269 269
270 270 if (minHei < self.dataOut.heightList[0]):
271 271 minHei = self.dataOut.heightList[0]
272 272
273 273 if (maxHei > self.dataOut.heightList[-1]):
274 274 maxHei = self.dataOut.heightList[-1]
275 275 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
276 276
277 277 minIndex = 0
278 278 maxIndex = 0
279 279 heights = self.dataOut.heightList
280 280
281 281 inda = numpy.where(heights >= minHei)
282 282 indb = numpy.where(heights <= maxHei)
283 283
284 284 try:
285 285 minIndex = inda[0][0]
286 286 except:
287 287 minIndex = 0
288 288
289 289 try:
290 290 maxIndex = indb[0][-1]
291 291 except:
292 292 maxIndex = len(heights)
293 293
294 294 self.selectHeightsByIndex(minIndex, maxIndex)
295 295
296 296 return 1
297 297
298 298 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
299 299 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
300 300
301 301 if hei_ref != None:
302 302 newheis = numpy.where(self.dataOut.heightList>hei_ref)
303 303
304 304 minIndex = min(newheis[0])
305 305 maxIndex = max(newheis[0])
306 306 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
307 307 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
308 308
309 309 # determine indexes
310 310 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
311 311 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
312 312 beacon_dB = numpy.sort(avg_dB)[-nheis:]
313 313 beacon_heiIndexList = []
314 314 for val in avg_dB.tolist():
315 315 if val >= beacon_dB[0]:
316 316 beacon_heiIndexList.append(avg_dB.tolist().index(val))
317 317
318 318 #data_spc = data_spc[:,:,beacon_heiIndexList]
319 319 data_cspc = None
320 320 if self.dataOut.data_cspc is not None:
321 321 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
322 322 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
323 323
324 324 data_dc = None
325 325 if self.dataOut.data_dc is not None:
326 326 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
327 327 #data_dc = data_dc[:,beacon_heiIndexList]
328 328
329 329 self.dataOut.data_spc = data_spc
330 330 self.dataOut.data_cspc = data_cspc
331 331 self.dataOut.data_dc = data_dc
332 332 self.dataOut.heightList = heightList
333 333 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
334 334
335 335 return 1
336 336
337 337
338 338 def selectHeightsByIndex(self, minIndex, maxIndex):
339 339 """
340 340 Selects a data block based on a group of height indexes within the range
341 341 minIndex <= index <= maxIndex
342 342
343 343 Input:
344 344 minIndex : minimum height index to consider
345 345 maxIndex : maximum height index to consider
346 346
347 347 Affected:
348 348 self.dataOut.data_spc
349 349 self.dataOut.data_cspc
350 350 self.dataOut.data_dc
351 351 self.dataOut.heightList
352 352
353 353 Return:
354 354 1 if the method executed successfully, otherwise returns 0
355 355 """
356 356
357 357 if (minIndex < 0) or (minIndex > maxIndex):
358 358 raise ValueError, "Error selecting heights by index: Index range in (%d,%d) is not valid" % (minIndex, maxIndex)
359 359
360 360 if (maxIndex >= self.dataOut.nHeights):
361 361 maxIndex = self.dataOut.nHeights-1
362 362 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
363 363
364 364 # nHeights = maxIndex - minIndex + 1
365 365
366 366 #Spectra
367 367 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
368 368
369 369 data_cspc = None
370 370 if self.dataOut.data_cspc is not None:
371 371 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
372 372
373 373 data_dc = None
374 374 if self.dataOut.data_dc is not None:
375 375 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
376 376
377 377 self.dataOut.data_spc = data_spc
378 378 self.dataOut.data_cspc = data_cspc
379 379 self.dataOut.data_dc = data_dc
380 380
381 381 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
382 382
383 383 return 1
384 384
385 385 def removeDC(self, mode = 2):
386 386 jspectra = self.dataOut.data_spc
387 387 jcspectra = self.dataOut.data_cspc
388 388
389 389
390 390 num_chan = jspectra.shape[0]
391 391 num_hei = jspectra.shape[2]
392 392
393 393 if jcspectra is not None:
394 394 jcspectraExist = True
395 395 num_pairs = jcspectra.shape[0]
396 396 else: jcspectraExist = False
397 397
398 398 freq_dc = jspectra.shape[1]/2
399 399 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
400 400
401 401 if ind_vel[0]<0:
402 402 ind_vel[range(0,1)] = ind_vel[range(0,1)] + jspectra.shape[1] # wrap a negative index by the number of profiles
403 403
404 404 if mode == 1:
405 405 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
406 406
407 407 if jcspectraExist:
408 408 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
409 409
410 410 if mode == 2:
411 411
412 412 vel = numpy.array([-2,-1,1,2])
413 413 xx = numpy.zeros([4,4])
414 414
415 415 for fil in range(4):
416 416 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
417 417
418 418 xx_inv = numpy.linalg.inv(xx)
419 419 xx_aux = xx_inv[0,:]
420 420
421 421 for ich in range(num_chan):
422 422 yy = jspectra[ich,ind_vel,:]
423 423 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
424 424
425 425 junkid = jspectra[ich,freq_dc,:]<=0
426 426 cjunkid = sum(junkid)
427 427
428 428 if cjunkid.any():
429 429 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
430 430
431 431 if jcspectraExist:
432 432 for ip in range(num_pairs):
433 433 yy = jcspectra[ip,ind_vel,:]
434 434 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
435 435
436 436
437 437 self.dataOut.data_spc = jspectra
438 438 self.dataOut.data_cspc = jcspectra
439 439
440 440 return 1
441 441
442 442 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
443 443
444 444 jspectra = self.dataOut.data_spc
445 445 jcspectra = self.dataOut.data_cspc
446 446 jnoise = self.dataOut.getNoise()
447 447 num_incoh = self.dataOut.nIncohInt
448 448
449 449 num_channel = jspectra.shape[0]
450 450 num_prof = jspectra.shape[1]
451 451 num_hei = jspectra.shape[2]
452 452
453 453 #hei_interf
454 454 if hei_interf is None:
455 455 count_hei = num_hei/2 #integer division, the remainder does not matter
456 456 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
457 457 hei_interf = numpy.asarray(hei_interf)[0]
458 458 #nhei_interf
459 459 if (nhei_interf == None):
460 460 nhei_interf = 5
461 461 if (nhei_interf < 1):
462 462 nhei_interf = 1
463 463 if (nhei_interf > count_hei):
464 464 nhei_interf = count_hei
465 465 if (offhei_interf == None):
466 466 offhei_interf = 0
467 467
468 468 ind_hei = range(num_hei)
469 469 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
470 470 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
471 471 mask_prof = numpy.asarray(range(num_prof))
472 472 num_mask_prof = mask_prof.size
473 473 comp_mask_prof = [0, num_prof/2]
474 474
475 475
476 476 #noise_exist: determines whether the jnoise variable has been defined and contains the noise information for each channel
477 477 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
478 478 jnoise = numpy.nan
479 479 noise_exist = jnoise[0] < numpy.Inf
480 480
481 481 #Interference removal subroutine
482 482 for ich in range(num_channel):
483 483 #Sort the spectra by their power (lowest to highest)
484 484 power = jspectra[ich,mask_prof,:]
485 485 power = power[:,hei_interf]
486 486 power = power.sum(axis = 0)
487 487 psort = power.ravel().argsort()
488 488
489 489 #Estimate the average interference in the power spectra using
490 490 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
491 491
492 492 if noise_exist:
493 493 # tmp_noise = jnoise[ich] / num_prof
494 494 tmp_noise = jnoise[ich]
495 495 junkspc_interf = junkspc_interf - tmp_noise
496 496 #junkspc_interf[:,comp_mask_prof] = 0
497 497
498 498 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
499 499 jspc_interf = jspc_interf.transpose()
500 500 #Computing the average interference spectrum
501 501 noiseid = numpy.where(jspc_interf <= tmp_noise/ math.sqrt(num_incoh))
502 502 noiseid = noiseid[0]
503 503 cnoiseid = noiseid.size
504 504 interfid = numpy.where(jspc_interf > tmp_noise/ math.sqrt(num_incoh))
505 505 interfid = interfid[0]
506 506 cinterfid = interfid.size
507 507
508 508 if (cnoiseid > 0): jspc_interf[noiseid] = 0
509 509
510 510 #Expanding the profiles to be cleaned
511 511 if (cinterfid > 0):
512 512 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
513 513 new_interfid = numpy.asarray(new_interfid)
514 514 new_interfid = {x for x in new_interfid}
515 515 new_interfid = numpy.array(list(new_interfid))
516 516 new_cinterfid = new_interfid.size
517 517 else: new_cinterfid = 0
518 518
519 519 for ip in range(new_cinterfid):
520 520 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
521 521 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
522 522
523 523
524 524 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #Fix indexes
525 525
526 526 #Removing the interference at the point of highest interference
527 527 ListAux = jspc_interf[mask_prof].tolist()
528 528 maxid = ListAux.index(max(ListAux))
529 529
530 530
531 531 if cinterfid > 0:
532 532 for ip in range(cinterfid*(interf == 2) - 1):
533 533 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/math.sqrt(num_incoh))).nonzero()
534 534 cind = len(ind)
535 535
536 536 if (cind > 0):
537 537 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/math.sqrt(num_incoh))
538 538
539 539 ind = numpy.array([-2,-1,1,2])
540 540 xx = numpy.zeros([4,4])
541 541
542 542 for id1 in range(4):
543 543 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
544 544
545 545 xx_inv = numpy.linalg.inv(xx)
546 546 xx = xx_inv[:,0]
547 547 ind = (ind + maxid + num_mask_prof)%num_mask_prof
548 548 yy = jspectra[ich,mask_prof[ind],:]
549 549 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
550 550
551 551
552 552 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/math.sqrt(num_incoh))).nonzero()
553 553 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/math.sqrt(num_incoh))
554 554
555 555 #Interference removal in the Cross Spectra
556 556 if jcspectra is None: return jspectra, jcspectra
557 557 num_pairs = jcspectra.size/(num_prof*num_hei)
558 558 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
559 559
560 560 for ip in range(num_pairs):
561 561
562 562 #-------------------------------------------
563 563
564 564 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
565 565 cspower = cspower[:,hei_interf]
566 566 cspower = cspower.sum(axis = 0)
567 567
568 568 cspsort = cspower.ravel().argsort()
569 569 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
570 570 junkcspc_interf = junkcspc_interf.transpose()
571 571 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
572 572
573 573 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
574 574
575 575 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
576 576 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
577 577 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
578 578
579 579 for iprof in range(num_prof):
580 580 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
581 581 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
582 582
583 583 #Removing the interference
584 584 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
585 585
586 586 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
587 587 maxid = ListAux.index(max(ListAux))
588 588
589 589 ind = numpy.array([-2,-1,1,2])
590 590 xx = numpy.zeros([4,4])
591 591
592 592 for id1 in range(4):
593 593 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
594 594
595 595 xx_inv = numpy.linalg.inv(xx)
596 596 xx = xx_inv[:,0]
597 597
598 598 ind = (ind + maxid + num_mask_prof)%num_mask_prof
599 599 yy = jcspectra[ip,mask_prof[ind],:]
600 600 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
601 601
602 602 #Save results
603 603 self.dataOut.data_spc = jspectra
604 604 self.dataOut.data_cspc = jcspectra
605 605
606 606 return 1
607 607
608 608 def setRadarFrequency(self, frequency=None):
609
609 610 if frequency != None:
610 611 self.dataOut.frequency = frequency
611 612
612 613 return 1
613 614
614 615 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
615 616 #height range validation
616 617 if minHei == None:
617 618 minHei = self.dataOut.heightList[0]
618 619
619 620 if maxHei == None:
620 621 maxHei = self.dataOut.heightList[-1]
621 622
622 623 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
623 624 print 'minHei: %.2f is out of the heights range'%(minHei)
624 625 print 'minHei is set to %.2f'%(self.dataOut.heightList[0])
625 626 minHei = self.dataOut.heightList[0]
626 627
627 628 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
628 629 print 'maxHei: %.2f is out of the heights range'%(maxHei)
629 630 print 'maxHei is set to %.2f'%(self.dataOut.heightList[-1])
630 631 maxHei = self.dataOut.heightList[-1]
631 632
632 633 # velocity range validation
633 634 velrange = self.dataOut.getVelRange(1)
634 635
635 636 if minVel == None:
636 637 minVel = velrange[0]
637 638
638 639 if maxVel == None:
639 640 maxVel = velrange[-1]
640 641
641 642 if (minVel < velrange[0]) or (minVel > maxVel):
642 643 print 'minVel: %.2f is out of the velocity range'%(minVel)
643 644 print 'minVel is set to %.2f'%(velrange[0])
644 645 minVel = velrange[0]
645 646
646 647 if (maxVel > velrange[-1]) or (maxVel < minVel):
647 648 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
648 649 print 'maxVel is set to %.2f'%(velrange[-1])
649 650 maxVel = velrange[-1]
650 651
651 652 # index selection for the height range
652 653 minIndex = 0
653 654 maxIndex = 0
654 655 heights = self.dataOut.heightList
655 656
656 657 inda = numpy.where(heights >= minHei)
657 658 indb = numpy.where(heights <= maxHei)
658 659
659 660 try:
660 661 minIndex = inda[0][0]
661 662 except:
662 663 minIndex = 0
663 664
664 665 try:
665 666 maxIndex = indb[0][-1]
666 667 except:
667 668 maxIndex = len(heights)
668 669
669 670 if (minIndex < 0) or (minIndex > maxIndex):
670 671 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
671 672
672 673 if (maxIndex >= self.dataOut.nHeights):
673 674 maxIndex = self.dataOut.nHeights-1
674 675
675 676 # index selection for the velocity range
676 677 indminvel = numpy.where(velrange >= minVel)
677 678 indmaxvel = numpy.where(velrange <= maxVel)
678 679 try:
679 680 minIndexVel = indminvel[0][0]
680 681 except:
681 682 minIndexVel = 0
682 683
683 684 try:
684 685 maxIndexVel = indmaxvel[0][-1]
685 686 except:
686 687 maxIndexVel = len(velrange)
687 688
688 689 #spectrum selection
689 690 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
690 691 #noise estimation
691 692 noise = numpy.zeros(self.dataOut.nChannels)
692 693
693 694 for channel in range(self.dataOut.nChannels):
694 695 daux = data_spc[channel,:,:]
695 696 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
696 697
697 698 self.dataOut.noise_estimation = noise.copy()
698 699
699 700 return 1
700 701
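getNoise clips the requested limits to the available height and velocity ranges, converts them to array indices with numpy.where, and then runs the Hildebrand-Sekhon estimator on the selected slab, one channel at a time. A minimal sketch of the index-selection step on synthetic axes (all names and values are illustrative):

import numpy

heights = numpy.arange(100.0, 1000.0, 15.0)        # synthetic height axis (km)
minHei, maxHei = 200.0, 600.0

inda = numpy.where(heights >= minHei)
indb = numpy.where(heights <= maxHei)

minIndex = inda[0][0] if inda[0].size else 0
maxIndex = indb[0][-1] if indb[0].size else len(heights) - 1

# synthetic spectra: [nChannels, nVelocities, nHeights]
data_spc = numpy.random.rand(2, 64, heights.size)
slab = data_spc[:, :, minIndex:maxIndex + 1]       # slab handed to the noise estimator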
701 702 class IncohInt(Operation):
702 703
703 704
704 705 __profIndex = 0
705 706 __withOverapping = False
706 707
707 708 __byTime = False
708 709 __initime = None
709 710 __lastdatatime = None
710 711 __integrationtime = None
711 712
712 713 __buffer_spc = None
713 714 __buffer_cspc = None
714 715 __buffer_dc = None
715 716
716 717 __dataReady = False
717 718
718 719 __timeInterval = None
719 720
720 721 n = None
721 722
722 723
723 724
724 725 def __init__(self):
725 726
726 727 Operation.__init__(self)
727 728 # self.isConfig = False
728 729
729 730 def setup(self, n=None, timeInterval=None, overlapping=False):
730 731 """
731 732 Set the parameters of the integration class.
732 733
733 734 Inputs:
734 735
735 736 n : Number of incoherent integrations
736 737 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
737 738 overlapping :
738 739
739 740 """
740 741
741 742 self.__initime = None
742 743 self.__lastdatatime = 0
743 744
744 745 self.__buffer_spc = 0
745 746 self.__buffer_cspc = 0
746 747 self.__buffer_dc = 0
747 748
748 749 self.__profIndex = 0
749 750 self.__dataReady = False
750 751 self.__byTime = False
751 752
752 753 if n is None and timeInterval is None:
753 754 raise ValueError, "n or timeInterval should be specified ..."
754 755
755 756 if n is not None:
756 757 self.n = int(n)
757 758 else:
758 759 self.__integrationtime = int(timeInterval) #if (type(timeInterval)!=integer) -> change this line
759 760 self.n = None
760 761 self.__byTime = True
761 762
762 763 def putData(self, data_spc, data_cspc, data_dc):
763 764
764 765 """
765 766 Add a profile to __buffer_spc and increase __profIndex by one
766 767
767 768 """
768 769
769 770 self.__buffer_spc += data_spc
770 771
771 772 if data_cspc is None:
772 773 self.__buffer_cspc = None
773 774 else:
774 775 self.__buffer_cspc += data_cspc
775 776
776 777 if data_dc is None:
777 778 self.__buffer_dc = None
778 779 else:
779 780 self.__buffer_dc += data_dc
780 781
781 782 self.__profIndex += 1
782 783
783 784 return
784 785
785 786 def pushData(self):
786 787 """
787 788 Return the sum of the buffered profiles and the number of profiles used in the sum.
788 789
789 790 Affected:
790 791
791 792 self.__profileIndex
792 793
793 794 """
794 795
795 796 data_spc = self.__buffer_spc
796 797 data_cspc = self.__buffer_cspc
797 798 data_dc = self.__buffer_dc
798 799 n = self.__profIndex
799 800
800 801 self.__buffer_spc = 0
801 802 self.__buffer_cspc = 0
802 803 self.__buffer_dc = 0
803 804 self.__profIndex = 0
804 805
805 806 return data_spc, data_cspc, data_dc, n
806 807
807 808 def byProfiles(self, *args):
808 809
809 810 self.__dataReady = False
810
811 avgdata_spc = None
812 avgdata_cspc = None
813 avgdata_dc = None
814
811 815 self.putData(*args)
812 816
813 817 if self.__profIndex == self.n:
814 818
815 819 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
816 820 self.n = n
817 821 self.__dataReady = True
818 822
819 823 return avgdata_spc, avgdata_cspc, avgdata_dc
820 824
821 825 def byTime(self, datatime, *args):
822 826
823 827 self.__dataReady = False
828 avgdata_spc = None
829 avgdata_cspc = None
830 avgdata_dc = None
824 831
825 832 self.putData(*args)
826 833
827 834 if (datatime - self.__initime) >= self.__integrationtime:
828 835 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
829 836 self.n = n
830 837 self.__dataReady = True
831 838
832 839 return avgdata_spc, avgdata_cspc, avgdata_dc
833 840
834 841 def integrate(self, datatime, *args):
835 842
836 843 if self.__profIndex == 0:
837 844 self.__initime = datatime
838 845
839 846 if self.__byTime:
840 847 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
841 848 else:
842 849 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
843 850
844 851 if not self.__dataReady:
845 852 return None, None, None, None
846 853
847 854 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
848 855
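IncohInt only accumulates the incoming spectra (by profile count n or by elapsed time) and reports how many were summed, so that nIncohInt can be rescaled downstream. A standalone sketch of the statistical effect on synthetic white-noise power spectra (illustrative values):

import numpy

n, nFFT, nHeights = 16, 64, 100

# n successive power spectra of complex white noise
spectra = numpy.abs(numpy.random.randn(n, nFFT, nHeights) +
                    1j*numpy.random.randn(n, nFFT, nHeights))**2

integrated = spectra.sum(axis=0)                   # what pushData() returns (plus the count n)
mean_spc = integrated / n

# the relative fluctuation of the averaged spectrum drops roughly as 1/sqrt(n)
relative_std = mean_spc.std() / mean_spc.mean()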
849 856 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
850 857
851 858 if n==1:
852 859 return
853 860
854 861 dataOut.flagNoData = True
855 862
856 863 if not self.isConfig:
857 864 self.setup(n, timeInterval, overlapping)
858 865 self.isConfig = True
859 866
860 867 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
861 868 dataOut.data_spc,
862 869 dataOut.data_cspc,
863 870 dataOut.data_dc)
864 871
865 872 if self.__dataReady:
866 873
867 874 dataOut.data_spc = avgdata_spc
868 875 dataOut.data_cspc = avgdata_cspc
869 876 dataOut.data_dc = avgdata_dc
870 877
871 878 dataOut.nIncohInt *= self.n
872 879 dataOut.utctime = avgdatatime
873 880 dataOut.flagNoData = False
@@ -1,1055 +1,1065
1 1 import numpy
2 2
3 3 from jroproc_base import ProcessingUnit, Operation
4 4 from schainpy.model.data.jrodata import Voltage
5 5
6 6 class VoltageProc(ProcessingUnit):
7 7
8 8
9 9 def __init__(self):
10 10
11 11 ProcessingUnit.__init__(self)
12 12
13 13 # self.objectDict = {}
14 14 self.dataOut = Voltage()
15 15 self.flip = 1
16 16
17 17 def run(self):
18 18 if self.dataIn.type == 'AMISR':
19 19 self.__updateObjFromAmisrInput()
20 20
21 21 if self.dataIn.type == 'Voltage':
22 22 self.dataOut.copy(self.dataIn)
23 23
24 24 # self.dataOut.copy(self.dataIn)
25 25
26 26 def __updateObjFromAmisrInput(self):
27 27
28 28 self.dataOut.timeZone = self.dataIn.timeZone
29 29 self.dataOut.dstFlag = self.dataIn.dstFlag
30 30 self.dataOut.errorCount = self.dataIn.errorCount
31 31 self.dataOut.useLocalTime = self.dataIn.useLocalTime
32 32
33 33 self.dataOut.flagNoData = self.dataIn.flagNoData
34 34 self.dataOut.data = self.dataIn.data
35 35 self.dataOut.utctime = self.dataIn.utctime
36 36 self.dataOut.channelList = self.dataIn.channelList
37 37 # self.dataOut.timeInterval = self.dataIn.timeInterval
38 38 self.dataOut.heightList = self.dataIn.heightList
39 39 self.dataOut.nProfiles = self.dataIn.nProfiles
40 40
41 41 self.dataOut.nCohInt = self.dataIn.nCohInt
42 42 self.dataOut.ippSeconds = self.dataIn.ippSeconds
43 43 self.dataOut.frequency = self.dataIn.frequency
44 44
45 45 self.dataOut.azimuth = self.dataIn.azimuth
46 46 self.dataOut.zenith = self.dataIn.zenith
47 47
48 48 self.dataOut.beam.codeList = self.dataIn.beam.codeList
49 49 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
50 50 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
51 51 #
52 52 # pass#
53 53 #
54 54 # def init(self):
55 55 #
56 56 #
57 57 # if self.dataIn.type == 'AMISR':
58 58 # self.__updateObjFromAmisrInput()
59 59 #
60 60 # if self.dataIn.type == 'Voltage':
61 61 # self.dataOut.copy(self.dataIn)
62 62 # # There is no need to copy the dataIn attributes on every init()
63 63 # # the copy should be done once per new data block
64 64
65 65 def selectChannels(self, channelList):
66 66
67 67 channelIndexList = []
68 68
69 69 for channel in channelList:
70 70 if channel not in self.dataOut.channelList:
71 71 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
72 72
73 73 index = self.dataOut.channelList.index(channel)
74 74 channelIndexList.append(index)
75 75
76 76 self.selectChannelsByIndex(channelIndexList)
77 77
78 78 def selectChannelsByIndex(self, channelIndexList):
79 79 """
80 80 Select a block of data by channel, according to channelIndexList
81 81
82 82 Input:
83 83 channelIndexList : plain list of channels to select, e.g. [2,3,7]
84 84
85 85 Affected:
86 86 self.dataOut.data
87 87 self.dataOut.channelIndexList
88 88 self.dataOut.nChannels
89 89 self.dataOut.m_ProcessingHeader.totalSpectra
90 90 self.dataOut.systemHeaderObj.numChannels
91 91 self.dataOut.m_ProcessingHeader.blockSize
92 92
93 93 Return:
94 94 None
95 95 """
96 96
97 97 for channelIndex in channelIndexList:
98 98 if channelIndex not in self.dataOut.channelIndexList:
99 99 print channelIndexList
100 100 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
101 101
102 102 # nChannels = len(channelIndexList)
103 103 if self.dataOut.flagDataAsBlock:
104 104 """
105 105 If the data was read as blocks, dimension = [nChannels, nProfiles, nHeis]
106 106 """
107 107 data = self.dataOut.data[channelIndexList,:,:]
108 108 else:
109 109 data = self.dataOut.data[channelIndexList,:]
110 110
111 111 self.dataOut.data = data
112 112 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
113 113 # self.dataOut.nChannels = nChannels
114 114
115 115 return 1
116 116
117 117 def selectHeights(self, minHei=None, maxHei=None):
118 118 """
119 119 Select a block of data by a group of height values within the range
120 120 minHei <= height <= maxHei
121 121
122 122 Input:
123 123 minHei : minimum height value to consider
124 124 maxHei : maximum height value to consider
125 125
126 126 Affected:
127 127 Several values are changed indirectly through the selectHeightsByIndex method
128 128
129 129 Return:
130 130 1 if the method runs successfully, otherwise 0
131 131 """
132 132
133 133 if minHei == None:
134 134 minHei = self.dataOut.heightList[0]
135 135
136 136 if maxHei == None:
137 137 maxHei = self.dataOut.heightList[-1]
138 138
139 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
140 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
141
139 if (minHei < self.dataOut.heightList[0]):
140 minHei = self.dataOut.heightList[0]
141 # raise ValueError, "height range [%d,%d] is not valid. Data height range is [%d, %d]" % (minHei,
142 # maxHei,
143 # self.dataOut.heightList[0],
144 # self.dataOut.heightList[-1])
142 145
143 146 if (maxHei > self.dataOut.heightList[-1]):
144 147 maxHei = self.dataOut.heightList[-1]
145 148 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
146 149
147 150 minIndex = 0
148 151 maxIndex = 0
149 152 heights = self.dataOut.heightList
150 153
151 154 inda = numpy.where(heights >= minHei)
152 155 indb = numpy.where(heights <= maxHei)
153 156
154 157 try:
155 158 minIndex = inda[0][0]
156 159 except:
157 160 minIndex = 0
158 161
159 162 try:
160 163 maxIndex = indb[0][-1]
161 164 except:
162 165 maxIndex = len(heights)
163 166
164 167 self.selectHeightsByIndex(minIndex, maxIndex)
165 168
166 169 return 1
167 170
168 171
169 172 def selectHeightsByIndex(self, minIndex, maxIndex):
170 173 """
171 174 Select a block of data by a group of height indexes within the range
172 175 minIndex <= index <= maxIndex
173 176
174 177 Input:
175 178 minIndex : minimum height index to consider
176 179 maxIndex : maximum height index to consider
177 180
178 181 Affected:
179 182 self.dataOut.data
180 183 self.dataOut.heightList
181 184
182 185 Return:
183 186 1 if the method runs successfully, otherwise 0
184 187 """
185 188
186 189 if (minIndex < 0) or (minIndex > maxIndex):
187 190 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
188 191
189 192 if (maxIndex >= self.dataOut.nHeights):
190 193 maxIndex = self.dataOut.nHeights
191 194 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
192 195
193 196 # nHeights = maxIndex - minIndex + 1
194 197
195 198 #voltage
196 199 if self.dataOut.flagDataAsBlock:
197 200 """
198 201 If the data was read as blocks, dimension = [nChannels, nProfiles, nHeis]
199 202 """
200 203 data = self.dataOut.data[:,minIndex:maxIndex,:]
201 204 else:
202 205 data = self.dataOut.data[:,minIndex:maxIndex]
203 206
204 207 # firstHeight = self.dataOut.heightList[minIndex]
205 208
206 209 self.dataOut.data = data
207 210 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
208 211
209 212 if self.dataOut.nHeights <= 1:
210 213 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
211 214
212 215 return 1
213 216
214 217
215 218 def filterByHeights(self, window):
216 219
217 220 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
218 221
219 222 if window == None:
220 223 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
221 224
222 225 newdelta = deltaHeight * window
223 226 r = self.dataOut.nHeights % window
224 227 newheights = (self.dataOut.nHeights-r)/window
225 228
226 229 if newheights <= 1:
227 230 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
228 231
229 232 if self.dataOut.flagDataAsBlock:
230 233 """
231 234 If the data was read as blocks, dimension = [nChannels, nProfiles, nHeis]
232 235 """
233 236 buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
234 237 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
235 238 buffer = numpy.sum(buffer,3)
236 239
237 240 else:
238 241 buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
239 242 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
240 243 buffer = numpy.sum(buffer,2)
241 244
242 245 self.dataOut.data = buffer
243 246 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
244 247 self.dataOut.windowOfFilter = window
245 248
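filterByHeights drops the trailing r = nHeights % window samples, reshapes the height axis into groups of `window` samples and sums inside each group, which coarsens the height grid to deltaHeight*window. A minimal standalone sketch of the same reshape-and-sum for the profile-by-profile case (illustrative values):

import numpy

nChannels, nHeights, window = 2, 103, 4
data = numpy.random.rand(nChannels, nHeights)

r = nHeights % window                              # samples that do not fill a whole window
buf = data[:, :nHeights - r]
buf = buf.reshape(nChannels, (nHeights - r)//window, window)
decimated = buf.sum(axis=2)                        # one integrated sample per window
# decimated.shape -> (2, 25)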
246 249 def setH0(self, h0, deltaHeight = None):
247 250
248 251 if not deltaHeight:
249 252 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
250 253
251 254 nHeights = self.dataOut.nHeights
252 255
253 256 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
254 257
255 258 self.dataOut.heightList = newHeiRange
256 259
257 260 def deFlip(self, channelList = []):
258 261
259 262 data = self.dataOut.data.copy()
260 263
261 264 if self.dataOut.flagDataAsBlock:
262 265 flip = self.flip
263 266 profileList = range(self.dataOut.nProfiles)
264 267
265 268 if not channelList:
266 269 for thisProfile in profileList:
267 270 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
268 271 flip *= -1.0
269 272 else:
270 273 for thisChannel in channelList:
271 274 if thisChannel not in self.dataOut.channelList:
272 275 continue
273 276
274 277 for thisProfile in profileList:
275 278 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
276 279 flip *= -1.0
277 280
278 281 self.flip = flip
279 282
280 283 else:
281 284 if not channelList:
282 285 data[:,:] = data[:,:]*self.flip
283 286 else:
284 287 for thisChannel in channelList:
285 288 if thisChannel not in self.dataOut.channelList:
286 289 continue
287 290
288 291 data[thisChannel,:] = data[thisChannel,:]*self.flip
289 292
290 293 self.flip *= -1.
291 294
292 295 self.dataOut.data = data
293 296
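deFlip multiplies consecutive profiles by alternating +1/-1 signs (optionally only on selected channels) and keeps the running sign in self.flip between calls. For block data the same alternation can be written, as a sketch, with a vectorized sign sequence (illustrative names):

import numpy

nChannels, nProfiles, nHeights = 2, 8, 50
data = numpy.random.rand(nChannels, nProfiles, nHeights) + 0j

flip0 = 1.0                                        # sign carried over from the previous block
signs = flip0 * (-1.0)**numpy.arange(nProfiles)    # +1, -1, +1, ...
deflipped = data * signs[numpy.newaxis, :, numpy.newaxis]

next_flip = flip0 * (-1.0)**nProfiles              # sign to carry into the next block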
294 297 def setRadarFrequency(self, frequency=None):
295 298
296 299 if frequency != None:
297 300 self.dataOut.frequency = frequency
298 301
299 302 return 1
300 303
301 304 class CohInt(Operation):
302 305
303 306 isConfig = False
304 307
305 308 __profIndex = 0
306 309 __withOverapping = False
307 310
308 311 __byTime = False
309 312 __initime = None
310 313 __lastdatatime = None
311 314 __integrationtime = None
312 315
313 316 __buffer = None
314 317
315 318 __dataReady = False
316 319
317 320 n = None
318 321
319 322
320 323 def __init__(self):
321 324
322 325 Operation.__init__(self)
323 326
324 327 # self.isConfig = False
325 328
326 329 def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
327 330 """
328 331 Set the parameters of the integration class.
329 332
330 333 Inputs:
331 334
332 335 n : Number of coherent integrations
333 336 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
334 337 overlapping :
335 338
336 339 """
337 340
338 341 self.__initime = None
339 342 self.__lastdatatime = 0
340 343 self.__buffer = None
341 344 self.__dataReady = False
342 345 self.byblock = byblock
343 346
344 347 if n == None and timeInterval == None:
345 348 raise ValueError, "n or timeInterval should be specified ..."
346 349
347 350 if n != None:
348 351 self.n = n
349 352 self.__byTime = False
350 353 else:
351 354 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
352 355 self.n = 9999
353 356 self.__byTime = True
354 357
355 358 if overlapping:
356 359 self.__withOverapping = True
357 360 self.__buffer = None
358 361 else:
359 362 self.__withOverapping = False
360 363 self.__buffer = 0
361 364
362 365 self.__profIndex = 0
363 366
364 367 def putData(self, data):
365 368
366 369 """
367 370 Add a profile to __buffer and increase __profIndex by one
368 371
369 372 """
370 373
371 374 if not self.__withOverapping:
372 375 self.__buffer += data.copy()
373 376 self.__profIndex += 1
374 377 return
375 378
376 379 #Overlapping data
377 380 nChannels, nHeis = data.shape
378 381 data = numpy.reshape(data, (1, nChannels, nHeis))
379 382
380 383 #If the buffer is empty then it takes the data value
381 384 if self.__buffer is None:
382 385 self.__buffer = data
383 386 self.__profIndex += 1
384 387 return
385 388
386 389 #If the buffer holds fewer than n profiles, stack the new data on top
387 390 if self.__profIndex < self.n:
388 391 self.__buffer = numpy.vstack((self.__buffer, data))
389 392 self.__profIndex += 1
390 393 return
391 394
392 395 #If the buffer length is equal to n then replacing the last buffer value with the data value
393 396 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
394 397 self.__buffer[self.n-1] = data
395 398 self.__profIndex = self.n
396 399 return
397 400
398 401
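With overlapping enabled, putData keeps the last n raw profiles stacked along a new leading axis: profiles are appended until n are held, then numpy.roll discards the oldest and the newest overwrites the freed slot, so pushData can deliver a sliding sum over the most recent n profiles. A minimal sketch of that buffering logic (illustrative names and sizes):

import numpy

n, nChannels, nHeights = 4, 2, 16

def put(buf, profile):
    # profile: [nChannels, nHeights]; keep at most the last n profiles
    profile = profile[numpy.newaxis]               # -> [1, nChannels, nHeights]
    if buf is None:                                # empty buffer: take the first profile
        return profile
    if buf.shape[0] < n:                           # still filling: stack the new profile
        return numpy.vstack((buf, profile))
    buf = numpy.roll(buf, -1, axis=0)              # full: discard the oldest profile
    buf[-1] = profile[0]                           # overwrite the freed slot
    return buf

buf = None
for k in range(10):
    buf = put(buf, numpy.random.rand(nChannels, nHeights) + 0j)

sliding_sum = buf.sum(axis=0)                      # coherent sum of the last n profiles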
399 402 def pushData(self):
400 403 """
401 404 Return the sum of the buffered profiles and the number of profiles used in the sum.
402 405
403 406 Affected:
404 407
405 408 self.__profileIndex
406 409
407 410 """
408 411
409 412 if not self.__withOverapping:
410 413 data = self.__buffer
411 414 n = self.__profIndex
412 415
413 416 self.__buffer = 0
414 417 self.__profIndex = 0
415 418
416 419 return data, n
417 420
418 421 #Integration with Overlapping
419 422 data = numpy.sum(self.__buffer, axis=0)
420 423 n = self.__profIndex
421 424
422 425 return data, n
423 426
424 427 def byProfiles(self, data):
425 428
426 429 self.__dataReady = False
427 430 avgdata = None
428 431 # n = None
429 432
430 433 self.putData(data)
431 434
432 435 if self.__profIndex == self.n:
433 436
434 437 avgdata, n = self.pushData()
435 438 self.__dataReady = True
436 439
437 440 return avgdata
438 441
439 442 def byTime(self, data, datatime):
440 443
441 444 self.__dataReady = False
442 445 avgdata = None
443 446 n = None
444 447
445 448 self.putData(data)
446 449
447 450 if (datatime - self.__initime) >= self.__integrationtime:
448 451 avgdata, n = self.pushData()
449 452 self.n = n
450 453 self.__dataReady = True
451 454
452 455 return avgdata
453 456
454 457 def integrate(self, data, datatime=None):
455 458
456 459 if self.__initime == None:
457 460 self.__initime = datatime
458 461
459 462 if self.__byTime:
460 463 avgdata = self.byTime(data, datatime)
461 464 else:
462 465 avgdata = self.byProfiles(data)
463 466
464 467
465 468 self.__lastdatatime = datatime
466 469
467 470 if avgdata is None:
468 471 return None, None
469 472
470 473 avgdatatime = self.__initime
471 474
472 475 deltatime = datatime -self.__lastdatatime
473 476
474 477 if not self.__withOverapping:
475 478 self.__initime = datatime
476 479 else:
477 480 self.__initime += deltatime
478 481
479 482 return avgdata, avgdatatime
480 483
481 484 def integrateByBlock(self, dataOut):
482 485
483 486 times = int(dataOut.data.shape[1]/self.n)
484 487 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
485 488
486 489 id_min = 0
487 490 id_max = self.n
488 491
489 492 for i in range(times):
490 493 junk = dataOut.data[:,id_min:id_max,:]
491 494 avgdata[:,i,:] = junk.sum(axis=1)
492 495 id_min += self.n
493 496 id_max += self.n
494 497
495 498 timeInterval = dataOut.ippSeconds*self.n
496 499 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
497 500 self.__dataReady = True
498 501 return avgdata, avgdatatime
499 502
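integrateByBlock sums every group of n consecutive profiles inside a block; the explicit loop over `times` is equivalent, as a sketch, to a reshape along the profile axis followed by a sum (illustrative sizes):

import numpy

nChannels, nProfiles, nHeights, n = 2, 128, 200, 8
data = (numpy.random.randn(nChannels, nProfiles, nHeights) +
        1j*numpy.random.randn(nChannels, nProfiles, nHeights))

times = nProfiles // n                             # number of integrated profiles per block
avgdata = data[:, :times*n, :].reshape(nChannels, times, n, nHeights).sum(axis=2)
# avgdata.shape -> (2, 16, 200)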
500 503 def run(self, dataOut, **kwargs):
501 504
502 505 if not self.isConfig:
503 506 self.setup(**kwargs)
504 507 self.isConfig = True
505 508
506 509 if dataOut.flagDataAsBlock:
507 510 """
508 511 If the data was read as blocks, dimension = [nChannels, nProfiles, nHeis]
509 512 """
510 513 avgdata, avgdatatime = self.integrateByBlock(dataOut)
511 514 else:
512 515 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
513 516
514 517 # dataOut.timeInterval *= n
515 518 dataOut.flagNoData = True
516 519
517 520 if self.__dataReady:
518 521 dataOut.data = avgdata
519 522 dataOut.nCohInt *= self.n
520 523 dataOut.utctime = avgdatatime
521 524 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
522 525 dataOut.flagNoData = False
523 526
524 527 class Decoder(Operation):
525 528
526 529 isConfig = False
527 530 __profIndex = 0
528 531
529 532 code = None
530 533
531 534 nCode = None
532 535 nBaud = None
533 536
534 537
535 538 def __init__(self):
536 539
537 540 Operation.__init__(self)
538 541
539 542 self.times = None
540 543 self.osamp = None
541 544 # self.__setValues = False
542 545 self.isConfig = False
543 546
544 547 def setup(self, code, osamp, dataOut):
545 548
546 549 self.__profIndex = 0
547 550
548 551 self.code = code
549 552
550 553 self.nCode = len(code)
551 554 self.nBaud = len(code[0])
552 555
553 556 if (osamp != None) and (osamp >1):
554 557 self.osamp = osamp
555 558 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
556 559 self.nBaud = self.nBaud*self.osamp
557 560
558 561 self.__nChannels = dataOut.nChannels
559 562 self.__nProfiles = dataOut.nProfiles
560 563 self.__nHeis = dataOut.nHeights
561 564
562 565 if dataOut.flagDataAsBlock:
563 566
564 567 self.ndatadec = self.__nHeis #- self.nBaud + 1
565 568
566 569 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
567 570
568 571 else:
569 572
570 573 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
571 574
572 575 __codeBuffer[:,0:self.nBaud] = self.code
573 576
574 577 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
575 578
576 579 self.ndatadec = self.__nHeis #- self.nBaud + 1
577 580
578 581 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
579 582
580 583 def convolutionInFreq(self, data):
581 584
582 585 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
583 586
584 587 fft_data = numpy.fft.fft(data, axis=1)
585 588
586 589 conv = fft_data*fft_code
587 590
588 591 data = numpy.fft.ifft(conv,axis=1)
589 592
590 593 datadec = data#[:,:]
591 594
592 595 return datadec
593 596
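convolutionInFreq zero-pads the code to the number of heights, multiplies the data spectrum by the conjugated code spectrum and inverse-transforms; away from the circular wrap-around region this equals the time-domain correlation used by convolutionInTime. A small self-contained check with a synthetic code and profile:

import numpy

nHeis, nBaud = 64, 8
code = numpy.where(numpy.random.rand(nBaud) > 0.5, 1.0, -1.0)       # synthetic +/-1 code
data = numpy.random.randn(nHeis) + 1j*numpy.random.randn(nHeis)

# time domain: cross-correlation starting at lag zero
dec_time = numpy.correlate(data, code, mode='full')[nBaud - 1:]

# frequency domain: multiply by the conjugate spectrum of the zero-padded code
code_padded = numpy.zeros(nHeis, dtype=complex)
code_padded[:nBaud] = code
dec_freq = numpy.fft.ifft(numpy.fft.fft(data) * numpy.conj(numpy.fft.fft(code_padded)))

# both agree except for the last nBaud-1 lags affected by circular wrap-around
assert numpy.allclose(dec_time[:nHeis - nBaud + 1], dec_freq[:nHeis - nBaud + 1])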
594 597 def convolutionInFreqOpt(self, data):
595 598
596 599 raise NotImplementedError
597 600
598 601 # fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
599 602 #
600 603 # data = cfunctions.decoder(fft_code, data)
601 604 #
602 605 # datadec = data#[:,:]
603 606 #
604 607 # return datadec
605 608
606 609 def convolutionInTime(self, data):
607 610
608 611 code = self.code[self.__profIndex]
609 612
610 613 for i in range(self.__nChannels):
611 614 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
612 615
613 616 return self.datadecTime
614 617
615 618 def convolutionByBlockInTime(self, data):
616 619
617 620 repetitions = self.__nProfiles / self.nCode
618 621
619 622 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
620 623 junk = junk.flatten()
621 624 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
622 625
623 626 for i in range(self.__nChannels):
624 627 for j in range(self.__nProfiles):
625 628 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='same')
626 629
627 630 return self.datadecTime
628 631
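convolutionByBlockInTime first builds one code row per profile by repeating the whole code table `repetitions` times; the stride-tricks construction above is equivalent, as a sketch, to an explicit numpy.tile:

import numpy

nCode, nBaud, nProfiles = 4, 8, 32
code = numpy.where(numpy.random.rand(nCode, nBaud) > 0.5, 1.0, -1.0)

repetitions = nProfiles // nCode

junk = numpy.lib.stride_tricks.as_strided(code, (repetitions, code.size), (0, code.itemsize))
code_block = numpy.reshape(junk.flatten(), (nCode*repetitions, nBaud))

# same result with an explicit tiling of the code table
assert numpy.array_equal(code_block, numpy.tile(code, (repetitions, 1)))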
629 632 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
630
633
634 if dataOut.flagDecodeData:
635 print "This data is already decoded, decoding it again ..."
636
631 637 if not self.isConfig:
632 638
633 639 if code is None:
634 640 code = dataOut.code
635 641 else:
636 642 code = numpy.array(code).reshape(nCode,nBaud)
637 643
638 644 self.setup(code, osamp, dataOut)
639 645
640 646 self.isConfig = True
641
647
648 if self.code is None:
649 print "Decoding failed: code is not defined."
650 return
651
642 652 if dataOut.flagDataAsBlock:
643 653 """
644 654 Decoding when data have been read as block,
645 655 """
646 656 datadec = self.convolutionByBlockInTime(dataOut.data)
647 657
648 658 else:
649 659 """
650 660 Decoding when data have been read profile by profile
651 661 """
652 662 if mode == 0:
653 663 datadec = self.convolutionInTime(dataOut.data)
654 664
655 665 if mode == 1:
656 666 datadec = self.convolutionInFreq(dataOut.data)
657 667
658 668 if mode == 2:
659 669 datadec = self.convolutionInFreqOpt(dataOut.data)
660 670
661 671 dataOut.code = self.code
662 672 dataOut.nCode = self.nCode
663 673 dataOut.nBaud = self.nBaud
664 674
665 675 dataOut.data = datadec
666 676
667 677 dataOut.heightList = dataOut.heightList[0:self.ndatadec]
668 678
669 679 dataOut.flagDecodeData = True #assume the data is now decoded
670 680
671 681 if self.__profIndex == self.nCode-1:
672 682 self.__profIndex = 0
673 683 return 1
674 684
675 685 self.__profIndex += 1
676 686
677 687 return 1
678 688 # dataOut.flagDeflipData = True #assume the data is not un-flipped
679 689
680 690
681 691 class ProfileConcat(Operation):
682 692
683 693 isConfig = False
684 694 buffer = None
685 695
686 696 def __init__(self):
687 697
688 698 Operation.__init__(self)
689 699 self.profileIndex = 0
690 700
691 701 def reset(self):
692 702 self.buffer = numpy.zeros_like(self.buffer)
693 703 self.start_index = 0
694 704 self.times = 1
695 705
696 706 def setup(self, data, m, n=1):
697 707 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
698 708 self.nHeights = data.shape[1] # heights per single profile (data is a numpy array)
699 709 self.start_index = 0
700 710 self.times = 1
701 711
702 712 def concat(self, data):
703 713
704 714 self.buffer[:,self.start_index:self.start_index + self.nHeights] = data.copy()
705 715 self.start_index = self.start_index + self.nHeights
706 716
707 717 def run(self, dataOut, m):
708 718
709 719 dataOut.flagNoData = True
710 720
711 721 if not self.isConfig:
712 722 self.setup(dataOut.data, m, 1)
713 723 self.isConfig = True
714 724
715 725 if dataOut.flagDataAsBlock:
716 726
717 727 raise ValueError, "ProfileConcat can only be used when voltage has been read profile by profile, getBlock = False"
718 728
719 729 else:
720 730 self.concat(dataOut.data)
721 731 self.times += 1
722 732 if self.times > m:
723 733 dataOut.data = self.buffer
724 734 self.reset()
725 735 dataOut.flagNoData = False
726 736 # more header and dataOut properties should be updated, for example the heights
727 737 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
728 738 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
729 739 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
730 740 dataOut.ippSeconds *= m
731 741
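ProfileConcat glues m consecutive single-profile acquisitions along the height axis and then extends heightList and ippSeconds to match. A standalone sketch of the intended concatenation (illustrative names and values):

import numpy

nChannels, nHeights, m = 2, 100, 4
deltaHeight, h0 = 15.0, 90.0

profiles = [numpy.random.rand(nChannels, nHeights) + 0j for k in range(m)]

concatenated = numpy.concatenate(profiles, axis=1)          # [nChannels, nHeights*m]
heightList = h0 + numpy.arange(nHeights*m)*deltaHeight      # extended height axis
# ippSeconds would be multiplied by m, as in run() above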
732 742 class ProfileSelector(Operation):
733 743
734 744 profileIndex = None
735 745 # Total number of profiles
736 746 nProfiles = None
737 747
738 748 def __init__(self):
739 749
740 750 Operation.__init__(self)
741 751 self.profileIndex = 0
742 752
743 753 def incIndex(self):
744 754
745 755 self.profileIndex += 1
746 756
747 757 if self.profileIndex >= self.nProfiles:
748 758 self.profileIndex = 0
749 759
750 760 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
751 761
752 762 if profileIndex < minIndex:
753 763 return False
754 764
755 765 if profileIndex > maxIndex:
756 766 return False
757 767
758 768 return True
759 769
760 770 def isThisProfileInList(self, profileIndex, profileList):
761 771
762 772 if profileIndex not in profileList:
763 773 return False
764 774
765 775 return True
766 776
767 777 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
768 778
769 779 """
770 780 ProfileSelector:
771 781
772 782 Inputs:
773 783 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
774 784
775 785 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
776 786
777 787 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
778 788
779 789 """
780 790
781 791 dataOut.flagNoData = True
782 792
783 793 if nProfiles:
784 794 self.nProfiles = nProfiles
785 795 else:
786 796 self.nProfiles = dataOut.nProfiles
787 797
788 798 if dataOut.flagDataAsBlock:
789 799 """
790 800 data dimension = [nChannels, nProfiles, nHeis]
791 801 """
792 802 if profileList != None:
793 803 dataOut.data = dataOut.data[:,profileList,:]
794 804 dataOut.nProfiles = len(profileList)
795 805 dataOut.profileIndex = dataOut.nProfiles - 1
796 806
797 807 if profileRangeList != None:
798 808 minIndex = profileRangeList[0]
799 809 maxIndex = profileRangeList[1]
800 810
801 811 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
802 812 dataOut.nProfiles = maxIndex - minIndex + 1
803 813 dataOut.profileIndex = dataOut.nProfiles - 1
804 814
805 815 if rangeList != None:
806 816 raise ValueError, "Profile Selector: Not implemented for rangeList yet"
807 817
808 818 dataOut.flagNoData = False
809 819
810 820 return True
811 821
812 822 else:
813 823 """
814 824 data dimension = [nChannels, nHeis]
815 825
816 826 """
817 827 if profileList != None:
818 828
819 829 dataOut.nProfiles = len(profileList)
820 830
821 831 if self.isThisProfileInList(dataOut.profileIndex, profileList):
822 832 dataOut.flagNoData = False
823 833 dataOut.profileIndex = self.profileIndex
824 834
825 835 self.incIndex()
826 836 return True
827 837
828 838
829 839 if profileRangeList != None:
830 840
831 841 minIndex = profileRangeList[0]
832 842 maxIndex = profileRangeList[1]
833 843
834 844 dataOut.nProfiles = maxIndex - minIndex + 1
835 845
836 846 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
837 847 dataOut.flagNoData = False
838 848 dataOut.profileIndex = self.profileIndex
839 849
840 850 self.incIndex()
841 851 return True
842 852
843 853 if rangeList != None:
844 854
845 855 nProfiles = 0
846 856
847 857 for thisRange in rangeList:
848 858 minIndex = thisRange[0]
849 859 maxIndex = thisRange[1]
850 860
851 861 nProfiles += maxIndex - minIndex + 1
852 862
853 863 dataOut.nProfiles = nProfiles
854 864
855 865 for thisRange in rangeList:
856 866
857 867 minIndex = thisRange[0]
858 868 maxIndex = thisRange[1]
859 869
860 870 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
861 871
862 872 # print "profileIndex = ", dataOut.profileIndex
863 873
864 874 dataOut.flagNoData = False
865 875 dataOut.profileIndex = self.profileIndex
866 876
867 877 self.incIndex()
868 878 break
869 879 return True
870 880
871 881
872 882 if beam != None: #beam is only for AMISR data
873 883 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
874 884 dataOut.flagNoData = False
875 885 dataOut.profileIndex = self.profileIndex
876 886
877 887 self.incIndex()
878 888 return 1
879 889
880 890 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
881 891
882 892 return 0
883 893
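For block data, ProfileSelector keeps either an explicit list of profile indices or an inclusive [min, max] range along the profile axis, updating nProfiles afterwards. A minimal sketch of both selections on a synthetic block (illustrative names):

import numpy

nChannels, nProfiles, nHeights = 2, 64, 100
data = numpy.random.rand(nChannels, nProfiles, nHeights)

profileList = [0, 1, 2, 7, 8]
selected = data[:, profileList, :]                 # keeps only the listed profiles
# selected.shape -> (2, 5, 100)

minIndex, maxIndex = 4, 30                         # inclusive range, as in profileRangeList
ranged = data[:, minIndex:maxIndex + 1, :]
# ranged.shape -> (2, 27, 100)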
884 894
885 895
886 896 class Reshaper(Operation):
887 897
888 898 def __init__(self):
889 899
890 900 Operation.__init__(self)
891 901 self.updateNewHeights = True
892 902
893 903 def run(self, dataOut, shape):
894 904
895 905 if not dataOut.flagDataAsBlock:
896 906 raise ValueError, "Reshaper can only be used when voltage has been read as a block, getBlock = True"
897 907
898 908 if len(shape) != 3:
899 909 raise ValueError, "shape len should be equal to 3, (nChannels, nProfiles, nHeis)"
900 910
901 911 shape_tuple = tuple(shape)
902 912 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
903 913 dataOut.flagNoData = False
904 914
905 915 if self.updateNewHeights:
906 916
907 917 old_nheights = dataOut.nHeights
908 918 new_nheights = dataOut.data.shape[2]
909 919 factor = 1.0*new_nheights / old_nheights
910 920
911 921 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
912 922
913 923 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * factor
914 924
915 925 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
916 926
917 927 dataOut.nProfiles = dataOut.data.shape[1]
918 928
919 929 dataOut.ippSeconds *= factor
920
921 import collections
922 from scipy.stats import mode
923
924 class Synchronize(Operation):
925
926 isConfig = False
927 __profIndex = 0
928
929 def __init__(self):
930
931 Operation.__init__(self)
932 # self.isConfig = False
933 self.__powBuffer = None
934 self.__startIndex = 0
935 self.__pulseFound = False
936
937 def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
938
939 #Read data
940
941 powerdB = dataOut.getPower(channel = channel)
942 noisedB = dataOut.getNoise(channel = channel)[0]
943
944 self.__powBuffer.extend(powerdB.flatten())
945
946 dataArray = numpy.array(self.__powBuffer)
947
948 filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
949
950 maxValue = numpy.nanmax(filteredPower)
951
952 if maxValue < noisedB + 10:
953 #No transmission pulse was found
954 return None
955
956 maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
957
958 if len(maxValuesIndex) < 2:
959 #Only a single one-baud transmission pulse was found, waiting for the next TX
960 return None
961
962 phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
963
964 #Keep only values spaced by nSamples
965 pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
966
967 if len(pulseIndex) < 2:
968 #Only one transmission pulse wider than 1 was found
969 return None
970
971 spacing = pulseIndex[1:] - pulseIndex[:-1]
972
973 #remove signals separated by less than 10 units or samples
974 #(there should be no IPP shorter than 10 units)
975
976 realIndex = numpy.where(spacing > 10 )[0]
977
978 if len(realIndex) < 2:
979 #Only one transmission pulse wider than 1 was found
980 return None
981
982 #Remove wide pulses (keep only the spacing between IPPs)
983 realPulseIndex = pulseIndex[realIndex]
984
985 period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
986
987 print "IPP = %d samples" %period
988
989 self.__newNSamples = dataOut.nHeights #int(period)
990 self.__startIndex = int(realPulseIndex[0])
991
992 return 1
993
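__findTxPulse matched-filters the buffered power with its own first nSamples, keeps the indices near the maximum, discards spacings below 10 samples and takes the statistical mode of the remaining spacings as the IPP in samples. A simplified standalone sketch of the spacing/mode step (synthetic pulse positions; the mode is computed with numpy here instead of scipy.stats.mode):

import numpy

period = 250                                       # true IPP in samples
jitter = numpy.random.randint(-1, 2, 20)
pulseIndex = numpy.arange(0, 5000, period) + jitter

spacing = pulseIndex[1:] - pulseIndex[:-1]
realIndex = numpy.where(spacing > 10)[0]           # drop indices belonging to the same pulse
realPulseIndex = pulseIndex[realIndex]

diffs = realPulseIndex[1:] - realPulseIndex[:-1]
values, counts = numpy.unique(diffs, return_counts=True)
estimated_period = values[numpy.argmax(counts)]    # mode of the spacings, ~250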
994
995 def setup(self, nSamples, nChannels, buffer_size = 4):
996
997 self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
998 maxlen = buffer_size*nSamples)
999
1000 bufferList = []
1001
1002 for i in range(nChannels):
1003 bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1004 maxlen = buffer_size*nSamples)
1005
1006 bufferList.append(bufferByChannel)
1007
1008 self.__nSamples = nSamples
1009 self.__nChannels = nChannels
1010 self.__bufferList = bufferList
1011
1012 def run(self, dataOut, channel = 0):
1013
1014 if not self.isConfig:
1015 nSamples = dataOut.nHeights
1016 nChannels = dataOut.nChannels
1017 self.setup(nSamples, nChannels)
1018 self.isConfig = True
1019
1020 #Append new data to internal buffer
1021 for thisChannel in range(self.__nChannels):
1022 bufferByChannel = self.__bufferList[thisChannel]
1023 bufferByChannel.extend(dataOut.data[thisChannel])
1024
1025 if self.__pulseFound:
1026 self.__startIndex -= self.__nSamples
1027
1028 #Finding Tx Pulse
1029 if not self.__pulseFound:
1030 indexFound = self.__findTxPulse(dataOut, channel)
1031
1032 if indexFound == None:
1033 dataOut.flagNoData = True
1034 return
1035
1036 self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1037 self.__pulseFound = True
1038 self.__startIndex = indexFound
1039
1040 #If pulse was found ...
1041 for thisChannel in range(self.__nChannels):
1042 bufferByChannel = self.__bufferList[thisChannel]
1043 #print self.__startIndex
1044 x = numpy.array(bufferByChannel)
1045 self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1046
1047 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1048 dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1049 # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1050
1051 dataOut.data = self.__arrayBuffer
1052
1053 self.__startIndex += self.__newNSamples
1054
1055 return No newline at end of file
930 #
931 # import collections
932 # from scipy.stats import mode
933 #
934 # class Synchronize(Operation):
935 #
936 # isConfig = False
937 # __profIndex = 0
938 #
939 # def __init__(self):
940 #
941 # Operation.__init__(self)
942 # # self.isConfig = False
943 # self.__powBuffer = None
944 # self.__startIndex = 0
945 # self.__pulseFound = False
946 #
947 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
948 #
949 # #Read data
950 #
951 # powerdB = dataOut.getPower(channel = channel)
952 # noisedB = dataOut.getNoise(channel = channel)[0]
953 #
954 # self.__powBuffer.extend(powerdB.flatten())
955 #
956 # dataArray = numpy.array(self.__powBuffer)
957 #
958 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
959 #
960 # maxValue = numpy.nanmax(filteredPower)
961 #
962 # if maxValue < noisedB + 10:
963 # #No se encuentra ningun pulso de transmision
964 # return None
965 #
966 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
967 #
968 # if len(maxValuesIndex) < 2:
969 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
970 # return None
971 #
972 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
973 #
974 # #Seleccionar solo valores con un espaciamiento de nSamples
975 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
976 #
977 # if len(pulseIndex) < 2:
978 # #Solo se encontro un pulso de transmision con ancho mayor a 1
979 # return None
980 #
981 # spacing = pulseIndex[1:] - pulseIndex[:-1]
982 #
983 # #remover senales que se distancien menos de 10 unidades o muestras
984 # #(No deberian existir IPP menor a 10 unidades)
985 #
986 # realIndex = numpy.where(spacing > 10 )[0]
987 #
988 # if len(realIndex) < 2:
989 # #Solo se encontro un pulso de transmision con ancho mayor a 1
990 # return None
991 #
992 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
993 # realPulseIndex = pulseIndex[realIndex]
994 #
995 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
996 #
997 # print "IPP = %d samples" %period
998 #
999 # self.__newNSamples = dataOut.nHeights #int(period)
1000 # self.__startIndex = int(realPulseIndex[0])
1001 #
1002 # return 1
1003 #
1004 #
1005 # def setup(self, nSamples, nChannels, buffer_size = 4):
1006 #
1007 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1008 # maxlen = buffer_size*nSamples)
1009 #
1010 # bufferList = []
1011 #
1012 # for i in range(nChannels):
1013 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1014 # maxlen = buffer_size*nSamples)
1015 #
1016 # bufferList.append(bufferByChannel)
1017 #
1018 # self.__nSamples = nSamples
1019 # self.__nChannels = nChannels
1020 # self.__bufferList = bufferList
1021 #
1022 # def run(self, dataOut, channel = 0):
1023 #
1024 # if not self.isConfig:
1025 # nSamples = dataOut.nHeights
1026 # nChannels = dataOut.nChannels
1027 # self.setup(nSamples, nChannels)
1028 # self.isConfig = True
1029 #
1030 # #Append new data to internal buffer
1031 # for thisChannel in range(self.__nChannels):
1032 # bufferByChannel = self.__bufferList[thisChannel]
1033 # bufferByChannel.extend(dataOut.data[thisChannel])
1034 #
1035 # if self.__pulseFound:
1036 # self.__startIndex -= self.__nSamples
1037 #
1038 # #Finding Tx Pulse
1039 # if not self.__pulseFound:
1040 # indexFound = self.__findTxPulse(dataOut, channel)
1041 #
1042 # if indexFound == None:
1043 # dataOut.flagNoData = True
1044 # return
1045 #
1046 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1047 # self.__pulseFound = True
1048 # self.__startIndex = indexFound
1049 #
1050 # #If pulse was found ...
1051 # for thisChannel in range(self.__nChannels):
1052 # bufferByChannel = self.__bufferList[thisChannel]
1053 # #print self.__startIndex
1054 # x = numpy.array(bufferByChannel)
1055 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1056 #
1057 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1058 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1059 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1060 #
1061 # dataOut.data = self.__arrayBuffer
1062 #
1063 # self.__startIndex += self.__newNSamples
1064 #
1065 # return No newline at end of file