Added to SpectraProc:...
Daniel Valdez -
r445:ff1e0d352ade
@@ -1,721 +1,723
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14 def hildebrand_sekhon(data, navg):
15 15 """
16 16 This method is for the objective determination of the noise level in Doppler spectra. This
17 17 implementation technique is based on the fact that the standard deviation of the spectral
18 18 densities is equal to the mean spectral density for white Gaussian noise.
19 19
20 20 Inputs:
21 21 data : array of spectral densities
22 22 navg : number of averages
23 23
24 24 Return:
25 25 -1 : on any error
26 26 anoise : estimated noise level
27 27 """
28 28
29 29 dataflat = data.copy().reshape(-1)
30 30 dataflat.sort()
31 31 npts = dataflat.size #number of data points
32 32 npts_noise = 0.2*npts
33 33
34 34 if npts < 32:
35 35 print "error in noise - requires at least 32 points"
36 36 return -1.0
37 37
38 38 dataflat2 = numpy.power(dataflat,2)
39 39
40 40 cs = numpy.cumsum(dataflat)
41 41 cs2 = numpy.cumsum(dataflat2)
42 42
43 43 # data sorted in ascending order
44 44 nmin = int((npts + 7.)/8)
45 45
46 46 for i in range(nmin, npts):
47 47 s = cs[i]
48 48 s2 = cs2[i]
49 49 p = s / float(i);
50 50 p2 = p**2;
51 51 q = s2 / float(i) - p2;
52 52 leftc = p2;
53 53 rightc = q * float(navg);
54 54 R2 = leftc/rightc
55 55
56 56 # Signal detect: R2 < 1 (R2 = leftc/rightc)
57 57 if R2 < 1:
58 58 npts_noise = i
59 59 break
60 60
61 61
62 62 anoise = numpy.average(dataflat[0:npts_noise])
63 63
64 64 return anoise;
65 65
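A minimal usage sketch (not part of this changeset) of the hildebrand_sekhon() estimator defined above, run on synthetic data; the array shape, random seed, number of averages and the injected "signal" patch are arbitrary assumptions for illustration:

    import numpy

    numpy.random.seed(0)
    navg = 10
    # navg incoherent integrations of |complex Gaussian noise|^2 with unit noise power,
    # arranged as (nFFTPoints, nHeights) = (64, 100)
    raw = (numpy.random.randn(navg, 64, 100) + 1j*numpy.random.randn(navg, 64, 100))/numpy.sqrt(2.)
    spc = numpy.mean(numpy.abs(raw)**2, axis=0)
    spc[28:36, 40:45] += 20.    # strong artificial "signal" so the detection criterion triggers

    noise_level = hildebrand_sekhon(spc, navg)
    print "estimated noise level:", noise_level    # expected to be close to 1.0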
66 66 def sorting_bruce(data, navg):
67 67
68 68 data = data.copy()
69 69
70 70 sortdata = numpy.sort(data)
71 71 lenOfData = len(data)
72 72 nums_min = lenOfData/10
73 73
74 74 if (lenOfData/10) > 0:
75 75 nums_min = lenOfData/10
76 76 else:
77 77 nums_min = 0
78 78
79 79 rtest = 1.0 + 1.0/navg
80 80
81 sum = 0.
81 sump = 0.
82 82
83 83 sumq = 0.
84 84
85 85 j = 0
86 86
87 87 cont = 1
88 88
89 89 while((cont==1)and(j<lenOfData)):
90 90
91 sum += sortdata[j]
91 sump += sortdata[j]
92 92
93 93 sumq += sortdata[j]**2
94 94
95 95 j += 1
96 96
97 97 if j > nums_min:
98 if ((sumq*j) <= (rtest*sum**2)):
99 lnoise = sum / j
98 if ((sumq*j) <= (rtest*sump**2)):
99 lnoise = sump / j
100 100 else:
101 101 j = j - 1
102 sum = sum - sordata[j]
103 sumq = sumq - sordata[j]**2
102 sump = sump - sortdata[j]
103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 if j == nums_min:
107 lnoise = sum /j
107 lnoise = sump /j
108 108
109 109 return lnoise
110 110
111 111 class JROData:
112 112
113 113 # m_BasicHeader = BasicHeader()
114 114 # m_ProcessingHeader = ProcessingHeader()
115 115
116 116 systemHeaderObj = SystemHeader()
117 117
118 118 radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 # data = None
121 121
122 122 type = None
123 123
124 124 dtype = None
125 125
126 126 # nChannels = None
127 127
128 128 # nHeights = None
129 129
130 130 nProfiles = None
131 131
132 132 heightList = None
133 133
134 134 channelList = None
135 135
136 136 flagNoData = True
137 137
138 138 flagTimeBlock = False
139 139
140 140 useLocalTime = False
141 141
142 142 utctime = None
143 143
144 144 timeZone = None
145 145
146 146 dstFlag = None
147 147
148 148 errorCount = None
149 149
150 150 blocksize = None
151 151
152 152 nCode = None
153 153
154 154 nBaud = None
155 155
156 156 code = None
157 157
158 158 flagDecodeData = False #asumo q la data no esta decodificada
159 159
160 160 flagDeflipData = False #asumo q la data no esta sin flip
161 161
162 162 flagShiftFFT = False
163 163
164 164 ippSeconds = None
165 165
166 166 timeInterval = None
167 167
168 168 nCohInt = None
169 169
170 170 noise = None
171 171
172 172 windowOfFilter = 1
173 173
174 174 #Speed of light
175 175 C = 3e8
176 176
177 177 frequency = 49.92e6
178 178
179 179 realtime = False
180 180
181 181 def __init__(self):
182 182
183 183 raise ValueError, "This class has not been implemented"
184 184
185 185 def copy(self, inputObj=None):
186 186
187 187 if inputObj == None:
188 188 return copy.deepcopy(self)
189 189
190 190 for key in inputObj.__dict__.keys():
191 191 self.__dict__[key] = inputObj.__dict__[key]
192 192
193 193 def deepcopy(self):
194 194
195 195 return copy.deepcopy(self)
196 196
197 197 def isEmpty(self):
198 198
199 199 return self.flagNoData
200 200
201 201 def getNoise(self):
202 202
203 203 raise ValueError, "Not implemented"
204 204
205 205 def getNChannels(self):
206 206
207 207 return len(self.channelList)
208 208
209 209 def getChannelIndexList(self):
210 210
211 211 return range(self.nChannels)
212 212
213 213 def getNHeights(self):
214 214
215 215 return len(self.heightList)
216 216
217 217 def getHeiRange(self, extrapoints=0):
218 218
219 219 heis = self.heightList
220 220 # deltah = self.heightList[1] - self.heightList[0]
221 221 #
222 222 # heis.append(self.heightList[-1])
223 223
224 224 return heis
225 225
226 226 def getltctime(self):
227 227
228 228 if self.useLocalTime:
229 229 return self.utctime - self.timeZone*60
230 230
231 231 return self.utctime
232 232
233 233 def getDatatime(self):
234 234
235 235 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
236 236 return datatime
237 237
238 238 def getTimeRange(self):
239 239
240 240 datatime = []
241 241
242 242 datatime.append(self.ltctime)
243 243 datatime.append(self.ltctime + self.timeInterval)
244 244
245 245 datatime = numpy.array(datatime)
246 246
247 247 return datatime
248 248
249 249 def getFmax(self):
250 250
251 251 PRF = 1./(self.ippSeconds * self.nCohInt)
252 252
253 253 fmax = PRF/2.
254 254
255 255 return fmax
256 256
257 257 def getVmax(self):
258 258
259 259 _lambda = self.C/self.frequency
260 260
261 261 vmax = self.getFmax() * _lambda
262 262
263 263 return vmax
264 264
265 265 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
266 266 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
267 267 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
268 268 noise = property(getNoise, "I'm the 'noise' property.")
269 269 datatime = property(getDatatime, "I'm the 'datatime' property")
270 270 ltctime = property(getltctime, "I'm the 'ltctime' property")
271 271
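For reference, a small worked example of the getFmax()/getVmax() relations defined above; the IPP and coherent-integration values below are arbitrary assumptions, only C and frequency match the class defaults:

    ippSeconds = 0.001                   # assumed inter-pulse period [s]
    nCohInt    = 2                       # assumed number of coherent integrations
    C          = 3e8
    frequency  = 49.92e6                 # class default above

    PRF     = 1./(ippSeconds * nCohInt)  # 500 Hz
    fmax    = PRF/2.                     # 250 Hz, as in getFmax()
    _lambda = C/frequency                # ~6.01 m
    vmax    = fmax * _lambda             # ~1502 m/s, as in getVmax()
    print PRF, fmax, _lambda, vmax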
272 272 class Voltage(JROData):
273 273
274 274 #data es un numpy array de 2 dimensiones (canales, alturas)
275 275 data = None
276 276
277 277 def __init__(self):
278 278 '''
279 279 Constructor
280 280 '''
281 281
282 282 self.radarControllerHeaderObj = RadarControllerHeader()
283 283
284 284 self.systemHeaderObj = SystemHeader()
285 285
286 286 self.type = "Voltage"
287 287
288 288 self.data = None
289 289
290 290 self.dtype = None
291 291
292 292 # self.nChannels = 0
293 293
294 294 # self.nHeights = 0
295 295
296 296 self.nProfiles = None
297 297
298 298 self.heightList = None
299 299
300 300 self.channelList = None
301 301
302 302 # self.channelIndexList = None
303 303
304 304 self.flagNoData = True
305 305
306 306 self.flagTimeBlock = False
307 307
308 308 self.utctime = None
309 309
310 310 self.timeZone = None
311 311
312 312 self.dstFlag = None
313 313
314 314 self.errorCount = None
315 315
316 316 self.nCohInt = None
317 317
318 318 self.blocksize = None
319 319
320 320 self.flagDecodeData = False #asumo q la data no esta decodificada
321 321
322 322 self.flagDeflipData = False #asumo q la data no esta sin flip
323 323
324 324 self.flagShiftFFT = False
325 325
326 326
327 327 def getNoisebyHildebrand(self):
328 328 """
329 329 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
330 330
331 331 Return:
332 332 noiselevel
333 333 """
334 334
335 335 for channel in range(self.nChannels):
336 336 daux = self.data_spc[channel,:,:]
337 337 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
338 338
339 339 return self.noise
340 340
341 341 def getNoise(self, type = 1):
342 342
343 343 self.noise = numpy.zeros(self.nChannels)
344 344
345 345 if type == 1:
346 346 noise = self.getNoisebyHildebrand()
347 347
348 348 return 10*numpy.log10(noise)
349 349
350 350 class Spectra(JROData):
351 351
352 352 #data es un numpy array de 3 dimensiones (canales, perfiles, alturas)
353 353 data_spc = None
354 354
355 355 #data es un numpy array de 3 dimensiones (canales, pares, alturas)
356 356 data_cspc = None
357 357
358 358 #data es un numpy array de 2 dimensiones (canales, alturas)
359 359 data_dc = None
360 360
361 361 nFFTPoints = None
362 362
363 363 nPairs = None
364 364
365 365 pairsList = None
366 366
367 367 nIncohInt = None
368 368
369 369 wavelength = None #Necesario para cacular el rango de velocidad desde la frecuencia
370 370
371 371 nCohInt = None #se requiere para determinar el valor de timeInterval
372 372
373 ippFactor = None
374
373 375 def __init__(self):
374 376 '''
375 377 Constructor
376 378 '''
377 379
378 380 self.radarControllerHeaderObj = RadarControllerHeader()
379 381
380 382 self.systemHeaderObj = SystemHeader()
381 383
382 384 self.type = "Spectra"
383 385
384 386 # self.data = None
385 387
386 388 self.dtype = None
387 389
388 390 # self.nChannels = 0
389 391
390 392 # self.nHeights = 0
391 393
392 394 self.nProfiles = None
393 395
394 396 self.heightList = None
395 397
396 398 self.channelList = None
397 399
398 400 # self.channelIndexList = None
399 401
400 402 self.flagNoData = True
401 403
402 404 self.flagTimeBlock = False
403 405
404 406 self.utctime = None
405 407
406 408 self.nCohInt = None
407 409
408 410 self.nIncohInt = None
409 411
410 412 self.blocksize = None
411 413
412 414 self.nFFTPoints = None
413 415
414 416 self.wavelength = None
415 417
416 418 self.flagDecodeData = False #asumo q la data no esta decodificada
417 419
418 420 self.flagDeflipData = False #asumo q la data no esta sin flip
419 421
420 422 self.flagShiftFFT = False
421 423
422 424 def getNoisebyHildebrand(self):
423 425 """
424 426 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
425 427
426 428 Return:
427 429 noiselevel
428 430 """
429 431
430 432 for channel in range(self.nChannels):
431 433 daux = self.data_spc[channel,:,:]
432 434 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
433 435
434 436 return self.noise
435 437
436 438 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
437 439 """
438 440 Determina el ruido del canal utilizando la ventana indicada con las coordenadas:
439 441 (heiIndexMIn, freqIndexMin) hasta (heiIndexMax, freqIndexMAx)
440 442
441 443 Inputs:
442 444 heiIndexMin: Limite inferior del eje de alturas
443 445 heiIndexMax: Limite superior del eje de alturas
444 446 freqIndexMin: Limite inferior del eje de frecuencia
445 447 freqIndexMax: Limite supoerior del eje de frecuencia
446 448 """
447 449
448 450 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
449 451
450 452 for channel in range(self.nChannels):
451 453 daux = data[channel,:,:]
452 454 self.noise[channel] = numpy.average(daux)
453 455
454 456 return self.noise
455 457
456 458 def getNoisebySort(self):
457 459
458 460 for channel in range(self.nChannels):
459 461 daux = self.data_spc[channel,:,:]
460 462 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
461 463
462 464 return self.noise
463 465
464 466 def getNoise(self, type = 1):
465 467
466 468 self.noise = numpy.zeros(self.nChannels)
467 469
468 470 if type == 1:
469 471 noise = self.getNoisebyHildebrand()
470 472
471 473 if type == 2:
472 474 noise = self.getNoisebySort()
473 475
474 476 if type == 3:
475 477 noise = self.getNoisebyWindow()
476 478
477 479 return noise
478 480
479 481
480 482 def getFreqRange(self, extrapoints=0):
481 483
482 deltafreq = self.getFmax() / self.nFFTPoints
484 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
483 485 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
484 486
485 487 return freqrange
486 488
487 489 def getVelRange(self, extrapoints=0):
488 490
489 deltav = self.getVmax() / self.nFFTPoints
491 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
490 492 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
491 493
492 494 return velrange
493 495
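A standalone numeric sketch of the frequency axis produced by getFreqRange() after this change (which divides by nFFTPoints*ippFactor); fmax, nFFTPoints and ippFactor below are assumed values:

    import numpy

    fmax       = 250.      # Hz, e.g. as returned by getFmax() (assumed)
    nFFTPoints = 8
    ippFactor  = 2         # factor introduced in this changeset

    deltafreq = fmax / (nFFTPoints*ippFactor)     # 15.625 Hz per bin
    freqrange = deltafreq*(numpy.arange(nFFTPoints) - nFFTPoints/2.) - deltafreq/2
    print freqrange        # [-70.3125 -54.6875 ... 39.0625], 8 bins centred near 0 Hz

getVelRange() follows the same pattern with getVmax() in place of getFmax().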
494 496 def getNPairs(self):
495 497
496 498 return len(self.pairsList)
497 499
498 500 def getPairsIndexList(self):
499 501
500 502 return range(self.nPairs)
501 503
502 504 def getNormFactor(self):
503 505 pwcode = 1
504 506 if self.flagDecodeData:
505 507 pwcode = numpy.sum(self.code[0]**2)
506 508 normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode
507 509
508 510 return normFactor
509 511
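A worked example of the normalization factor computed above, using assumed processing parameters; the 4-baud code power is hypothetical:

    nFFTPoints = 128
    nProfiles  = 128
    nIncohInt  = 10
    nCohInt    = 2
    pwcode     = 4      # sum(code[0]**2) for an assumed 4-baud +/-1 code (flagDecodeData True)

    normFactor = min(nFFTPoints, nProfiles)*nIncohInt*nCohInt*pwcode
    print normFactor    # 10240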
510 512 def getFlagCspc(self):
511 513
512 514 if self.data_cspc == None:
513 515 return True
514 516
515 517 return False
516 518
517 519 def getFlagDc(self):
518 520
519 521 if self.data_dc == None:
520 522 return True
521 523
522 524 return False
523 525
524 526 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
525 527 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
526 528 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
527 529 flag_cspc = property(getFlagCspc)
528 530 flag_dc = property(getFlagDc)
529 531
530 532 class SpectraHeis(JROData):
531 533
532 534 data_spc = None
533 535
534 536 data_cspc = None
535 537
536 538 data_dc = None
537 539
538 540 nFFTPoints = None
539 541
540 542 nPairs = None
541 543
542 544 pairsList = None
543 545
544 546 nIncohInt = None
545 547
546 548 def __init__(self):
547 549
548 550 self.radarControllerHeaderObj = RadarControllerHeader()
549 551
550 552 self.systemHeaderObj = SystemHeader()
551 553
552 554 self.type = "SpectraHeis"
553 555
554 556 self.dtype = None
555 557
556 558 # self.nChannels = 0
557 559
558 560 # self.nHeights = 0
559 561
560 562 self.nProfiles = None
561 563
562 564 self.heightList = None
563 565
564 566 self.channelList = None
565 567
566 568 # self.channelIndexList = None
567 569
568 570 self.flagNoData = True
569 571
570 572 self.flagTimeBlock = False
571 573
572 574 self.nPairs = 0
573 575
574 576 self.utctime = None
575 577
576 578 self.blocksize = None
577 579
578 580 class Fits:
579 581
580 582 heightList = None
581 583
582 584 channelList = None
583 585
584 586 flagNoData = True
585 587
586 588 flagTimeBlock = False
587 589
588 590 useLocalTime = False
589 591
590 592 utctime = None
591 593
592 594 timeZone = None
593 595
594 596 ippSeconds = None
595 597
596 598 timeInterval = None
597 599
598 600 nCohInt = None
599 601
600 602 nIncohInt = None
601 603
602 604 noise = None
603 605
604 606 windowOfFilter = 1
605 607
606 608 #Speed of light
607 609 C = 3e8
608 610
609 611 frequency = 49.92e6
610 612
611 613 realtime = False
612 614
613 615
614 616 def __init__(self):
615 617
616 618 self.type = "Fits"
617 619
618 620 self.nProfiles = None
619 621
620 622 self.heightList = None
621 623
622 624 self.channelList = None
623 625
624 626 # self.channelIndexList = None
625 627
626 628 self.flagNoData = True
627 629
628 630 self.utctime = None
629 631
630 632 self.nCohInt = None
631 633
632 634 self.nIncohInt = None
633 635
634 636 self.useLocalTime = True
635 637
636 638 # self.utctime = None
637 639 # self.timeZone = None
638 640 # self.ltctime = None
639 641 # self.timeInterval = None
640 642 # self.header = None
641 643 # self.data_header = None
642 644 # self.data = None
643 645 # self.datatime = None
644 646 # self.flagNoData = False
645 647 # self.expName = ''
646 648 # self.nChannels = None
647 649 # self.nSamples = None
648 650 # self.dataBlocksPerFile = None
649 651 # self.comments = ''
650 652 #
651 653
652 654
653 655 def getltctime(self):
654 656
655 657 if self.useLocalTime:
656 658 return self.utctime - self.timeZone*60
657 659
658 660 return self.utctime
659 661
660 662 def getDatatime(self):
661 663
662 664 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
663 665 return datatime
664 666
665 667 def getTimeRange(self):
666 668
667 669 datatime = []
668 670
669 671 datatime.append(self.ltctime)
670 672 datatime.append(self.ltctime + self.timeInterval)
671 673
672 674 datatime = numpy.array(datatime)
673 675
674 676 return datatime
675 677
676 678 def getHeiRange(self):
677 679
678 680 heis = self.heightList
679 681
680 682 return heis
681 683
682 684 def isEmpty(self):
683 685
684 686 return self.flagNoData
685 687
686 688 def getNHeights(self):
687 689
688 690 return len(self.heightList)
689 691
690 692 def getNChannels(self):
691 693
692 694 return len(self.channelList)
693 695
694 696 def getChannelIndexList(self):
695 697
696 698 return range(self.nChannels)
697 699
698 700 def getNoise(self, type = 1):
699 701
700 702 self.noise = numpy.zeros(self.nChannels)
701 703
702 704 if type == 1:
703 705 noise = self.getNoisebyHildebrand()
704 706
705 707 if type == 2:
706 708 noise = self.getNoisebySort()
707 709
708 710 if type == 3:
709 711 noise = self.getNoisebyWindow()
710 712
711 713 return noise
712 714
713 715 datatime = property(getDatatime, "I'm the 'datatime' property")
714 716 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
715 717 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
716 718 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
717 719 noise = property(getNoise, "I'm the 'noise' property.")
718 720 datatime = property(getDatatime, "I'm the 'datatime' property")
719 721 ltctime = property(getltctime, "I'm the 'ltctime' property")
720 722
721 723 ltctime = property(getltctime, "I'm the 'ltctime' property")
\ No newline at end of file
@@ -1,3452 +1,3389
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 from xml.etree.ElementTree import Element, SubElement, ElementTree
14 14 try:
15 15 import pyfits
16 16 except:
17 17 print "pyfits module has not been imported, it should be installed to save files in fits format"
18 18
19 19 from jrodata import *
20 20 from jroheaderIO import *
21 21 from jroprocessing import *
22 22
23 23 LOCALTIME = True #-18000
24 24
25 25 def isNumber(str):
26 26 """
27 27 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
28 28
29 29 Excepciones:
30 30 Si un determinado string no puede ser convertido a numero
31 31 Input:
32 32 str, string al cual se le analiza para determinar si convertible a un numero o no
33 33
34 34 Return:
35 35 True : si el string es uno numerico
36 36 False : no es un string numerico
37 37 """
38 38 try:
39 39 float( str )
40 40 return True
41 41 except:
42 42 return False
43 43
44 44 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
45 45 """
46 46 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
47 47
48 48 Inputs:
49 49 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
50 50
51 51 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
52 52 segundos contados desde 01/01/1970.
53 53 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
54 54 segundos contados desde 01/01/1970.
55 55
56 56 Return:
57 57 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
58 58 fecha especificado, de lo contrario retorna False.
59 59
60 60 Excepciones:
61 61 Si el archivo no existe o no puede ser abierto
62 62 Si la cabecera no puede ser leida.
63 63
64 64 """
65 65 basicHeaderObj = BasicHeader(LOCALTIME)
66 66
67 67 try:
68 68 fp = open(filename,'rb')
69 69 except:
70 70 raise IOError, "The file %s can't be opened" %(filename)
71 71
72 72 sts = basicHeaderObj.read(fp)
73 73 fp.close()
74 74
75 75 if not(sts):
76 76 print "Skipping the file %s because it has not a valid header" %(filename)
77 77 return 0
78 78
79 79 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
80 80 return 0
81 81
82 82 return 1
83 83
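A usage sketch of isThisFileinRange() above; the file path is hypothetical and converting the date range to UT seconds with calendar.timegm is an assumption about the epoch expected by the basic header:

    import calendar, datetime

    startUTSeconds = calendar.timegm(datetime.datetime(2012, 11, 20, 0, 0, 0).timetuple())
    endUTSeconds   = calendar.timegm(datetime.datetime(2012, 11, 21, 0, 0, 0).timetuple())

    if isThisFileinRange("/data/d2012325/D2012325001.r", startUTSeconds, endUTSeconds):
        print "the file contains data inside the requested range"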
84 84 def isFileinThisTime(filename, startTime, endTime):
85 85 """
86 86 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
87 87
88 88 Inputs:
89 89 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
90 90
91 91 startTime : tiempo inicial del rango seleccionado en formato datetime.time
92 92
93 93 endTime : tiempo final del rango seleccionado en formato datetime.time
94 94
95 95 Return:
96 96 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
97 97 fecha especificado, de lo contrario retorna False.
98 98
99 99 Excepciones:
100 100 Si el archivo no existe o no puede ser abierto
101 101 Si la cabecera no puede ser leida.
102 102
103 103 """
104 104
105 105
106 106 try:
107 107 fp = open(filename,'rb')
108 108 except:
109 109 raise IOError, "The file %s can't be opened" %(filename)
110 110
111 111 basicHeaderObj = BasicHeader(LOCALTIME)
112 112 sts = basicHeaderObj.read(fp)
113 113 fp.close()
114 114
115 115 thisDatetime = basicHeaderObj.datatime
116 116 thisTime = basicHeaderObj.datatime.time()
117 117
118 118 if not(sts):
119 119 print "Skipping the file %s because it has not a valid header" %(filename)
120 120 return None
121 121
122 122 if not ((startTime <= thisTime) and (endTime > thisTime)):
123 123 return None
124 124
125 125 return thisDatetime
126 126
127 127 def getFileFromSet(path,ext,set):
128 128 validFilelist = []
129 129 fileList = os.listdir(path)
130 130
131 131 # 0 1234 567 89A BCDE
132 132 # H YYYY DDD SSS .ext
133 133
134 134 for file in fileList:
135 135 try:
136 136 year = int(file[1:5])
137 137 doy = int(file[5:8])
138 138
139 139
140 140 except:
141 141 continue
142 142
143 143 if (os.path.splitext(file)[-1].lower() != ext.lower()):
144 144 continue
145 145
146 146 validFilelist.append(file)
147 147
148 148 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
149 149
150 150 if len(myfile)!= 0:
151 151 return myfile[0]
152 152 else:
153 153 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
154 154 print 'the filename %s does not exist'%filename
155 155 print '...going to the last file: '
156 156
157 157 if validFilelist:
158 158 validFilelist = sorted( validFilelist, key=str.lower )
159 159 return validFilelist[-1]
160 160
161 161 return None
162 162
163 163
164 164 def getlastFileFromPath(path, ext):
165 165 """
166 166 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
167 167 al final de la depuracion devuelve el ultimo file de la lista que quedo.
168 168
169 169 Input:
170 170 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
171 171 ext : extension de los files contenidos en una carpeta
172 172
173 173 Return:
174 174 El ultimo file de una determinada carpeta, no se considera el path.
175 175 """
176 176 validFilelist = []
177 177 fileList = os.listdir(path)
178 178
179 179 # 0 1234 567 89A BCDE
180 180 # H YYYY DDD SSS .ext
181 181
182 182 for file in fileList:
183 183 try:
184 184 year = int(file[1:5])
185 185 doy = int(file[5:8])
186 186
187 187
188 188 except:
189 189 continue
190 190
191 191 if (os.path.splitext(file)[-1].lower() != ext.lower()):
192 192 continue
193 193
194 194 validFilelist.append(file)
195 195
196 196 if validFilelist:
197 197 validFilelist = sorted( validFilelist, key=str.lower )
198 198 return validFilelist[-1]
199 199
200 200 return None
201 201
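Both helpers above rely on the xYYYYDDDSSS.ext naming convention (prefix letter, 4-digit year, 3-digit day of year, 3-digit set number). A minimal sketch of how those fields are sliced out of a made-up filename:

    filename = "D2012325007.r"     # hypothetical voltage file
    year = int(filename[1:5])      # 2012
    doy  = int(filename[5:8])      # 325
    set  = int(filename[8:11])     # 7
    print year, doy, set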
202 202 def checkForRealPath(path, foldercounter, year, doy, set, ext):
203 203 """
204 204 Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path,
205 205 Prueba por varias combinaciones de nombres entre mayusculas y minusculas para determinar
206 206 el path exacto de un determinado file.
207 207
208 208 Example :
209 209 nombre correcto del file es .../.../D2009307/P2009307367.ext
210 210
211 211 Entonces la funcion prueba con las siguientes combinaciones
212 212 .../.../y2009307367.ext
213 213 .../.../Y2009307367.ext
214 214 .../.../x2009307/y2009307367.ext
215 215 .../.../x2009307/Y2009307367.ext
216 216 .../.../X2009307/y2009307367.ext
217 217 .../.../X2009307/Y2009307367.ext
218 218 siendo para este caso, la ultima combinacion de letras, identica al file buscado
219 219
220 220 Return:
221 221 Si encuentra la cobinacion adecuada devuelve el path completo y el nombre del file
222 222 caso contrario devuelve None como path y el la ultima combinacion de nombre en mayusculas
223 223 para el filename
224 224 """
225 225 fullfilename = None
226 226 find_flag = False
227 227 filename = None
228 228
229 229 prefixDirList = [None,'d','D']
230 230 if ext.lower() == ".r": #voltage
231 231 prefixFileList = ['d','D']
232 232 elif ext.lower() == ".pdata": #spectra
233 233 prefixFileList = ['p','P']
234 234 else:
235 235 return None, filename
236 236
237 237 #barrido por las combinaciones posibles
238 238 for prefixDir in prefixDirList:
239 239 thispath = path
240 240 if prefixDir != None:
241 241 #formo el nombre del directorio xYYYYDDD (x=d o x=D)
242 242 if foldercounter == 0:
243 243 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
244 244 else:
245 245 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
246 246 for prefixFile in prefixFileList: #barrido por las dos combinaciones posibles de "D"
247 247 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
248 248 fullfilename = os.path.join( thispath, filename ) #formo el path completo
249 249
250 250 if os.path.exists( fullfilename ): #verifico que exista
251 251 find_flag = True
252 252 break
253 253 if find_flag:
254 254 break
255 255
256 256 if not(find_flag):
257 257 return None, filename
258 258
259 259 return fullfilename, filename
260 260
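A usage sketch of checkForRealPath() above, reusing the year/doy/set values from its docstring; the base path is hypothetical:

    fullfilename, filename = checkForRealPath("/data", 0, 2009, 307, 367, ".r")
    if fullfilename:
        print "found:", fullfilename
    else:
        print "no prefix/case combination exists; last name tried was", filename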
261 261 def isDoyFolder(folder):
262 262 try:
263 263 year = int(folder[1:5])
264 264 except:
265 265 return 0
266 266
267 267 try:
268 268 doy = int(folder[5:8])
269 269 except:
270 270 return 0
271 271
272 272 return 1
273 273
274 274 class JRODataIO:
275 275
276 276 c = 3E8
277 277
278 278 isConfig = False
279 279
280 280 basicHeaderObj = BasicHeader(LOCALTIME)
281 281
282 282 systemHeaderObj = SystemHeader()
283 283
284 284 radarControllerHeaderObj = RadarControllerHeader()
285 285
286 286 processingHeaderObj = ProcessingHeader()
287 287
288 288 online = 0
289 289
290 290 dtype = None
291 291
292 292 pathList = []
293 293
294 294 filenameList = []
295 295
296 296 filename = None
297 297
298 298 ext = None
299 299
300 300 flagIsNewFile = 1
301 301
302 302 flagTimeBlock = 0
303 303
304 304 flagIsNewBlock = 0
305 305
306 306 fp = None
307 307
308 308 firstHeaderSize = 0
309 309
310 310 basicHeaderSize = 24
311 311
312 312 versionFile = 1103
313 313
314 314 fileSize = None
315 315
316 316 ippSeconds = None
317 317
318 318 fileSizeByHeader = None
319 319
320 320 fileIndex = None
321 321
322 322 profileIndex = None
323 323
324 324 blockIndex = None
325 325
326 326 nTotalBlocks = None
327 327
328 328 maxTimeStep = 30
329 329
330 330 lastUTTime = None
331 331
332 332 datablock = None
333 333
334 334 dataOut = None
335 335
336 336 blocksize = None
337 337
338 338 def __init__(self):
339 339
340 340 raise ValueError, "Not implemented"
341 341
342 342 def run(self):
343 343
344 344 raise ValueError, "Not implemented"
345 345
346 346 def getOutput(self):
347 347
348 348 return self.dataOut
349 349
350 350 class JRODataReader(JRODataIO, ProcessingUnit):
351 351
352 352 nReadBlocks = 0
353 353
354 354 delay = 10 #number of seconds waiting a new file
355 355
356 356 nTries = 3 #number of retries
357 357
358 358 nFiles = 3 #number of files for searching
359 359
360 360 path = None
361 361
362 362 foldercounter = 0
363 363
364 364 flagNoMoreFiles = 0
365 365
366 366 datetimeList = []
367 367
368 368 __isFirstTimeOnline = 1
369 369
370 370 __printInfo = True
371 371
372 372 profileIndex = None
373 373
374 374 def __init__(self):
375 375
376 376 """
377 377
378 378 """
379 379
380 380 raise ValueError, "This method has not been implemented"
381 381
382 382
383 383 def createObjByDefault(self):
384 384 """
385 385
386 386 """
387 387 raise ValueError, "This method has not been implemented"
388 388
389 389 def getBlockDimension(self):
390 390
391 391 raise ValueError, "No implemented"
392 392
393 393 def __searchFilesOffLine(self,
394 394 path,
395 395 startDate,
396 396 endDate,
397 397 startTime=datetime.time(0,0,0),
398 398 endTime=datetime.time(23,59,59),
399 399 set=None,
400 400 expLabel='',
401 401 ext='.r',
402 402 walk=True):
403 403
404 404 pathList = []
405 405
406 406 if not walk:
407 407 pathList.append(path)
408 408
409 409 else:
410 410 dirList = []
411 411 for thisPath in os.listdir(path):
412 412 if not os.path.isdir(os.path.join(path,thisPath)):
413 413 continue
414 414 if not isDoyFolder(thisPath):
415 415 continue
416 416
417 417 dirList.append(thisPath)
418 418
419 419 if not(dirList):
420 420 return None, None
421 421
422 422 thisDate = startDate
423 423
424 424 while(thisDate <= endDate):
425 425 year = thisDate.timetuple().tm_year
426 426 doy = thisDate.timetuple().tm_yday
427 427
428 428 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
429 429 if len(matchlist) == 0:
430 430 thisDate += datetime.timedelta(1)
431 431 continue
432 432 for match in matchlist:
433 433 pathList.append(os.path.join(path,match,expLabel))
434 434
435 435 thisDate += datetime.timedelta(1)
436 436
437 437 if pathList == []:
438 438 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
439 439 return None, None
440 440
441 441 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
442 442
443 443 filenameList = []
444 444 datetimeList = []
445 445
446 446 for i in range(len(pathList)):
447 447
448 448 thisPath = pathList[i]
449 449
450 450 fileList = glob.glob1(thisPath, "*%s" %ext)
451 451 fileList.sort()
452 452
453 453 for file in fileList:
454 454
455 455 filename = os.path.join(thisPath,file)
456 456 thisDatetime = isFileinThisTime(filename, startTime, endTime)
457 457
458 458 if not(thisDatetime):
459 459 continue
460 460
461 461 filenameList.append(filename)
462 462 datetimeList.append(thisDatetime)
463 463
464 464 if not(filenameList):
465 465 print "Any file was found for the time range %s - %s" %(startTime, endTime)
466 466 return None, None
467 467
468 468 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
469 469 print
470 470
471 471 for i in range(len(filenameList)):
472 472 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
473 473
474 474 self.filenameList = filenameList
475 475 self.datetimeList = datetimeList
476 476
477 477 return pathList, filenameList
478 478
479 479 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
480 480
481 481 """
482 482 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
483 483 devuelve el archivo encontrado ademas de otros datos.
484 484
485 485 Input:
486 486 path : carpeta donde estan contenidos los files que contiene data
487 487
488 488 expLabel : Nombre del subexperimento (subfolder)
489 489
490 490 ext : extension de los files
491 491
492 492 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
493 493
494 494 Return:
495 495 directory : eL directorio donde esta el file encontrado
496 496 filename : el ultimo file de una determinada carpeta
497 497 year : el anho
498 498 doy : el numero de dia del anho
499 499 set : el set del archivo
500 500
501 501
502 502 """
503 503 dirList = []
504 504
505 505 if not walk:
506 506 fullpath = path
507 507 foldercounter = 0
508 508 else:
509 509 #Filtra solo los directorios
510 510 for thisPath in os.listdir(path):
511 511 if not os.path.isdir(os.path.join(path,thisPath)):
512 512 continue
513 513 if not isDoyFolder(thisPath):
514 514 continue
515 515
516 516 dirList.append(thisPath)
517 517
518 518 if not(dirList):
519 519 return None, None, None, None, None, None
520 520
521 521 dirList = sorted( dirList, key=str.lower )
522 522
523 523 doypath = dirList[-1]
524 524 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
525 525 fullpath = os.path.join(path, doypath, expLabel)
526 526
527 527
528 528 print "%s folder was found: " %(fullpath )
529 529
530 530 if set == None:
531 531 filename = getlastFileFromPath(fullpath, ext)
532 532 else:
533 533 filename = getFileFromSet(fullpath, ext, set)
534 534
535 535 if not(filename):
536 536 return None, None, None, None, None, None
537 537
538 538 print "%s file was found" %(filename)
539 539
540 540 if not(self.__verifyFile(os.path.join(fullpath, filename))):
541 541 return None, None, None, None, None, None
542 542
543 543 year = int( filename[1:5] )
544 544 doy = int( filename[5:8] )
545 545 set = int( filename[8:11] )
546 546
547 547 return fullpath, foldercounter, filename, year, doy, set
548 548
549 549 def __setNextFileOffline(self):
550 550
551 551 idFile = self.fileIndex
552 552
553 553 while (True):
554 554 idFile += 1
555 555 if not(idFile < len(self.filenameList)):
556 556 self.flagNoMoreFiles = 1
557 557 print "No more Files"
558 558 return 0
559 559
560 560 filename = self.filenameList[idFile]
561 561
562 562 if not(self.__verifyFile(filename)):
563 563 continue
564 564
565 565 fileSize = os.path.getsize(filename)
566 566 fp = open(filename,'rb')
567 567 break
568 568
569 569 self.flagIsNewFile = 1
570 570 self.fileIndex = idFile
571 571 self.filename = filename
572 572 self.fileSize = fileSize
573 573 self.fp = fp
574 574
575 575 print "Setting the file: %s"%self.filename
576 576
577 577 return 1
578 578
579 579 def __setNextFileOnline(self):
580 580 """
581 581 Busca el siguiente file que tenga suficiente data para ser leida, dentro de un folder especifico, si
582 582 no encuentra un file valido espera un tiempo determinado y luego busca en los posibles n files
583 583 siguientes.
584 584
585 585 Affected:
586 586 self.flagIsNewFile
587 587 self.filename
588 588 self.fileSize
589 589 self.fp
590 590 self.set
591 591 self.flagNoMoreFiles
592 592
593 593 Return:
594 594 0 : si luego de una busqueda del siguiente file valido este no pudo ser encontrado
595 595 1 : si el file fue abierto con exito y esta listo a ser leido
596 596
597 597 Excepciones:
598 598 Si un determinado file no puede ser abierto
599 599 """
600 600 nFiles = 0
601 601 fileOk_flag = False
602 602 firstTime_flag = True
603 603
604 604 self.set += 1
605 605
606 606 if self.set > 999:
607 607 self.set = 0
608 608 self.foldercounter += 1
609 609
610 610 #busca el 1er file disponible
611 611 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
612 612 if fullfilename:
613 613 if self.__verifyFile(fullfilename, False):
614 614 fileOk_flag = True
615 615
616 616 #si no encuentra un file entonces espera y vuelve a buscar
617 617 if not(fileOk_flag):
618 618 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
619 619
620 620 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
621 621 tries = self.nTries
622 622 else:
623 623 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
624 624
625 625 for nTries in range( tries ):
626 626 if firstTime_flag:
627 627 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
628 628 time.sleep( self.delay )
629 629 else:
630 630 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
631 631
632 632 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
633 633 if fullfilename:
634 634 if self.__verifyFile(fullfilename):
635 635 fileOk_flag = True
636 636 break
637 637
638 638 if fileOk_flag:
639 639 break
640 640
641 641 firstTime_flag = False
642 642
643 643 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
644 644 self.set += 1
645 645
646 646 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
647 647 self.set = 0
648 648 self.doy += 1
649 649 self.foldercounter = 0
650 650
651 651 if fileOk_flag:
652 652 self.fileSize = os.path.getsize( fullfilename )
653 653 self.filename = fullfilename
654 654 self.flagIsNewFile = 1
655 655 if self.fp != None: self.fp.close()
656 656 self.fp = open(fullfilename, 'rb')
657 657 self.flagNoMoreFiles = 0
658 658 print 'Setting the file: %s' % fullfilename
659 659 else:
660 660 self.fileSize = 0
661 661 self.filename = None
662 662 self.flagIsNewFile = 0
663 663 self.fp = None
664 664 self.flagNoMoreFiles = 1
665 665 print 'No more Files'
666 666
667 667 return fileOk_flag
668 668
669 669
670 670 def setNextFile(self):
671 671 if self.fp != None:
672 672 self.fp.close()
673 673
674 674 if self.online:
675 675 newFile = self.__setNextFileOnline()
676 676 else:
677 677 newFile = self.__setNextFileOffline()
678 678
679 679 if not(newFile):
680 680 return 0
681 681
682 682 self.__readFirstHeader()
683 683 self.nReadBlocks = 0
684 684 return 1
685 685
686 686 def __waitNewBlock(self):
687 687 """
688 688 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
689 689
690 690 Si el modo de lectura es OffLine siempre retorn 0
691 691 """
692 692 if not self.online:
693 693 return 0
694 694
695 695 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
696 696 return 0
697 697
698 698 currentPointer = self.fp.tell()
699 699
700 700 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
701 701
702 702 for nTries in range( self.nTries ):
703 703
704 704 self.fp.close()
705 705 self.fp = open( self.filename, 'rb' )
706 706 self.fp.seek( currentPointer )
707 707
708 708 self.fileSize = os.path.getsize( self.filename )
709 709 currentSize = self.fileSize - currentPointer
710 710
711 711 if ( currentSize >= neededSize ):
712 712 self.__rdBasicHeader()
713 713 return 1
714 714
715 715 if self.fileSize == self.fileSizeByHeader:
716 716 # self.flagEoF = True
717 717 return 0
718 718
719 719 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
720 720 time.sleep( self.delay )
721 721
722 722
723 723 return 0
724 724
725 725 def waitDataBlock(self,pointer_location):
726 726
727 727 currentPointer = pointer_location
728 728
729 729 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
730 730
731 731 for nTries in range( self.nTries ):
732 732 self.fp.close()
733 733 self.fp = open( self.filename, 'rb' )
734 734 self.fp.seek( currentPointer )
735 735
736 736 self.fileSize = os.path.getsize( self.filename )
737 737 currentSize = self.fileSize - currentPointer
738 738
739 739 if ( currentSize >= neededSize ):
740 740 return 1
741 741
742 742 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
743 743 time.sleep( self.delay )
744 744
745 745 return 0
746 746
747 747
748 748 def __jumpToLastBlock(self):
749 749
750 750 if not(self.__isFirstTimeOnline):
751 751 return
752 752
753 753 csize = self.fileSize - self.fp.tell()
754 754 blocksize = self.processingHeaderObj.blockSize
755 755
756 756 #salta el primer bloque de datos
757 757 if csize > self.processingHeaderObj.blockSize:
758 758 self.fp.seek(self.fp.tell() + blocksize)
759 759 else:
760 760 return
761 761
762 762 csize = self.fileSize - self.fp.tell()
763 763 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
764 764 while True:
765 765
766 766 if self.fp.tell()<self.fileSize:
767 767 self.fp.seek(self.fp.tell() + neededsize)
768 768 else:
769 769 self.fp.seek(self.fp.tell() - neededsize)
770 770 break
771 771
772 772 # csize = self.fileSize - self.fp.tell()
773 773 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
774 774 # factor = int(csize/neededsize)
775 775 # if factor > 0:
776 776 # self.fp.seek(self.fp.tell() + factor*neededsize)
777 777
778 778 self.flagIsNewFile = 0
779 779 self.__isFirstTimeOnline = 0
780 780
781 781
782 782 def __setNewBlock(self):
783 783
784 784 if self.fp == None:
785 785 return 0
786 786
787 787 if self.online:
788 788 self.__jumpToLastBlock()
789 789
790 790 if self.flagIsNewFile:
791 791 return 1
792 792
793 793 self.lastUTTime = self.basicHeaderObj.utc
794 794 currentSize = self.fileSize - self.fp.tell()
795 795 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
796 796
797 797 if (currentSize >= neededSize):
798 798 self.__rdBasicHeader()
799 799 return 1
800 800
801 801 if self.__waitNewBlock():
802 802 return 1
803 803
804 804 if not(self.setNextFile()):
805 805 return 0
806 806
807 807 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
808 808
809 809 self.flagTimeBlock = 0
810 810
811 811 if deltaTime > self.maxTimeStep:
812 812 self.flagTimeBlock = 1
813 813
814 814 return 1
815 815
816 816
817 817 def readNextBlock(self):
818 818 if not(self.__setNewBlock()):
819 819 return 0
820 820
821 821 if not(self.readBlock()):
822 822 return 0
823 823
824 824 return 1
825 825
826 826 def __rdProcessingHeader(self, fp=None):
827 827 if fp == None:
828 828 fp = self.fp
829 829
830 830 self.processingHeaderObj.read(fp)
831 831
832 832 def __rdRadarControllerHeader(self, fp=None):
833 833 if fp == None:
834 834 fp = self.fp
835 835
836 836 self.radarControllerHeaderObj.read(fp)
837 837
838 838 def __rdSystemHeader(self, fp=None):
839 839 if fp == None:
840 840 fp = self.fp
841 841
842 842 self.systemHeaderObj.read(fp)
843 843
844 844 def __rdBasicHeader(self, fp=None):
845 845 if fp == None:
846 846 fp = self.fp
847 847
848 848 self.basicHeaderObj.read(fp)
849 849
850 850
851 851 def __readFirstHeader(self):
852 852 self.__rdBasicHeader()
853 853 self.__rdSystemHeader()
854 854 self.__rdRadarControllerHeader()
855 855 self.__rdProcessingHeader()
856 856
857 857 self.firstHeaderSize = self.basicHeaderObj.size
858 858
859 859 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
860 860 if datatype == 0:
861 861 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
862 862 elif datatype == 1:
863 863 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
864 864 elif datatype == 2:
865 865 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
866 866 elif datatype == 3:
867 867 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
868 868 elif datatype == 4:
869 869 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
870 870 elif datatype == 5:
871 871 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
872 872 else:
873 873 raise ValueError, 'Data type was not defined'
874 874
875 875 self.dtype = datatype_str
876 876 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
877 877 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
878 878 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
879 879 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
880 880 self.getBlockDimension()
881 881
882 882
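A condensed sketch of what __readFirstHeader() above derives from the headers; the dictionary form is only an illustration of the if/elif chain, and the 150 km IPP is an assumed value (the 2*1000/c factor implies the header IPP is given in km):

    import numpy

    # datatype index -> complex sample dtype, as selected in __readFirstHeader()
    DTYPE_BY_INDEX = {
        0: numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
        1: numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
        2: numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
        3: numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
        4: numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
        5: numpy.dtype([('real', '<f8'), ('imag', '<f8')]),
    }
    print DTYPE_BY_INDEX[1]          # 16-bit integer I/Q samples

    ipp_km = 150.0                   # assumed inter-pulse period in km
    print 2 * 1000 * ipp_km / 3e8    # ippSeconds = 0.001 s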
883 883 def __verifyFile(self, filename, msgFlag=True):
884 884 msg = None
885 885 try:
886 886 fp = open(filename, 'rb')
887 887 currentPosition = fp.tell()
888 888 except:
889 889 if msgFlag:
890 890 print "The file %s can't be opened" % (filename)
891 891 return False
892 892
893 893 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
894 894
895 895 if neededSize == 0:
896 896 basicHeaderObj = BasicHeader(LOCALTIME)
897 897 systemHeaderObj = SystemHeader()
898 898 radarControllerHeaderObj = RadarControllerHeader()
899 899 processingHeaderObj = ProcessingHeader()
900 900
901 901 try:
902 902 if not( basicHeaderObj.read(fp) ): raise IOError
903 903 if not( systemHeaderObj.read(fp) ): raise IOError
904 904 if not( radarControllerHeaderObj.read(fp) ): raise IOError
905 905 if not( processingHeaderObj.read(fp) ): raise IOError
906 906 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
907 907
908 908 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
909 909
910 910 except:
911 911 if msgFlag:
912 912 print "\tThe file %s is empty or it doesn't have enough data" % filename
913 913
914 914 fp.close()
915 915 return False
916 916 else:
917 917 msg = "\tSkipping the file %s because it doesn't have enough data" %filename
918 918
919 919 fp.close()
920 920 fileSize = os.path.getsize(filename)
921 921 currentSize = fileSize - currentPosition
922 922 if currentSize < neededSize:
923 923 if msgFlag and (msg != None):
924 924 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
925 925 return False
926 926
927 927 return True
928 928
929 929 def setup(self,
930 930 path=None,
931 931 startDate=None,
932 932 endDate=None,
933 933 startTime=datetime.time(0,0,0),
934 934 endTime=datetime.time(23,59,59),
935 935 set=None,
936 936 expLabel = "",
937 937 ext = None,
938 938 online = False,
939 939 delay = 60,
940 940 walk = True):
941 941
942 942 if path == None:
943 943 raise ValueError, "The path is not valid"
944 944
945 945 if ext == None:
946 946 ext = self.ext
947 947
948 948 if online:
949 949 print "Searching files in online mode..."
950 950
951 951 for nTries in range( self.nTries ):
952 952 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
953 953
954 954 if fullpath:
955 955 break
956 956
957 957 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
958 958 time.sleep( self.delay )
959 959
960 960 if not(fullpath):
961 961 print "There 'isn't valied files in %s" % path
962 962 return None
963 963
964 964 self.year = year
965 965 self.doy = doy
966 966 self.set = set - 1
967 967 self.path = path
968 968 self.foldercounter = foldercounter
969 969
970 970 else:
971 971 print "Searching files in offline mode ..."
972 972 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
973 973 startTime=startTime, endTime=endTime,
974 974 set=set, expLabel=expLabel, ext=ext,
975 975 walk=walk)
976 976
977 977 if not(pathList):
978 978 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
979 979 datetime.datetime.combine(startDate,startTime).ctime(),
980 980 datetime.datetime.combine(endDate,endTime).ctime())
981 981
982 982 sys.exit(-1)
983 983
984 984
985 985 self.fileIndex = -1
986 986 self.pathList = pathList
987 987 self.filenameList = filenameList
988 988
989 989 self.online = online
990 990 self.delay = delay
991 991 ext = ext.lower()
992 992 self.ext = ext
993 993
994 994 if not(self.setNextFile()):
995 995 if (startDate!=None) and (endDate!=None):
996 996 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
997 997 elif startDate != None:
998 998 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
999 999 else:
1000 1000 print "No files"
1001 1001
1002 1002 sys.exit(-1)
1003 1003
1004 1004 # self.updateDataHeader()
1005 1005
1006 1006 return self.dataOut
1007 1007
1008 1008 def getBasicHeader(self):
1009 1009
1010 1010 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1011 1011
1012 1012 self.dataOut.flagTimeBlock = self.flagTimeBlock
1013 1013
1014 1014 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1015 1015
1016 1016 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1017 1017
1018 1018 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1019 1019
1020 1020 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1021 1021
1022 1022 def getFirstHeader(self):
1023 1023
1024 1024 raise ValueError, "This method has not been implemented"
1025 1025
1026 1026 def getData():
1027 1027
1028 1028 raise ValueError, "This method has not been implemented"
1029 1029
1030 1030 def hasNotDataInBuffer():
1031 1031
1032 1032 raise ValueError, "This method has not been implemented"
1033 1033
1034 1034 def readBlock():
1035 1035
1036 1036 raise ValueError, "This method has not been implemented"
1037 1037
1038 1038 def isEndProcess(self):
1039 1039
1040 1040 return self.flagNoMoreFiles
1041 1041
1042 1042 def printReadBlocks(self):
1043 1043
1044 1044 print "Number of read blocks per file %04d" %self.nReadBlocks
1045 1045
1046 1046 def printTotalBlocks(self):
1047 1047
1048 1048 print "Number of read blocks %04d" %self.nTotalBlocks
1049 1049
1050 1050 def printNumberOfBlock(self):
1051 1051
1052 1052 if self.flagIsNewBlock:
1053 1053 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1054 1054
1055 1055 def printInfo(self):
1056 1056
1057 1057 if self.__printInfo == False:
1058 1058 return
1059 1059
1060 1060 self.basicHeaderObj.printInfo()
1061 1061 self.systemHeaderObj.printInfo()
1062 1062 self.radarControllerHeaderObj.printInfo()
1063 1063 self.processingHeaderObj.printInfo()
1064 1064
1065 1065 self.__printInfo = False
1066 1066
1067 1067
1068 1068 def run(self, **kwargs):
1069 1069
1070 1070 if not(self.isConfig):
1071 1071
1072 1072 # self.dataOut = dataOut
1073 1073 self.setup(**kwargs)
1074 1074 self.isConfig = True
1075 1075
1076 1076 self.getData()
1077 1077
1078 1078 class JRODataWriter(JRODataIO, Operation):
1079 1079
1080 1080 """
1081 1081 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1082 1082 de los datos siempre se realiza por bloques.
1083 1083 """
1084 1084
1085 1085 blockIndex = 0
1086 1086
1087 1087 path = None
1088 1088
1089 1089 setFile = None
1090 1090
1091 1091 profilesPerBlock = None
1092 1092
1093 1093 blocksPerFile = None
1094 1094
1095 1095 nWriteBlocks = 0
1096 1096
1097 1097 def __init__(self, dataOut=None):
1098 1098 raise ValueError, "Not implemented"
1099 1099
1100 1100
1101 1101 def hasAllDataInBuffer(self):
1102 1102 raise ValueError, "Not implemented"
1103 1103
1104 1104
1105 1105 def setBlockDimension(self):
1106 1106 raise ValueError, "Not implemented"
1107 1107
1108 1108
1109 1109 def writeBlock(self):
1110 1110 raise ValueError, "No implemented"
1111 1111
1112 1112
1113 1113 def putData(self):
1114 1114 raise ValueError, "No implemented"
1115 1115
1116 1116
1117 1117 def setBasicHeader(self):
1118 1118
1119 1119 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1120 1120 self.basicHeaderObj.version = self.versionFile
1121 1121 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1122 1122
1123 1123 utc = numpy.floor(self.dataOut.utctime)
1124 1124 milisecond = (self.dataOut.utctime - utc)* 1000.0
1125 1125
1126 1126 self.basicHeaderObj.utc = utc
1127 1127 self.basicHeaderObj.miliSecond = milisecond
1128 1128 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1129 1129 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1130 1130 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1131 1131
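The UTC second/millisecond split performed by setBasicHeader() above, as a standalone snippet with an arbitrary timestamp:

    import numpy

    utctime    = 1353423981.417            # arbitrary example timestamp [s]
    utc        = numpy.floor(utctime)      # 1353423981.0 -> basicHeaderObj.utc
    milisecond = (utctime - utc)*1000.0    # ~417 ms      -> basicHeaderObj.miliSecond
    print utc, milisecond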
1132 1132 def setFirstHeader(self):
1133 1133 """
1134 1134 Obtiene una copia del First Header
1135 1135
1136 1136 Affected:
1137 1137
1138 1138 self.basicHeaderObj
1139 1139 self.systemHeaderObj
1140 1140 self.radarControllerHeaderObj
1141 1141 self.processingHeaderObj self.
1142 1142
1143 1143 Return:
1144 1144 None
1145 1145 """
1146 1146
1147 1147 raise ValueError, "No implemented"
1148 1148
1149 1149 def __writeFirstHeader(self):
1150 1150 """
1151 1151 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1152 1152
1153 1153 Affected:
1154 1154 __dataType
1155 1155
1156 1156 Return:
1157 1157 None
1158 1158 """
1159 1159
1160 1160 # CALCULAR PARAMETROS
1161 1161
1162 1162 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1163 1163 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1164 1164
1165 1165 self.basicHeaderObj.write(self.fp)
1166 1166 self.systemHeaderObj.write(self.fp)
1167 1167 self.radarControllerHeaderObj.write(self.fp)
1168 1168 self.processingHeaderObj.write(self.fp)
1169 1169
1170 1170 self.dtype = self.dataOut.dtype
1171 1171
1172 1172 def __setNewBlock(self):
1173 1173 """
1174 1174 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1175 1175
1176 1176 Return:
1177 1177 0 : si no pudo escribir nada
1178 1178 1 : Si escribio el Basic o el First Header
1179 1179 """
1180 1180 if self.fp == None:
1181 1181 self.setNextFile()
1182 1182
1183 1183 if self.flagIsNewFile:
1184 1184 return 1
1185 1185
1186 1186 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1187 1187 self.basicHeaderObj.write(self.fp)
1188 1188 return 1
1189 1189
1190 1190 if not( self.setNextFile() ):
1191 1191 return 0
1192 1192
1193 1193 return 1
1194 1194
1195 1195
1196 1196 def writeNextBlock(self):
1197 1197 """
1198 1198 Selecciona el bloque siguiente de datos y los escribe en un file
1199 1199
1200 1200 Return:
1201 1201 0 : Si no pudo escribir el bloque de datos
1202 1202 1 : Si pudo escribir el bloque de datos
1203 1203 """
1204 1204 if not( self.__setNewBlock() ):
1205 1205 return 0
1206 1206
1207 1207 self.writeBlock()
1208 1208
1209 1209 return 1
1210 1210
1211 1211 def setNextFile(self):
1212 1212 """
1213 1213 Determina el siguiente file que sera escrito
1214 1214
1215 1215 Affected:
1216 1216 self.filename
1217 1217 self.subfolder
1218 1218 self.fp
1219 1219 self.setFile
1220 1220 self.flagIsNewFile
1221 1221
1222 1222 Return:
1223 1223 0 : Si el archivo no puede ser escrito
1224 1224 1 : Si el archivo esta listo para ser escrito
1225 1225 """
1226 1226 ext = self.ext
1227 1227 path = self.path
1228 1228
1229 1229 if self.fp != None:
1230 1230 self.fp.close()
1231 1231
1232 1232 timeTuple = time.localtime( self.dataOut.utctime)
1233 1233 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1234 1234
1235 1235 fullpath = os.path.join( path, subfolder )
1236 1236 if not( os.path.exists(fullpath) ):
1237 1237 os.mkdir(fullpath)
1238 1238 self.setFile = -1 #inicializo mi contador de seteo
1239 1239 else:
1240 1240 filesList = os.listdir( fullpath )
1241 1241 if len( filesList ) > 0:
1242 1242 filesList = sorted( filesList, key=str.lower )
1243 1243 filen = filesList[-1]
1244 1244 # el filename debera tener el siguiente formato
1245 1245 # 0 1234 567 89A BCDE (hex)
1246 1246 # x YYYY DDD SSS .ext
1247 1247 if isNumber( filen[8:11] ):
1248 1248 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1249 1249 else:
1250 1250 self.setFile = -1
1251 1251 else:
1252 1252 self.setFile = -1 #inicializo mi contador de seteo
1253 1253
1254 1254 setFile = self.setFile
1255 1255 setFile += 1
1256 1256
1257 1257 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1258 1258 timeTuple.tm_year,
1259 1259 timeTuple.tm_yday,
1260 1260 setFile,
1261 1261 ext )
1262 1262
1263 1263 filename = os.path.join( path, subfolder, file )
1264 1264
1265 1265 fp = open( filename,'wb' )
1266 1266
1267 1267 self.blockIndex = 0
1268 1268
1269 1269 #guardando atributos
1270 1270 self.filename = filename
1271 1271 self.subfolder = subfolder
1272 1272 self.fp = fp
1273 1273 self.setFile = setFile
1274 1274 self.flagIsNewFile = 1
1275 1275
1276 1276 self.setFirstHeader()
1277 1277
1278 1278 print 'Writing the file: %s'%self.filename
1279 1279
1280 1280 self.__writeFirstHeader()
1281 1281
1282 1282 return 1
1283 1283
1284 1284 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
1285 1285 """
1286 1286 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1287 1287
1288 1288 Inputs:
1289 1289 path : el path destino en el cual se escribiran los files a crear
1290 1290 format : formato en el cual sera salvado un file
1291 1291 set : el setebo del file
1292 1292
1293 1293 Return:
1294 1294 0 : Si no realizo un buen seteo
1295 1295 1 : Si realizo un buen seteo
1296 1296 """
1297 1297
1298 1298 if ext == None:
1299 1299 ext = self.ext
1300 1300
1301 1301 ext = ext.lower()
1302 1302
1303 1303 self.ext = ext
1304 1304
1305 1305 self.path = path
1306 1306
1307 1307 self.setFile = set - 1
1308 1308
1309 1309 self.blocksPerFile = blocksPerFile
1310 1310
1311 1311 self.profilesPerBlock = profilesPerBlock
1312 1312
1313 1313 self.dataOut = dataOut
1314 1314
1315 1315 if not(self.setNextFile()):
1316 1316 print "There isn't a next file"
1317 1317 return 0
1318 1318
1319 1319 self.setBlockDimension()
1320 1320
1321 1321 return 1
1322 1322
1323 1323 def run(self, dataOut, **kwargs):
1324 1324
1325 1325 if not(self.isConfig):
1326 1326
1327 1327 self.setup(dataOut, **kwargs)
1328 1328 self.isConfig = True
1329 1329
1330 1330 self.putData()
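# Usage sketch (illustrative only; readerObj and the output path are
# hypothetical, assuming a configured reader that provides dataOut):
#
#     writerObj = VoltageWriter()
#     while not readerObj.flagNoMoreFiles:
#         readerObj.getData()
#         writerObj.run(readerObj.dataOut,
#                       path="/home/myuser/output",
#                       blocksPerFile=100,
#                       profilesPerBlock=64)
#
# run() calls setup() on the first call and then putData(), which buffers one
# profile per call and writes a block once profilesPerBlock profiles have
# been accumulated.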
1331 1331
1332 1332 class VoltageReader(JRODataReader):
1333 1333 """
1334 1334 This class reads voltage data from files in rawdata format (.r). Data is always
1335 1335 read in blocks. The data read (a 3-dimensional array:
1336 1336 profiles*heights*channels) is stored in the "buffer" variable.
1337 1337 
1338 1338 profiles * heights * channels
1339 1339 
1340 1340 This class contains instances (objects) of the BasicHeader, SystemHeader,
1341 1341 RadarControllerHeader and Voltage classes. The first three store the data header
1342 1342 information (metadata), and the fourth (Voltage) retrieves and stores one profile of
1343 1343 data from the "buffer" every time the "getData" method is called.
1344 1344
1345 1345 Example:
1346 1346
1347 1347 dpath = "/home/myuser/data"
1348 1348
1349 1349 startTime = datetime.datetime(2010,1,20,0,0,0)
1350 1350 
1351 1351 endTime = datetime.datetime(2010,1,21,23,59,59)
1352 1352
1353 1353 readerObj = VoltageReader()
1354 1354
1355 1355 readerObj.setup(dpath, startTime, endTime)
1356 1356
1357 1357 while(True):
1358 1358
1359 1359 #to get one profile
1360 1360 profile = readerObj.getData()
1361 1361
1362 1362 #print the profile
1363 1363 print profile
1364 1364
1365 1365 #If you want to see all datablock
1366 1366 print readerObj.datablock
1367 1367
1368 1368 if readerObj.flagNoMoreFiles:
1369 1369 break
1370 1370
1371 1371 """
1372 1372
1373 1373 ext = ".r"
1374 1374
1375 1375 optchar = "D"
1376 1376 dataOut = None
1377 1377
1378 1378
1379 1379 def __init__(self):
1380 1380 """
1381 1381 Constructor of the VoltageReader class for reading voltage data.
1382 1382 
1383 1383 Input:
1384 1384 dataOut : object of the Voltage class. This object is used to
1385 1385 store one profile of data every time a request is made
1386 1386 (getData). The profile is obtained from the data buffer;
1387 1387 if the buffer is empty a new data block is read
1388 1388 first.
1389 1389 This object is created internally by the constructor.
1390 1390 
1391 1391 Affected:
1392 1392 self.dataOut
1393 1393
1394 1394 Return:
1395 1395 None
1396 1396 """
1397 1397
1398 1398 self.isConfig = False
1399 1399
1400 1400 self.datablock = None
1401 1401
1402 1402 self.utc = 0
1403 1403
1404 1404 self.ext = ".r"
1405 1405
1406 1406 self.optchar = "D"
1407 1407
1408 1408 self.basicHeaderObj = BasicHeader(LOCALTIME)
1409 1409
1410 1410 self.systemHeaderObj = SystemHeader()
1411 1411
1412 1412 self.radarControllerHeaderObj = RadarControllerHeader()
1413 1413
1414 1414 self.processingHeaderObj = ProcessingHeader()
1415 1415
1416 1416 self.online = 0
1417 1417
1418 1418 self.fp = None
1419 1419
1420 1420 self.idFile = None
1421 1421
1422 1422 self.dtype = None
1423 1423
1424 1424 self.fileSizeByHeader = None
1425 1425
1426 1426 self.filenameList = []
1427 1427
1428 1428 self.filename = None
1429 1429
1430 1430 self.fileSize = None
1431 1431
1432 1432 self.firstHeaderSize = 0
1433 1433
1434 1434 self.basicHeaderSize = 24
1435 1435
1436 1436 self.pathList = []
1437 1437
1438 1438 self.filenameList = []
1439 1439
1440 1440 self.lastUTTime = 0
1441 1441
1442 1442 self.maxTimeStep = 30
1443 1443
1444 1444 self.flagNoMoreFiles = 0
1445 1445
1446 1446 self.set = 0
1447 1447
1448 1448 self.path = None
1449 1449
1450 1450 self.profileIndex = 2**32-1
1451 1451
1452 1452 self.delay = 3 #seconds
1453 1453
1454 1454 self.nTries = 3 #number of retries
1455 1455
1456 1456 self.nFiles = 3 #number of files for searching
1457 1457
1458 1458 self.nReadBlocks = 0
1459 1459
1460 1460 self.flagIsNewFile = 1
1461 1461
1462 1462 self.__isFirstTimeOnline = 1
1463 1463
1464 1464 self.ippSeconds = 0
1465 1465
1466 1466 self.flagTimeBlock = 0
1467 1467
1468 1468 self.flagIsNewBlock = 0
1469 1469
1470 1470 self.nTotalBlocks = 0
1471 1471
1472 1472 self.blocksize = 0
1473 1473
1474 1474 self.dataOut = self.createObjByDefault()
1475 1475
1476 1476 def createObjByDefault(self):
1477 1477
1478 1478 dataObj = Voltage()
1479 1479
1480 1480 return dataObj
1481 1481
1482 1482 def __hasNotDataInBuffer(self):
1483 1483 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1484 1484 return 1
1485 1485 return 0
1486 1486
1487 1487
1488 1488 def getBlockDimension(self):
1489 1489 """
1490 1490 Gets the number of points to read per data block
1491 1491
1492 1492 Affected:
1493 1493 self.blocksize
1494 1494
1495 1495 Return:
1496 1496 None
1497 1497 """
1498 1498 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1499 1499 self.blocksize = pts2read
1500 1500
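# Illustrative example (assumed header values): with 100 profiles per block,
# 200 heights and 4 channels, blocksize = 100 * 200 * 4 = 80000 samples
# (real/imag pairs) are read per block.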
1501 1501
1502 1502 def readBlock(self):
1503 1503 """
1504 1504 readBlock reads the data block from the current position of the file pointer
1505 1505 (self.fp) and updates all the parameters related to the data block
1506 1506 (metadata + data). The data read is stored in the buffer and the buffer
1507 1507 counter is reset to 0
1508 1508
1509 1509 Inputs:
1510 1510 None
1511 1511
1512 1512 Return:
1513 1513 None
1514 1514
1515 1515 Affected:
1516 1516 self.profileIndex
1517 1517 self.datablock
1518 1518 self.flagIsNewFile
1519 1519 self.flagIsNewBlock
1520 1520 self.nTotalBlocks
1521 1521
1522 1522 Exceptions:
1523 1523 If a block read is not a valid block
1524 1524 """
1525 1525 current_pointer_location = self.fp.tell()
1526 1526 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1527 1527
1528 1528 try:
1529 1529 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1530 1530 except:
1531 1531 #print "The read block (%3d) has not enough data" %self.nReadBlocks
1532 1532
1533 1533 if self.waitDataBlock(pointer_location=current_pointer_location):
1534 1534 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1535 1535 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1536 1536 # return 0
1537 1537
1538 1538 junk = numpy.transpose(junk, (2,0,1))
1539 1539 self.datablock = junk['real'] + junk['imag']*1j
1540 1540
1541 1541 self.profileIndex = 0
1542 1542
1543 1543 self.flagIsNewFile = 0
1544 1544 self.flagIsNewBlock = 1
1545 1545
1546 1546 self.nTotalBlocks += 1
1547 1547 self.nReadBlocks += 1
1548 1548
1549 1549 return 1
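# Note (descriptive): each raw block is stored on disk as
# profilesPerBlock * nHeights * nChannels structured (real, imag) pairs;
# after the reshape and transpose above, self.datablock is a complex array
# of shape (nChannels, profilesPerBlock, nHeights).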
1550 1550
1551 1551 def getFirstHeader(self):
1552 1552
1553 1553 self.dataOut.dtype = self.dtype
1554 1554
1555 1555 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1556 1556
1557 1557 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1558 1558
1559 1559 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1560 1560
1561 1561 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1562 1562
1563 1563 self.dataOut.ippSeconds = self.ippSeconds
1564 1564
1565 1565 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1566 1566
1567 1567 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1568 1568
1569 1569 self.dataOut.flagShiftFFT = False
1570 1570
1571 1571 if self.radarControllerHeaderObj.code != None:
1572 1572
1573 1573 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1574 1574
1575 1575 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1576 1576
1577 1577 self.dataOut.code = self.radarControllerHeaderObj.code
1578 1578
1579 1579 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1580 1580
1581 1581 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1582 1582
1583 1583 self.dataOut.flagDecodeData = False #assume the data is not decoded
1584 1584 
1585 1585 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1586 1586
1587 1587 self.dataOut.flagShiftFFT = False
1588 1588
1589 1589 def getData(self):
1590 1590 """
1591 1591 getData takes one data unit from the read buffer and copies it to the "Voltage"
1592 1592 class together with all its associated parameters (metadata). When there is no
1593 1593 data left in the read buffer, a new data block is read using "readNextBlock".
1594 1594 
1595 1595 It also increments the buffer counter by 1.
1596 1596 
1597 1597 Return:
1598 1598 data : returns one profile of voltages (heights * channels) copied from the
1599 1599 buffer. If there are no more files to read it returns None.
1600 1600 
1601 1601 Affected:
1602 1602 self.dataOut
1603 1603 self.profileIndex
1604 1604 self.flagTimeBlock
1605 1605 self.flagIsNewBlock
1610 1610 """
1611 1611
1612 1612 if self.flagNoMoreFiles:
1613 1613 self.dataOut.flagNoData = True
1614 1614 print 'Process finished'
1615 1615 return 0
1616 1616
1617 1617 self.flagTimeBlock = 0
1618 1618 self.flagIsNewBlock = 0
1619 1619
1620 1620 if self.__hasNotDataInBuffer():
1621 1621
1622 1622 if not( self.readNextBlock() ):
1623 1623 return 0
1624 1624
1625 1625 self.getFirstHeader()
1626 1626
1627 1627 if self.datablock is None:
1628 1628 self.dataOut.flagNoData = True
1629 1629 return 0
1630 1630
1631 1631 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1632 1632
1633 1633 self.dataOut.flagNoData = False
1634 1634
1635 1635 self.getBasicHeader()
1636 1636
1637 1637 self.profileIndex += 1
1638 1638
1639 1639 self.dataOut.realtime = self.online
1640 1640
1641 1641 return self.dataOut.data
1642 1642
1643 1643
1644 1644 class VoltageWriter(JRODataWriter):
1645 1645 """
1646 1646 This class writes voltage data to files in rawdata format (.r). Data is always
1647 1647 written in blocks.
1648 1648 """
1649 1649
1650 1650 ext = ".r"
1651 1651
1652 1652 optchar = "D"
1653 1653
1654 1654 shapeBuffer = None
1655 1655
1656 1656
1657 1657 def __init__(self):
1658 1658 """
1659 1659 Constructor of the VoltageWriter class for writing voltage data.
1660 1660
1661 1661 Affected:
1662 1662 self.dataOut
1663 1663
1664 1664 Return: None
1665 1665 """
1666 1666
1667 1667 self.nTotalBlocks = 0
1668 1668
1669 1669 self.profileIndex = 0
1670 1670
1671 1671 self.isConfig = False
1672 1672
1673 1673 self.fp = None
1674 1674
1675 1675 self.flagIsNewFile = 1
1676 1676
1677 1677 self.nTotalBlocks = 0
1678 1678
1679 1679 self.flagIsNewBlock = 0
1680 1680
1681 1681 self.setFile = None
1682 1682
1683 1683 self.dtype = None
1684 1684
1685 1685 self.path = None
1686 1686
1687 1687 self.filename = None
1688 1688
1689 1689 self.basicHeaderObj = BasicHeader(LOCALTIME)
1690 1690
1691 1691 self.systemHeaderObj = SystemHeader()
1692 1692
1693 1693 self.radarControllerHeaderObj = RadarControllerHeader()
1694 1694
1695 1695 self.processingHeaderObj = ProcessingHeader()
1696 1696
1697 1697 def hasAllDataInBuffer(self):
1698 1698 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1699 1699 return 1
1700 1700 return 0
1701 1701
1702 1702
1703 1703 def setBlockDimension(self):
1704 1704 """
1705 1705 Gets the dimensional shapes of the data sub-blocks that make up a block
1706 1706 
1707 1707 Affected:
1708 1708 self.shapeBuffer
1709 1709 self.datablock
1710 1710 
1711 1711
1712 1712 Return: None
1713 1713 """
1714 1714 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1715 1715 self.processingHeaderObj.nHeights,
1716 1716 self.systemHeaderObj.nChannels)
1717 1717
1718 1718 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1719 1719 self.processingHeaderObj.profilesPerBlock,
1720 1720 self.processingHeaderObj.nHeights),
1721 1721 dtype=numpy.dtype('complex64'))
1722 1722
1723 1723
1724 1724 def writeBlock(self):
1725 1725 """
1726 1726 Writes the buffer to the designated file
1727 1727
1728 1728 Affected:
1729 1729 self.profileIndex
1730 1730 self.flagIsNewFile
1731 1731 self.flagIsNewBlock
1732 1732 self.nTotalBlocks
1733 1733 self.blockIndex
1734 1734
1735 1735 Return: None
1736 1736 """
1737 1737 data = numpy.zeros( self.shapeBuffer, self.dtype )
1738 1738
1739 1739 junk = numpy.transpose(self.datablock, (1,2,0))
1740 1740
1741 1741 data['real'] = junk.real
1742 1742 data['imag'] = junk.imag
1743 1743
1744 1744 data = data.reshape( (-1) )
1745 1745
1746 1746 data.tofile( self.fp )
1747 1747
1748 1748 self.datablock.fill(0)
1749 1749
1750 1750 self.profileIndex = 0
1751 1751 self.flagIsNewFile = 0
1752 1752 self.flagIsNewBlock = 1
1753 1753
1754 1754 self.blockIndex += 1
1755 1755 self.nTotalBlocks += 1
1756 1756
1757 1757 def putData(self):
1758 1758 """
1759 1759 Fills a data block and then writes it to a file
1760 1760
1761 1761 Affected:
1762 1762 self.flagIsNewBlock
1763 1763 self.profileIndex
1764 1764
1765 1765 Return:
1766 1766 0 : if there is no data or no more files can be written
1767 1767 1 : if a data block was written to a file
1768 1768 """
1769 1769 if self.dataOut.flagNoData:
1770 1770 return 0
1771 1771
1772 1772 self.flagIsNewBlock = 0
1773 1773
1774 1774 if self.dataOut.flagTimeBlock:
1775 1775
1776 1776 self.datablock.fill(0)
1777 1777 self.profileIndex = 0
1778 1778 self.setNextFile()
1779 1779
1780 1780 if self.profileIndex == 0:
1781 1781 self.setBasicHeader()
1782 1782
1783 1783 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1784 1784
1785 1785 self.profileIndex += 1
1786 1786
1787 1787 if self.hasAllDataInBuffer():
1788 1788 #if self.flagIsNewFile:
1789 1789 self.writeNextBlock()
1790 1790 # self.setFirstHeader()
1791 1791
1792 1792 return 1
1793 1793
1794 1794 def __getProcessFlags(self):
1795 1795
1796 1796 processFlags = 0
1797 1797
1798 1798 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1799 1799 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1800 1800 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1801 1801 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1802 1802 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1803 1803 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1804 1804
1805 1805 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1806 1806
1807 1807
1808 1808
1809 1809 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1810 1810 PROCFLAG.DATATYPE_SHORT,
1811 1811 PROCFLAG.DATATYPE_LONG,
1812 1812 PROCFLAG.DATATYPE_INT64,
1813 1813 PROCFLAG.DATATYPE_FLOAT,
1814 1814 PROCFLAG.DATATYPE_DOUBLE]
1815 1815
1816 1816
1817 1817 for index in range(len(dtypeList)):
1818 1818 if self.dataOut.dtype == dtypeList[index]:
1819 1819 dtypeValue = datatypeValueList[index]
1820 1820 break
1821 1821
1822 1822 processFlags += dtypeValue
1823 1823
1824 1824 if self.dataOut.flagDecodeData:
1825 1825 processFlags += PROCFLAG.DECODE_DATA
1826 1826
1827 1827 if self.dataOut.flagDeflipData:
1828 1828 processFlags += PROCFLAG.DEFLIP_DATA
1829 1829
1830 1830 if self.dataOut.code != None:
1831 1831 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1832 1832
1833 1833 if self.dataOut.nCohInt > 1:
1834 1834 processFlags += PROCFLAG.COHERENT_INTEGRATION
1835 1835
1836 1836 return processFlags
1837 1837
1838 1838
1839 1839 def __getBlockSize(self):
1840 1840 '''
1841 1841 This method determines the number of bytes of a Voltage data block
1842 1842 '''
1843 1843
1844 1844 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1845 1845 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1846 1846 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1847 1847 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1848 1848 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1849 1849 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1850 1850
1851 1851 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1852 1852 datatypeValueList = [1,2,4,8,4,8]
1853 1853 for index in range(len(dtypeList)):
1854 1854 if self.dataOut.dtype == dtypeList[index]:
1855 1855 datatypeValue = datatypeValueList[index]
1856 1856 break
1857 1857
1858 1858 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
1859 1859
1860 1860 return blocksize
1861 1861
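# Illustrative example (assumed values): with 200 heights, 2 channels,
# 100 profiles per block and short integer samples (datatypeValue = 2 bytes),
# blocksize = 200 * 2 * 100 * 2 * 2 = 160000 bytes; the trailing factor of 2
# accounts for the real and imaginary parts of each sample.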
1862 1862 def setFirstHeader(self):
1863 1863
1864 1864 """
1865 1865 Gets a copy of the First Header
1866 1866
1867 1867 Affected:
1868 1868 self.systemHeaderObj
1869 1869 self.radarControllerHeaderObj
1870 1870 self.dtype
1871 1871
1872 1872 Return:
1873 1873 None
1874 1874 """
1875 1875
1876 1876 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1877 1877 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1878 1878 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1879 1879
1880 1880 self.setBasicHeader()
1881 1881
1882 1882 processingHeaderSize = 40 # bytes
1883 1883 self.processingHeaderObj.dtype = 0 # Voltage
1884 1884 self.processingHeaderObj.blockSize = self.__getBlockSize()
1885 1885 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1886 1886 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1887 1887 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1888 1888 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1889 1889 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1890 1890 self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
1891 1891 self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type
1892 1892
1893 1893 # if self.dataOut.code != None:
1894 1894 # self.processingHeaderObj.code = self.dataOut.code
1895 1895 # self.processingHeaderObj.nCode = self.dataOut.nCode
1896 1896 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
1897 1897 # codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1898 1898 # processingHeaderSize += codesize
1899 1899
1900 1900 if self.processingHeaderObj.nWindows != 0:
1901 1901 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1902 1902 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1903 1903 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1904 1904 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1905 1905 processingHeaderSize += 12
1906 1906
1907 1907 self.processingHeaderObj.size = processingHeaderSize
1908 1908
1909 1909 class SpectraReader(JRODataReader):
1910 1910 """
1911 1911 This class reads spectra data from processed files (.pdata). Data is always
1912 1912 read in blocks. The data read (3-dimensional arrays)
1913 1913 is stored in three buffers: Self Spectra, Cross Spectra and DC Channel.
1914 1914 
1915 1915 equalChannelPairs * heights * profiles (Self Spectra)
1916 1916 differentChannelPairs * heights * profiles (Cross Spectra)
1917 1917 channels * heights (DC Channels)
1918 1918 
1919 1919 This class contains instances (objects) of the BasicHeader, SystemHeader,
1920 1920 RadarControllerHeader and Spectra classes. The first three store the data header
1921 1921 information (metadata), and the fourth (Spectra) retrieves and stores one block of
1922 1922 data from the "buffer" every time the "getData" method is called.
1923 1923
1924 1924 Example:
1925 1925 dpath = "/home/myuser/data"
1926 1926
1927 1927 startTime = datetime.datetime(2010,1,20,0,0,0)
1928 1928 
1929 1929 endTime = datetime.datetime(2010,1,21,23,59,59)
1930 1930
1931 1931 readerObj = SpectraReader()
1932 1932
1933 1933 readerObj.setup(dpath, startTime, endTime)
1934 1934
1935 1935 while(True):
1936 1936
1937 1937 readerObj.getData()
1938 1938
1939 1939 print readerObj.data_spc
1940 1940
1941 1941 print readerObj.data_cspc
1942 1942
1943 1943 print readerObj.data_dc
1944 1944
1945 1945 if readerObj.flagNoMoreFiles:
1946 1946 break
1947 1947
1948 1948 """
1949 1949
1950 1950 pts2read_SelfSpectra = 0
1951 1951
1952 1952 pts2read_CrossSpectra = 0
1953 1953
1954 1954 pts2read_DCchannels = 0
1955 1955
1956 1956 ext = ".pdata"
1957 1957
1958 1958 optchar = "P"
1959 1959
1960 1960 dataOut = None
1961 1961
1962 1962 nRdChannels = None
1963 1963
1964 1964 nRdPairs = None
1965 1965
1966 1966 rdPairList = []
1967 1967
1968 1968 def __init__(self):
1969 1969 """
1970 1970 Constructor of the SpectraReader class for reading spectra data.
1971 1971 
1972 1972 Inputs:
1973 1973 dataOut : object of the Spectra class. This object is used to
1974 1974 store one block of data every time a request is made
1975 1975 (getData). The data is obtained from the read buffer;
1976 1976 if the buffer is empty a new data block is read
1977 1977 first.
1978 1978 This object is created internally by the constructor.
1979 1979
1980 1980 Affected:
1981 1981 self.dataOut
1982 1982
1983 1983 Return : None
1984 1984 """
1985 1985
1986 1986 self.isConfig = False
1987 1987
1988 1988 self.pts2read_SelfSpectra = 0
1989 1989
1990 1990 self.pts2read_CrossSpectra = 0
1991 1991
1992 1992 self.pts2read_DCchannels = 0
1993 1993
1994 1994 self.datablock = None
1995 1995
1996 1996 self.utc = None
1997 1997
1998 1998 self.ext = ".pdata"
1999 1999
2000 2000 self.optchar = "P"
2001 2001
2002 2002 self.basicHeaderObj = BasicHeader(LOCALTIME)
2003 2003
2004 2004 self.systemHeaderObj = SystemHeader()
2005 2005
2006 2006 self.radarControllerHeaderObj = RadarControllerHeader()
2007 2007
2008 2008 self.processingHeaderObj = ProcessingHeader()
2009 2009
2010 2010 self.online = 0
2011 2011
2012 2012 self.fp = None
2013 2013
2014 2014 self.idFile = None
2015 2015
2016 2016 self.dtype = None
2017 2017
2018 2018 self.fileSizeByHeader = None
2019 2019
2020 2020 self.filenameList = []
2021 2021
2022 2022 self.filename = None
2023 2023
2024 2024 self.fileSize = None
2025 2025
2026 2026 self.firstHeaderSize = 0
2027 2027
2028 2028 self.basicHeaderSize = 24
2029 2029
2030 2030 self.pathList = []
2031 2031
2032 2032 self.lastUTTime = 0
2033 2033
2034 2034 self.maxTimeStep = 30
2035 2035
2036 2036 self.flagNoMoreFiles = 0
2037 2037
2038 2038 self.set = 0
2039 2039
2040 2040 self.path = None
2041 2041
2042 2042 self.delay = 60 #seconds
2043 2043
2044 2044 self.nTries = 3 #number of retries
2045 2045
2046 2046 self.nFiles = 3 #number of files for searching
2047 2047
2048 2048 self.nReadBlocks = 0
2049 2049
2050 2050 self.flagIsNewFile = 1
2051 2051
2052 2052 self.__isFirstTimeOnline = 1
2053 2053
2054 2054 self.ippSeconds = 0
2055 2055
2056 2056 self.flagTimeBlock = 0
2057 2057
2058 2058 self.flagIsNewBlock = 0
2059 2059
2060 2060 self.nTotalBlocks = 0
2061 2061
2062 2062 self.blocksize = 0
2063 2063
2064 2064 self.dataOut = self.createObjByDefault()
2065 2065
2066 2066 self.profileIndex = 1 #Always
2067 2067
2068 2068
2069 2069 def createObjByDefault(self):
2070 2070
2071 2071 dataObj = Spectra()
2072 2072
2073 2073 return dataObj
2074 2074
2075 2075 def __hasNotDataInBuffer(self):
2076 2076 return 1
2077 2077
2078 2078
2079 2079 def getBlockDimension(self):
2080 2080 """
2081 2081 Gets the number of points to read per data block
2082 2082
2083 2083 Affected:
2084 2084 self.nRdChannels
2085 2085 self.nRdPairs
2086 2086 self.pts2read_SelfSpectra
2087 2087 self.pts2read_CrossSpectra
2088 2088 self.pts2read_DCchannels
2089 2089 self.blocksize
2090 2090 self.dataOut.nChannels
2091 2091 self.dataOut.nPairs
2092 2092
2093 2093 Return:
2094 2094 None
2095 2095 """
2096 2096 self.nRdChannels = 0
2097 2097 self.nRdPairs = 0
2098 2098 self.rdPairList = []
2099 2099
2100 2100 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2101 2101 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2102 2102 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectrum)
2103 2103 else:
2104 2104 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectrum)
2105 2105 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2106 2106
2107 2107 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2108 2108
2109 2109 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2110 2110 self.blocksize = self.pts2read_SelfSpectra
2111 2111
2112 2112 if self.processingHeaderObj.flag_cspc:
2113 2113 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2114 2114 self.blocksize += self.pts2read_CrossSpectra
2115 2115
2116 2116 if self.processingHeaderObj.flag_dc:
2117 2117 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2118 2118 self.blocksize += self.pts2read_DCchannels
2119 2119
2120 2120 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
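# Illustrative example (assumed header values): a spectraComb of [0,0, 1,1, 0,1]
# is decoded as nRdChannels = 2 (self spectra of channels 0 and 1), nRdPairs = 1
# and rdPairList = [(0, 1)] (the cross spectrum between channels 0 and 1).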
2121 2121
2122 2122
2123 2123 def readBlock(self):
2124 2124 """
2125 2125 Reads the data block from the current position of the file pointer
2126 2126 (self.fp) and updates all the parameters related to the data block
2127 2127 (metadata + data). The data read is stored in the buffers and the buffer
2128 2128 counter is reset to 0
2129 2129
2130 2130 Return: None
2131 2131
2132 2132 Affected:
2133 2133
2134 2134 self.flagIsNewFile
2135 2135 self.flagIsNewBlock
2136 2136 self.nTotalBlocks
2137 2137 self.data_spc
2138 2138 self.data_cspc
2139 2139 self.data_dc
2140 2140
2141 2141 Exceptions:
2142 2142 Si un bloque leido no es un bloque valido
2143 2143 """
2144 2144 blockOk_flag = False
2145 2145 fpointer = self.fp.tell()
2146 2146
2147 2147 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2148 2148 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2149 2149
2150 2150 if self.processingHeaderObj.flag_cspc:
2151 2151 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2152 2152 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2153 2153
2154 2154 if self.processingHeaderObj.flag_dc:
2155 2155 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2156 2156 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2157 2157
2158 2158
2159 2159 if not(self.processingHeaderObj.shif_fft):
2160 2160 #shift the spectra to the right along axis 2 by half the profiles
2161 2161 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2162 2162 spc = numpy.roll( spc, shift , axis=2 )
2163 2163 
2164 2164 if self.processingHeaderObj.flag_cspc:
2165 2165 #shift the cross spectra to the right along axis 2 as well
2166 2166 cspc = numpy.roll( cspc, shift, axis=2 )
2167 2167
2168 2168 # self.processingHeaderObj.shif_fft = True
2169 2169
2170 2170 spc = numpy.transpose( spc, (0,2,1) )
2171 2171 self.data_spc = spc
2172 2172
2173 2173 if self.processingHeaderObj.flag_cspc:
2174 2174 cspc = numpy.transpose( cspc, (0,2,1) )
2175 2175 self.data_cspc = cspc['real'] + cspc['imag']*1j
2176 2176 else:
2177 2177 self.data_cspc = None
2178 2178
2179 2179 if self.processingHeaderObj.flag_dc:
2180 2180 self.data_dc = dc['real'] + dc['imag']*1j
2181 2181 else:
2182 2182 self.data_dc = None
2183 2183
2184 2184 self.flagIsNewFile = 0
2185 2185 self.flagIsNewBlock = 1
2186 2186
2187 2187 self.nTotalBlocks += 1
2188 2188 self.nReadBlocks += 1
2189 2189
2190 2190 return 1
2191 2191
2192 2192 def getFirstHeader(self):
2193 2193
2194 2194 self.dataOut.dtype = self.dtype
2195 2195
2196 2196 self.dataOut.nPairs = self.nRdPairs
2197 2197
2198 2198 self.dataOut.pairsList = self.rdPairList
2199 2199
2200 2200 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2201 2201
2202 2202 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2203 2203
2204 2204 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2205 2205
2206 2206 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2207 2207
2208 2208 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2209 2209
2210 2210 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2211 2211
2212 2212 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2213 2213
2214 2214 self.dataOut.ippSeconds = self.ippSeconds
2215 2215
2216 2216 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2217 2217
2218 2218 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2219 2219
2220 2220 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2221 2221
2222 2222 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2223 2223
2224 2224 self.dataOut.flagDecodeData = False #assume the data is not decoded
2225 2225 
2226 2226 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2227 2227
2228 2228 if self.processingHeaderObj.code != None:
2229 2229
2230 2230 self.dataOut.nCode = self.processingHeaderObj.nCode
2231 2231
2232 2232 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2233 2233
2234 2234 self.dataOut.code = self.processingHeaderObj.code
2235 2235
2236 2236 self.dataOut.flagDecodeData = True
2237 2237
2238 2238 def getData(self):
2239 2239 """
2240 2240 Copies the read buffer to the "Spectra" class,
2241 2241 together with all its associated parameters (metadata). When there is no data in
2242 2242 the read buffer, a new data block is read using "readNextBlock".
2243 2243 
2244 2244 Return:
2245 2245 0 : if there are no more files available
2246 2246 1 : if the buffer was copied successfully
2247 2247
2248 2248 Affected:
2249 2249 self.dataOut
2250 2250
2251 2251 self.flagTimeBlock
2252 2252 self.flagIsNewBlock
2253 2253 """
2254 2254
2255 2255 if self.flagNoMoreFiles:
2256 2256 self.dataOut.flagNoData = True
2257 2257 print 'Process finished'
2258 2258 return 0
2259 2259
2260 2260 self.flagTimeBlock = 0
2261 2261 self.flagIsNewBlock = 0
2262 2262
2263 2263 if self.__hasNotDataInBuffer():
2264 2264
2265 2265 if not( self.readNextBlock() ):
2266 2266 self.dataOut.flagNoData = True
2267 2267 return 0
2268 2268
2269 2269 #data is a 3-dimensional numpy array (profiles, heights and channels)
2270 2270
2271 2271 if self.data_dc is None:
2272 2272 self.dataOut.flagNoData = True
2273 2273 return 0
2274 2274
2275 2275 self.getBasicHeader()
2276 2276
2277 2277 self.getFirstHeader()
2278 2278
2279 2279 self.dataOut.data_spc = self.data_spc
2280 2280
2281 2281 self.dataOut.data_cspc = self.data_cspc
2282 2282
2283 2283 self.dataOut.data_dc = self.data_dc
2284 2284
2285 2285 self.dataOut.flagNoData = False
2286 2286
2287 2287 self.dataOut.realtime = self.online
2288 2288
2289 2289 return self.dataOut.data_spc
2290 2290
2291 2291
2292 2292 class SpectraWriter(JRODataWriter):
2293 2293
2294 2294 """
2295 2295 This class writes spectra data to processed files (.pdata). Data is always
2296 2296 written in blocks.
2297 2297 """
2298 2298
2299 2299 ext = ".pdata"
2300 2300
2301 2301 optchar = "P"
2302 2302
2303 2303 shape_spc_Buffer = None
2304 2304
2305 2305 shape_cspc_Buffer = None
2306 2306
2307 2307 shape_dc_Buffer = None
2308 2308
2309 2309 data_spc = None
2310 2310
2311 2311 data_cspc = None
2312 2312
2313 2313 data_dc = None
2314 2314
2315 2315 # dataOut = None
2316 2316
2317 2317 def __init__(self):
2318 2318 """
2319 2319 Constructor of the SpectraWriter class for writing spectra data.
2320 2320
2321 2321 Affected:
2322 2322 self.dataOut
2323 2323 self.basicHeaderObj
2324 2324 self.systemHeaderObj
2325 2325 self.radarControllerHeaderObj
2326 2326 self.processingHeaderObj
2327 2327
2328 2328 Return: None
2329 2329 """
2330 2330
2331 2331 self.isConfig = False
2332 2332
2333 2333 self.nTotalBlocks = 0
2334 2334
2335 2335 self.data_spc = None
2336 2336
2337 2337 self.data_cspc = None
2338 2338
2339 2339 self.data_dc = None
2340 2340
2341 2341 self.fp = None
2342 2342
2343 2343 self.flagIsNewFile = 1
2344 2344
2345 2345 self.nTotalBlocks = 0
2346 2346
2347 2347 self.flagIsNewBlock = 0
2348 2348
2349 2349 self.setFile = None
2350 2350
2351 2351 self.dtype = None
2352 2352
2353 2353 self.path = None
2354 2354
2355 2355 self.noMoreFiles = 0
2356 2356
2357 2357 self.filename = None
2358 2358
2359 2359 self.basicHeaderObj = BasicHeader(LOCALTIME)
2360 2360
2361 2361 self.systemHeaderObj = SystemHeader()
2362 2362
2363 2363 self.radarControllerHeaderObj = RadarControllerHeader()
2364 2364
2365 2365 self.processingHeaderObj = ProcessingHeader()
2366 2366
2367 2367
2368 2368 def hasAllDataInBuffer(self):
2369 2369 return 1
2370 2370
2371 2371
2372 2372 def setBlockDimension(self):
2373 2373 """
2374 2374 Gets the dimensional shapes of the data sub-blocks that make up a block
2375 2375
2376 2376 Affected:
2377 2377 self.shape_spc_Buffer
2378 2378 self.shape_cspc_Buffer
2379 2379 self.shape_dc_Buffer
2380 2380
2381 2381 Return: None
2382 2382 """
2383 2383 self.shape_spc_Buffer = (self.dataOut.nChannels,
2384 2384 self.processingHeaderObj.nHeights,
2385 2385 self.processingHeaderObj.profilesPerBlock)
2386 2386
2387 2387 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2388 2388 self.processingHeaderObj.nHeights,
2389 2389 self.processingHeaderObj.profilesPerBlock)
2390 2390
2391 2391 self.shape_dc_Buffer = (self.dataOut.nChannels,
2392 2392 self.processingHeaderObj.nHeights)
2393 2393
2394 2394
2395 2395 def writeBlock(self):
2396 2396 """
2397 2397 Writes the buffer to the designated file
2398 2398
2399 2399 Affected:
2400 2400 self.data_spc
2401 2401 self.data_cspc
2402 2402 self.data_dc
2403 2403 self.flagIsNewFile
2404 2404 self.flagIsNewBlock
2405 2405 self.nTotalBlocks
2406 2406 self.nWriteBlocks
2407 2407
2408 2408 Return: None
2409 2409 """
2410 2410
2411 2411 spc = numpy.transpose( self.data_spc, (0,2,1) )
2412 2412 if not( self.processingHeaderObj.shif_fft ):
2413 2413 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half the profiles
2414 2414 data = spc.reshape((-1))
2415 2415 data = data.astype(self.dtype[0])
2416 2416 data.tofile(self.fp)
2417 2417
2418 2418 if self.data_cspc is not None:
2419 2419 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2420 2420 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2421 2421 if not( self.processingHeaderObj.shif_fft ):
2422 2422 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half the profiles
2423 2423 data['real'] = cspc.real
2424 2424 data['imag'] = cspc.imag
2425 2425 data = data.reshape((-1))
2426 2426 data.tofile(self.fp)
2427 2427
2428 2428 if self.data_dc is not None:
2429 2429 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2430 2430 dc = self.data_dc
2431 2431 data['real'] = dc.real
2432 2432 data['imag'] = dc.imag
2433 2433 data = data.reshape((-1))
2434 2434 data.tofile(self.fp)
2435 2435
2436 2436 self.data_spc.fill(0)
2437 2437
2438 2438 if self.data_dc is not None:
2439 2439 self.data_dc.fill(0)
2440 2440 
2441 2441 if self.data_cspc is not None:
2442 2442 self.data_cspc.fill(0)
2443 2443
2444 2444 self.flagIsNewFile = 0
2445 2445 self.flagIsNewBlock = 1
2446 2446 self.nTotalBlocks += 1
2447 2447 self.nWriteBlocks += 1
2448 2448 self.blockIndex += 1
2449 2449
2450 2450
2451 2451 def putData(self):
2452 2452 """
2453 2453 Fills a data block and then writes it to a file
2454 2454
2455 2455 Affected:
2456 2456 self.data_spc
2457 2457 self.data_cspc
2458 2458 self.data_dc
2459 2459
2460 2460 Return:
2461 2461 0 : if there is no data or no more files can be written
2462 2462 1 : if a data block was written to a file
2463 2463 """
2464 2464
2465 2465 if self.dataOut.flagNoData:
2466 2466 return 0
2467 2467
2468 2468 self.flagIsNewBlock = 0
2469 2469
2470 2470 if self.dataOut.flagTimeBlock:
2471 2471 self.data_spc.fill(0)
2472 2472 self.data_cspc.fill(0)
2473 2473 self.data_dc.fill(0)
2474 2474 self.setNextFile()
2475 2475
2476 2476 if self.flagIsNewFile == 0:
2477 2477 self.setBasicHeader()
2478 2478
2479 2479 self.data_spc = self.dataOut.data_spc.copy()
2480 2480 if self.dataOut.data_cspc is not None:
2481 2481 self.data_cspc = self.dataOut.data_cspc.copy()
2482 2482 self.data_dc = self.dataOut.data_dc.copy()
2483 2483
2484 2484 # #self.processingHeaderObj.dataBlocksPerFile)
2485 2485 if self.hasAllDataInBuffer():
2486 2486 # self.setFirstHeader()
2487 2487 self.writeNextBlock()
2488 2488
2489 2489 return 1
2490 2490
2491 2491
2492 2492 def __getProcessFlags(self):
2493 2493
2494 2494 processFlags = 0
2495 2495
2496 2496 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2497 2497 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2498 2498 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2499 2499 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2500 2500 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2501 2501 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2502 2502
2503 2503 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2504 2504
2505 2505
2506 2506
2507 2507 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2508 2508 PROCFLAG.DATATYPE_SHORT,
2509 2509 PROCFLAG.DATATYPE_LONG,
2510 2510 PROCFLAG.DATATYPE_INT64,
2511 2511 PROCFLAG.DATATYPE_FLOAT,
2512 2512 PROCFLAG.DATATYPE_DOUBLE]
2513 2513
2514 2514
2515 2515 for index in range(len(dtypeList)):
2516 2516 if self.dataOut.dtype == dtypeList[index]:
2517 2517 dtypeValue = datatypeValueList[index]
2518 2518 break
2519 2519
2520 2520 processFlags += dtypeValue
2521 2521
2522 2522 if self.dataOut.flagDecodeData:
2523 2523 processFlags += PROCFLAG.DECODE_DATA
2524 2524
2525 2525 if self.dataOut.flagDeflipData:
2526 2526 processFlags += PROCFLAG.DEFLIP_DATA
2527 2527
2528 2528 if self.dataOut.code != None:
2529 2529 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2530 2530
2531 2531 if self.dataOut.nIncohInt > 1:
2532 2532 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2533 2533
2534 2534 if self.dataOut.data_dc is not None:
2535 2535 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2536 2536
2537 2537 return processFlags
2538 2538
2539 2539
2540 2540 def __getBlockSize(self):
2541 2541 '''
2542 2542 This method determines the number of bytes of a Spectra data block
2543 2543 '''
2544 2544
2545 2545 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2546 2546 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2547 2547 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2548 2548 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2549 2549 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2550 2550 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2551 2551
2552 2552 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2553 2553 datatypeValueList = [1,2,4,8,4,8]
2554 2554 for index in range(len(dtypeList)):
2555 2555 if self.dataOut.dtype == dtypeList[index]:
2556 2556 datatypeValue = datatypeValueList[index]
2557 2557 break
2558 2558
2559 2559
2560 2560 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2561 2561
2562 2562 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2563 2563 blocksize = (pts2write_SelfSpectra*datatypeValue)
2564 2564
2565 2565 if self.dataOut.data_cspc is not None:
2566 2566 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2567 2567 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2568 2568
2569 2569 if self.dataOut.data_dc is not None:
2570 2570 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2571 2571 blocksize += (pts2write_DCchannels*datatypeValue*2)
2572 2572
2573 2573 blocksize = blocksize #* datatypeValue * 2 #FIXME: review this factor
2574 2574
2575 2575 return blocksize
2576 2576
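# Illustrative example (assumed values): with 200 heights, 128 FFT points,
# 2 channels, 1 pair and float samples (datatypeValue = 4 bytes):
#     self spectra : 2 * (200*128) * 4     = 204800 bytes
#     cross spectra: 1 * (200*128) * 4 * 2 = 204800 bytes (real + imag)
#     DC channels  : 2 * 200 * 4 * 2       =   3200 bytes (real + imag)
# giving blocksize = 412800 bytes.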
2577 2577 def setFirstHeader(self):
2578 2578
2579 2579 """
2580 2580 Gets a copy of the First Header
2581 2581
2582 2582 Affected:
2583 2583 self.systemHeaderObj
2584 2584 self.radarControllerHeaderObj
2585 2585 self.dtype
2586 2586
2587 2587 Return:
2588 2588 None
2589 2589 """
2590 2590
2591 2591 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2592 2592 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2593 2593 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2594 2594
2595 2595 self.setBasicHeader()
2596 2596
2597 2597 processingHeaderSize = 40 # bytes
2598 2598 self.processingHeaderObj.dtype = 1 # Spectra
2599 2599 self.processingHeaderObj.blockSize = self.__getBlockSize()
2600 2600 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2601 2601 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2602 2602 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2603 2603 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2604 2604 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the timeInterval value
2605 2605 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2606 2606 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2607 2607 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2608 2608
2609 2609 if self.processingHeaderObj.totalSpectra > 0:
2610 2610 channelList = []
2611 2611 for channel in range(self.dataOut.nChannels):
2612 2612 channelList.append(channel)
2613 2613 channelList.append(channel)
2614 2614
2615 2615 pairsList = []
2616 2616 if self.dataOut.nPairs > 0:
2617 2617 for pair in self.dataOut.pairsList:
2618 2618 pairsList.append(pair[0])
2619 2619 pairsList.append(pair[1])
2620 2620
2621 2621 spectraComb = channelList + pairsList
2622 2622 spectraComb = numpy.array(spectraComb,dtype="u1")
2623 2623 self.processingHeaderObj.spectraComb = spectraComb
2624 2624 sizeOfSpcComb = len(spectraComb)
2625 2625 processingHeaderSize += sizeOfSpcComb
2626 2626
2627 2627 # The processing header should not have information about code
2628 2628 # if self.dataOut.code != None:
2629 2629 # self.processingHeaderObj.code = self.dataOut.code
2630 2630 # self.processingHeaderObj.nCode = self.dataOut.nCode
2631 2631 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2632 2632 # nCodeSize = 4 # bytes
2633 2633 # nBaudSize = 4 # bytes
2634 2634 # codeSize = 4 # bytes
2635 2635 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2636 2636 # processingHeaderSize += sizeOfCode
2637 2637
2638 2638 if self.processingHeaderObj.nWindows != 0:
2639 2639 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2640 2640 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2641 2641 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2642 2642 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2643 2643 sizeOfFirstHeight = 4
2644 2644 sizeOfdeltaHeight = 4
2645 2645 sizeOfnHeights = 4
2646 2646 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2647 2647 processingHeaderSize += sizeOfWindows
2648 2648
2649 2649 self.processingHeaderObj.size = processingHeaderSize
2650 2650
2651 2651 class SpectraHeisWriter(Operation):
2652 2652 # set = None
2653 2653 setFile = None
2654 2654 idblock = None
2655 2655 doypath = None
2656 2656 subfolder = None
2657 2657
2658 2658 def __init__(self):
2659 2659 self.wrObj = FITS()
2660 2660 # self.dataOut = dataOut
2661 2661 self.nTotalBlocks=0
2662 2662 # self.set = None
2663 2663 self.setFile = None
2664 2664 self.idblock = 0
2665 2665 self.wrpath = None
2666 2666 self.doypath = None
2667 2667 self.subfolder = None
2668 2668 self.isConfig = False
2669 2669
2670 2670 def isNumber(self, str):
2671 2671 """
2672 2672 Checks whether the characters of a string can be converted to a number.
2673 2673 
2674 2674 Exceptions:
2675 2675 if the given string cannot be converted to a number
2676 2676 Input:
2677 2677 str, string to be analyzed to determine whether it can be converted to a number
2678 2678 
2679 2679 Return:
2680 2680 True : if the string is numeric
2681 2681 False : if the string is not numeric
2682 2682 """
2683 2683 try:
2684 2684 float( str )
2685 2685 return True
2686 2686 except:
2687 2687 return False
2688 2688
2689 2689 def setup(self, dataOut, wrpath):
2690 2690
2691 2691 if not(os.path.exists(wrpath)):
2692 2692 os.mkdir(wrpath)
2693 2693
2694 2694 self.wrpath = wrpath
2695 2695 # self.setFile = 0
2696 2696 self.dataOut = dataOut
2697 2697
2698 2698 def putData(self):
2699 2699 name= time.localtime( self.dataOut.utctime)
2700 2700 ext=".fits"
2701 2701
2702 2702 if self.doypath == None:
2703 2703 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2704 2704 self.doypath = os.path.join( self.wrpath, self.subfolder )
2705 2705 os.mkdir(self.doypath)
2706 2706
2707 2707 if self.setFile == None:
2708 2708 # self.set = self.dataOut.set
2709 2709 self.setFile = 0
2710 2710 # if self.set != self.dataOut.set:
2711 2711 ## self.set = self.dataOut.set
2712 2712 # self.setFile = 0
2713 2713
2714 2714 #make the filename
2715 2715 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2716 2716
2717 2717 filename = os.path.join(self.wrpath,self.subfolder, file)
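# Illustrative example (assumed values): for year 2010, day-of-year 20 and
# setFile 0, the generated name is 'D2010020_000.fits', written inside a
# subfolder named 'F2010020_<timestamp>'.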
2718 2718
2719 2719 idblock = numpy.array([self.idblock],dtype="int64")
2720 2720 header=self.wrObj.cFImage(idblock=idblock,
2721 2721 year=time.gmtime(self.dataOut.utctime).tm_year,
2722 2722 month=time.gmtime(self.dataOut.utctime).tm_mon,
2723 2723 day=time.gmtime(self.dataOut.utctime).tm_mday,
2724 2724 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2725 2725 minute=time.gmtime(self.dataOut.utctime).tm_min,
2726 2726 second=time.gmtime(self.dataOut.utctime).tm_sec)
2727 2727
2728 2728 c=3E8
2729 2729 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2730 2730 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2731 2731
2732 2732 colList = []
2733 2733
2734 2734 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2735 2735
2736 2736 colList.append(colFreq)
2737 2737
2738 2738 nchannel=self.dataOut.nChannels
2739 2739
2740 2740 for i in range(nchannel):
2741 2741 col = self.wrObj.writeData(name="PCh"+str(i+1),
2742 2742 format=str(self.dataOut.nFFTPoints)+'E',
2743 2743 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2744 2744
2745 2745 colList.append(col)
2746 2746
2747 2747 data=self.wrObj.Ctable(colList=colList)
2748 2748
2749 2749 self.wrObj.CFile(header,data)
2750 2750
2751 2751 self.wrObj.wFile(filename)
2752 2752
2753 2753 #update the setFile
2754 2754 self.setFile += 1
2755 2755 self.idblock += 1
2756 2756
2757 2757 return 1
2758 2758
2759 2759 def run(self, dataOut, **kwargs):
2760 2760
2761 2761 if not(self.isConfig):
2762 2762
2763 2763 self.setup(dataOut, **kwargs)
2764 2764 self.isConfig = True
2765 2765
2766 2766 self.putData()
2767 2767
2768 2768
2769 class FITS:
2770 name=None
2771 format=None
2772 array =None
2773 data =None
2774 thdulist=None
2775 prihdr=None
2776 hdu=None
2777
2778 def __init__(self):
2779
2780 pass
2781
2782 def setColF(self,name,format,array):
2783 self.name=name
2784 self.format=format
2785 self.array=array
2786 a1=numpy.array([self.array],dtype=numpy.float32)
2787 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2788 return self.col1
2789
2790 # def setColP(self,name,format,data):
2791 # self.name=name
2792 # self.format=format
2793 # self.data=data
2794 # a2=numpy.array([self.data],dtype=numpy.float32)
2795 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2796 # return self.col2
2797
2798
2799 def writeData(self,name,format,data):
2800 self.name=name
2801 self.format=format
2802 self.data=data
2803 a2=numpy.array([self.data],dtype=numpy.float32)
2804 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2805 return self.col2
2806
2807 def cFImage(self,idblock,year,month,day,hour,minute,second):
2808 self.hdu= pyfits.PrimaryHDU(idblock)
2809 self.hdu.header.set("Year",year)
2810 self.hdu.header.set("Month",month)
2811 self.hdu.header.set("Day",day)
2812 self.hdu.header.set("Hour",hour)
2813 self.hdu.header.set("Minute",minute)
2814 self.hdu.header.set("Second",second)
2815 return self.hdu
2816
2817
2818 def Ctable(self,colList):
2819 self.cols=pyfits.ColDefs(colList)
2820 self.tbhdu = pyfits.new_table(self.cols)
2821 return self.tbhdu
2822
2823
2824 def CFile(self,hdu,tbhdu):
2825 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2826
2827 def wFile(self,filename):
2828 if os.path.isfile(filename):
2829 os.remove(filename)
2830 self.thdulist.writeto(filename)
2831
2832 2769
2833 2770 class ParameterConf:
2834 2771 ELEMENTNAME = 'Parameter'
2835 2772 def __init__(self):
2836 2773 self.name = ''
2837 2774 self.value = ''
2838 2775
2839 2776 def readXml(self, parmElement):
2840 2777 self.name = parmElement.get('name')
2841 2778 self.value = parmElement.get('value')
2842 2779
2843 2780 def getElementName(self):
2844 2781 return self.ELEMENTNAME
2845 2782
2846 2783 class Metadata:
2847 2784
2848 2785 def __init__(self, filename):
2849 2786 self.parmConfObjList = []
2850 2787 self.readXml(filename)
2851 2788
2852 2789 def readXml(self, filename):
2853 2790 self.projectElement = None
2854 2791 self.procUnitConfObjDict = {}
2855 2792 self.projectElement = ElementTree().parse(filename)
2856 2793 self.project = self.projectElement.tag
2857 2794
2858 2795 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2859 2796
2860 2797 for parmElement in parmElementList:
2861 2798 parmConfObj = ParameterConf()
2862 2799 parmConfObj.readXml(parmElement)
2863 2800 self.parmConfObjList.append(parmConfObj)
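# Illustrative metadata file (hypothetical content); every <Parameter>
# element found in the XML tree is collected, whatever the root tag is:
#
#     <Experiment>
#         <Parameter name="EXPNAME" value="my_experiment"/>
#         <Parameter name="DATATYPE" value="SPECTRA"/>
#     </Experiment>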
2864 2801
2865 2802 class FitsWriter(Operation):
2866 2803
2867 2804 def __init__(self):
2868 2805 self.isConfig = False
2869 2806 self.dataBlocksPerFile = None
2870 2807 self.blockIndex = 0
2871 2808 self.flagIsNewFile = 1
2872 2809 self.fitsObj = None
2873 2810 self.optchar = 'P'
2874 2811 self.ext = '.fits'
2875 2812 self.setFile = 0
2876 2813
2877 2814 def setFitsHeader(self, dataOut, metadatafile):
2878 2815
2879 2816 header_data = pyfits.PrimaryHDU()
2880 2817
2881 2818 metadata4fits = Metadata(metadatafile)
2882 2819 for parameter in metadata4fits.parmConfObjList:
2883 2820 parm_name = parameter.name
2884 2821 parm_value = parameter.value
2885 2822
2886 2823 # if parm_value == 'fromdatadatetime':
2887 2824 # value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2888 2825 # elif parm_value == 'fromdataheights':
2889 2826 # value = dataOut.nHeights
2890 2827 # elif parm_value == 'fromdatachannel':
2891 2828 # value = dataOut.nChannels
2892 2829 # elif parm_value == 'fromdatasamples':
2893 2830 # value = dataOut.nFFTPoints
2894 2831 # else:
2895 2832 # value = parm_value
2896 2833
2897 2834 header_data.header[parm_name] = parm_value
2898 2835
2899 2836
2900 2837 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2901 2838 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
2902 2839 header_data.header['NCHANNELS'] = dataOut.nChannels
2903 2840 #header_data.header['HEIGHTS'] = dataOut.heightList
2904 2841 header_data.header['NHEIGHTS'] = dataOut.nHeights
2905 2842
2906 2843 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
2907 2844 header_data.header['NCOHINT'] = dataOut.nCohInt
2908 2845 header_data.header['NINCOHINT'] = dataOut.nIncohInt
2909 2846 header_data.header['TIMEZONE'] = dataOut.timeZone
2910 2847 header_data.header['NBLOCK'] = self.blockIndex
2911 2848
2912 2849 header_data.writeto(self.filename)
2913 2850
2914 2851 self.addExtension(dataOut.heightList,'HEIGHTLIST')
2915 2852
2916 2853
2917 2854 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2918 2855
2919 2856 self.path = path
2920 2857 self.dataOut = dataOut
2921 2858 self.metadatafile = metadatafile
2922 2859 self.dataBlocksPerFile = dataBlocksPerFile
2923 2860
2924 2861 def open(self):
2925 2862 self.fitsObj = pyfits.open(self.filename, mode='update')
2926 2863
2927 2864
2928 2865 def addExtension(self, data, tagname):
2929 2866 self.open()
2930 2867 extension = pyfits.ImageHDU(data=data, name=tagname)
2931 2868 #extension.header['TAG'] = tagname
2932 2869 self.fitsObj.append(extension)
2933 2870 self.write()
2934 2871
2935 2872 def addData(self, data):
2936 2873 self.open()
2937 2874 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
2938 2875 extension.header['UTCTIME'] = self.dataOut.utctime
2939 2876 self.fitsObj.append(extension)
2940 2877 self.blockIndex += 1
2941 2878 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2942 2879
2943 2880 self.write()
2944 2881
2945 2882 def write(self):
2946 2883
2947 2884 self.fitsObj.flush(verbose=True)
2948 2885 self.fitsObj.close()
2949 2886
2950 2887
2951 2888 def setNextFile(self):
2952 2889
2953 2890 ext = self.ext
2954 2891 path = self.path
2955 2892
2956 2893 timeTuple = time.localtime( self.dataOut.utctime)
2957 2894 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2958 2895
2959 2896 fullpath = os.path.join( path, subfolder )
2960 2897 if not( os.path.exists(fullpath) ):
2961 2898 os.mkdir(fullpath)
2962 2899 self.setFile = -1 #initialize the set counter
2963 2900 else:
2964 2901 filesList = os.listdir( fullpath )
2965 2902 if len( filesList ) > 0:
2966 2903 filesList = sorted( filesList, key=str.lower )
2967 2904 filen = filesList[-1]
2968 2905
2969 2906 if isNumber( filen[8:11] ):
2970 2907 self.setFile = int( filen[8:11] ) #initialize the set counter with the set number of the last file
2971 2908 else:
2972 2909 self.setFile = -1
2973 2910 else:
2974 2911 self.setFile = -1 #initialize the set counter
2975 2912
2976 2913 setFile = self.setFile
2977 2914 setFile += 1
2978 2915
2979 2916 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2980 2917 timeTuple.tm_year,
2981 2918 timeTuple.tm_yday,
2982 2919 setFile,
2983 2920 ext )
2984 2921
2985 2922 filename = os.path.join( path, subfolder, file )
2986 2923
2987 2924 self.blockIndex = 0
2988 2925 self.filename = filename
2989 2926 self.setFile = setFile
2990 2927 self.flagIsNewFile = 1
2991 2928
2992 2929 print 'Writing the file: %s'%self.filename
2993 2930
2994 2931 self.setFitsHeader(self.dataOut, self.metadatafile)
2995 2932
2996 2933 return 1
2997 2934
2998 2935 def writeBlock(self):
2999 2936 self.addData(self.dataOut.data_spc)
3000 2937 self.flagIsNewFile = 0
3001 2938
3002 2939
3003 2940 def __setNewBlock(self):
3004 2941
3005 2942 if self.flagIsNewFile:
3006 2943 return 1
3007 2944
3008 2945 if self.blockIndex < self.dataBlocksPerFile:
3009 2946 return 1
3010 2947
3011 2948 if not( self.setNextFile() ):
3012 2949 return 0
3013 2950
3014 2951 return 1
3015 2952
3016 2953 def writeNextBlock(self):
3017 2954 if not( self.__setNewBlock() ):
3018 2955 return 0
3019 2956 self.writeBlock()
3020 2957 return 1
3021 2958
3022 2959 def putData(self):
3023 2960 if self.flagIsNewFile:
3024 2961 self.setNextFile()
3025 2962 self.writeNextBlock()
3026 2963
3027 2964 def run(self, dataOut, **kwargs):
3028 2965 if not(self.isConfig):
3029 2966 self.setup(dataOut, **kwargs)
3030 2967 self.isConfig = True
3031 2968 self.putData()
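# Usage sketch (illustrative; the paths and the metadata file are hypothetical):
#
#     fitsWriterObj = FitsWriter()
#     fitsWriterObj.run(dataOut,
#                       path="/home/myuser/fits",
#                       dataBlocksPerFile=100,
#                       metadatafile="/home/myuser/metadata.xml")
#
# Each call appends one data_spc block as a FITS ImageHDU; a new file is
# started once dataBlocksPerFile blocks have been written.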
3032 2969
3033 2970
3034 2971 class FitsReader(ProcessingUnit):
3035 2972
3036 2973 # __TIMEZONE = time.timezone
3037 2974
3038 2975 expName = None
3039 2976 datetimestr = None
3040 2977 utc = None
3041 2978 nChannels = None
3042 2979 nSamples = None
3043 2980 dataBlocksPerFile = None
3044 2981 comments = None
3045 2982 lastUTTime = None
3046 2983 header_dict = None
3047 2984 data = None
3048 2985 data_header_dict = None
3049 2986
3050 2987 def __init__(self):
3051 2988 self.isConfig = False
3052 2989 self.ext = '.fits'
3053 2990 self.setFile = 0
3054 2991 self.flagNoMoreFiles = 0
3055 2992 self.flagIsNewFile = 1
3056 2993 self.flagTimeBlock = None
3057 2994 self.fileIndex = None
3058 2995 self.filename = None
3059 2996 self.fileSize = None
3060 2997 self.fitsObj = None
3061 2998 self.timeZone = None
3062 2999 self.nReadBlocks = 0
3063 3000 self.nTotalBlocks = 0
3064 3001 self.dataOut = self.createObjByDefault()
3065 3002 self.maxTimeStep = 10 # should be defined by the user via the setup() method
3066 3003 self.blockIndex = 1
3067 3004
3068 3005 def createObjByDefault(self):
3069 3006
3070 3007 dataObj = Fits()
3071 3008
3072 3009 return dataObj
3073 3010
3074 3011 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3075 3012 try:
3076 3013 fitsObj = pyfits.open(filename,'readonly')
3077 3014 except:
3078 3015 raise IOError, "The file %s can't be opened" %(filename)
3079 3016
3080 3017 header = fitsObj[0].header
3081 3018 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3082 3019 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
3083 3020
3084 3021 ltc = utc
3085 3022 if useLocalTime:
3086 3023 ltc -= time.timezone
3087 3024 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3088 3025 thisTime = thisDatetime.time()
3089 3026
3090 3027 if not ((startTime <= thisTime) and (endTime > thisTime)):
3091 3028 return None
3092 3029
3093 3030 return thisDatetime
3094 3031
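A hedged sketch of the DATETIME conversion performed above, assuming the header stores a string such as "Nov 12 2012 18:56:07" in the "%b %d %Y %H:%M:%S" format (DST is ignored, as in the reader):

    import time, datetime

    header_datetime = "Nov 12 2012 18:56:07"        # assumed example header value
    struct_time = time.strptime(header_datetime, "%b %d %Y %H:%M:%S")
    utc = time.mktime(struct_time) - time.timezone  # same conversion as above
    thisTime = datetime.datetime.utcfromtimestamp(utc).time()
    print thisTime                                  # -> 18:56:07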
3095 3032 def __setNextFileOnline(self):
3096 3033 raise ValueError, "Not implemented"
3097 3034
3098 3035 def __setNextFileOffline(self):
3099 3036 idFile = self.fileIndex
3100 3037
3101 3038 while (True):
3102 3039 idFile += 1
3103 3040 if not(idFile < len(self.filenameList)):
3104 3041 self.flagNoMoreFiles = 1
3105 3042 print "No more Files"
3106 3043 return 0
3107 3044
3108 3045 filename = self.filenameList[idFile]
3109 3046
3110 3047 # if not(self.__verifyFile(filename)):
3111 3048 # continue
3112 3049
3113 3050 fileSize = os.path.getsize(filename)
3114 3051 fitsObj = pyfits.open(filename,'readonly')
3115 3052 break
3116 3053
3117 3054 self.flagIsNewFile = 1
3118 3055 self.fileIndex = idFile
3119 3056 self.filename = filename
3120 3057 self.fileSize = fileSize
3121 3058 self.fitsObj = fitsObj
3122 3059 self.blockIndex = 0
3123 3060 print "Setting the file: %s"%self.filename
3124 3061
3125 3062 return 1
3126 3063
3127 3064 def readHeader(self):
3128 3065 headerObj = self.fitsObj[0]
3129 3066
3130 3067 self.header_dict = headerObj.header
3131 3068 if 'EXPNAME' in headerObj.header.keys():
3132 3069 self.expName = headerObj.header['EXPNAME']
3133 3070
3134 3071 if 'DATATYPE' in headerObj.header.keys():
3135 3072 self.dataType = headerObj.header['DATATYPE']
3136 3073
3137 3074 self.datetimestr = headerObj.header['DATETIME']
3138 3075 self.channelList = headerObj.header['CHANNELLIST']
3139 3076 self.nChannels = headerObj.header['NCHANNELS']
3140 3077 self.nHeights = headerObj.header['NHEIGHTS']
3141 3078 self.ippSeconds = headerObj.header['IPPSECONDS']
3142 3079 self.nCohInt = headerObj.header['NCOHINT']
3143 3080 self.nIncohInt = headerObj.header['NINCOHINT']
3144 3081 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3145 3082 self.timeZone = headerObj.header['TIMEZONE']
3146 3083
3147 3084 self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
3148 3085
3149 3086 if 'COMMENT' in headerObj.header.keys():
3150 3087 self.comments = headerObj.header['COMMENT']
3151 3088
3152 3089 self.readHeightList()
3153 3090
3154 3091 def readHeightList(self):
3155 3092 self.blockIndex = self.blockIndex + 1
3156 3093 obj = self.fitsObj[self.blockIndex]
3157 3094 self.heightList = obj.data
3158 3095 self.blockIndex = self.blockIndex + 1
3159 3096
3160 3097 def readExtension(self):
3161 3098 obj = self.fitsObj[self.blockIndex]
3162 3099 self.heightList = obj.data
3163 3100 self.blockIndex = self.blockIndex + 1
3164 3101
3165 3102 def setNextFile(self):
3166 3103
3167 3104 if self.online:
3168 3105 newFile = self.__setNextFileOnline()
3169 3106 else:
3170 3107 newFile = self.__setNextFileOffline()
3171 3108
3172 3109 if not(newFile):
3173 3110 return 0
3174 3111
3175 3112 self.readHeader()
3176 3113
3177 3114 self.nReadBlocks = 0
3178 3115 # self.blockIndex = 1
3179 3116 return 1
3180 3117
3181 3118 def __searchFilesOffLine(self,
3182 3119 path,
3183 3120 startDate,
3184 3121 endDate,
3185 3122 startTime=datetime.time(0,0,0),
3186 3123 endTime=datetime.time(23,59,59),
3187 3124 set=None,
3188 3125 expLabel='',
3189 3126 ext='.fits',
3190 3127 walk=True):
3191 3128
3192 3129 pathList = []
3193 3130
3194 3131 if not walk:
3195 3132 pathList.append(path)
3196 3133
3197 3134 else:
3198 3135 dirList = []
3199 3136 for thisPath in os.listdir(path):
3200 3137 if not os.path.isdir(os.path.join(path,thisPath)):
3201 3138 continue
3202 3139 if not isDoyFolder(thisPath):
3203 3140 continue
3204 3141
3205 3142 dirList.append(thisPath)
3206 3143
3207 3144 if not(dirList):
3208 3145 return None, None
3209 3146
3210 3147 thisDate = startDate
3211 3148
3212 3149 while(thisDate <= endDate):
3213 3150 year = thisDate.timetuple().tm_year
3214 3151 doy = thisDate.timetuple().tm_yday
3215 3152
3216 3153 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3217 3154 if len(matchlist) == 0:
3218 3155 thisDate += datetime.timedelta(1)
3219 3156 continue
3220 3157 for match in matchlist:
3221 3158 pathList.append(os.path.join(path,match,expLabel))
3222 3159
3223 3160 thisDate += datetime.timedelta(1)
3224 3161
3225 3162 if pathList == []:
3226 3163 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3227 3164 return None, None
3228 3165
3229 3166 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3230 3167
3231 3168 filenameList = []
3232 3169 datetimeList = []
3233 3170
3234 3171 for i in range(len(pathList)):
3235 3172
3236 3173 thisPath = pathList[i]
3237 3174
3238 3175 fileList = glob.glob1(thisPath, "*%s" %ext)
3239 3176 fileList.sort()
3240 3177
3241 3178 for file in fileList:
3242 3179
3243 3180 filename = os.path.join(thisPath,file)
3244 3181 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
3245 3182
3246 3183 if not(thisDatetime):
3247 3184 continue
3248 3185
3249 3186 filenameList.append(filename)
3250 3187 datetimeList.append(thisDatetime)
3251 3188
3252 3189 if not(filenameList):
3253 3190 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3254 3191 return None, None
3255 3192
3256 3193 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3257 3194 print
3258 3195
3259 3196 for i in range(len(filenameList)):
3260 3197 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3261 3198
3262 3199 self.filenameList = filenameList
3263 3200 self.datetimeList = datetimeList
3264 3201
3265 3202 return pathList, filenameList
3266 3203
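A small standalone sketch of the day-of-year folder matching used above; the folder names and the date are assumptions for illustration:

    import fnmatch, datetime

    dirList = ['d2012316', 'd2012317', 'notes']     # assumed directory names
    thisDate = datetime.date(2012, 11, 12)          # day of year 317
    doy = thisDate.timetuple().tm_yday
    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (thisDate.year, doy) + '*')
    print matchlist                                 # -> ['d2012317']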
3267 3204 def setup(self, path=None,
3268 3205 startDate=None,
3269 3206 endDate=None,
3270 3207 startTime=datetime.time(0,0,0),
3271 3208 endTime=datetime.time(23,59,59),
3272 3209 set=0,
3273 3210 expLabel = "",
3274 3211 ext = None,
3275 3212 online = False,
3276 3213 delay = 60,
3277 3214 walk = True):
3278 3215
3279 3216 if path == None:
3280 3217 raise ValueError, "The path is not valid"
3281 3218
3282 3219 if ext == None:
3283 3220 ext = self.ext
3284 3221
3285 3222 if not(online):
3286 3223 print "Searching files in offline mode ..."
3287 3224 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3288 3225 startTime=startTime, endTime=endTime,
3289 3226 set=set, expLabel=expLabel, ext=ext,
3290 3227 walk=walk)
3291 3228
3292 3229 if not(pathList):
3293 3230 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3294 3231 datetime.datetime.combine(startDate,startTime).ctime(),
3295 3232 datetime.datetime.combine(endDate,endTime).ctime())
3296 3233
3297 3234 sys.exit(-1)
3298 3235
3299 3236 self.fileIndex = -1
3300 3237 self.pathList = pathList
3301 3238 self.filenameList = filenameList
3302 3239
3303 3240 self.online = online
3304 3241 self.delay = delay
3305 3242 ext = ext.lower()
3306 3243 self.ext = ext
3307 3244
3308 3245 if not(self.setNextFile()):
3309 3246 if (startDate!=None) and (endDate!=None):
3310 3247 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3311 3248 elif startDate != None:
3312 3249 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3313 3250 else:
3314 3251 print "No files"
3315 3252
3316 3253 sys.exit(-1)
3317 3254
3318 3255
3319 3256
3320 3257 def readBlock(self):
3321 3258 dataObj = self.fitsObj[self.blockIndex]
3322 3259
3323 3260 self.data = dataObj.data
3324 3261 self.data_header_dict = dataObj.header
3325 3262 self.utc = self.data_header_dict['UTCTIME']
3326 3263
3327 3264 self.flagIsNewFile = 0
3328 3265 self.blockIndex += 1
3329 3266 self.nTotalBlocks += 1
3330 3267 self.nReadBlocks += 1
3331 3268
3332 3269 return 1
3333 3270
3334 3271 def __jumpToLastBlock(self):
3335 3272 raise ValueError, "Not implemented"
3336 3273
3337 3274 def __waitNewBlock(self):
3338 3275 """
3339 3276 Returns 1 if a new data block was found, 0 otherwise.
3340 3277 
3341 3278 If the reading mode is offline, this always returns 0.
3342 3279 """
3343 3280 if not self.online:
3344 3281 return 0
3345 3282
3346 3283 if (self.nReadBlocks >= self.dataBlocksPerFile):
3347 3284 return 0
3348 3285
3349 3286 currentPointer = self.fp.tell()
3350 3287
3351 3288 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3352 3289
3353 3290 for nTries in range( self.nTries ):
3354 3291
3355 3292 self.fp.close()
3356 3293 self.fp = open( self.filename, 'rb' )
3357 3294 self.fp.seek( currentPointer )
3358 3295
3359 3296 self.fileSize = os.path.getsize( self.filename )
3360 3297 currentSize = self.fileSize - currentPointer
3361 3298
3362 3299 if ( currentSize >= neededSize ):
3363 3300 self.__rdBasicHeader()
3364 3301 return 1
3365 3302
3366 3303 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3367 3304 time.sleep( self.delay )
3368 3305
3369 3306
3370 3307 return 0
3371 3308
3372 3309 def __setNewBlock(self):
3373 3310
3374 3311 if self.online:
3375 3312 self.__jumpToLastBlock()
3376 3313
3377 3314 if self.flagIsNewFile:
3378 3315 return 1
3379 3316
3380 3317 self.lastUTTime = self.utc
3381 3318
3382 3319 if self.online:
3383 3320 if self.__waitNewBlock():
3384 3321 return 1
3385 3322
3386 3323 if self.nReadBlocks < self.dataBlocksPerFile:
3387 3324 return 1
3388 3325
3389 3326 if not(self.setNextFile()):
3390 3327 return 0
3391 3328
3392 3329 deltaTime = self.utc - self.lastUTTime
3393 3330
3394 3331 self.flagTimeBlock = 0
3395 3332
3396 3333 if deltaTime > self.maxTimeStep:
3397 3334 self.flagTimeBlock = 1
3398 3335
3399 3336 return 1
3400 3337
3401 3338
3402 3339 def readNextBlock(self):
3403 3340 if not(self.__setNewBlock()):
3404 3341 return 0
3405 3342
3406 3343 if not(self.readBlock()):
3407 3344 return 0
3408 3345
3409 3346 return 1
3410 3347
3411 3348
3412 3349 def getData(self):
3413 3350
3414 3351 if self.flagNoMoreFiles:
3415 3352 self.dataOut.flagNoData = True
3416 3353 print 'Process finished'
3417 3354 return 0
3418 3355
3419 3356 self.flagTimeBlock = 0
3420 3357 self.flagIsNewBlock = 0
3421 3358
3422 3359 if not(self.readNextBlock()):
3423 3360 return 0
3424 3361
3425 3362 if self.data == None:
3426 3363 self.dataOut.flagNoData = True
3427 3364 return 0
3428 3365
3429 3366 self.dataOut.data = self.data
3430 3367 self.dataOut.data_header = self.data_header_dict
3431 3368 self.dataOut.utctime = self.utc
3432 3369
3433 3370 self.dataOut.header = self.header_dict
3434 3371 self.dataOut.expName = self.expName
3435 3372 self.dataOut.nChannels = self.nChannels
3436 3373 self.dataOut.timeZone = self.timeZone
3437 3374 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3438 3375 self.dataOut.comments = self.comments
3439 3376 self.dataOut.timeInterval = self.timeInterval
3440 3377 self.dataOut.channelList = self.channelList
3441 3378 self.dataOut.heightList = self.heightList
3442 3379 self.dataOut.flagNoData = False
3443 3380
3444 3381 return self.dataOut.data
3445 3382
3446 3383 def run(self, **kwargs):
3447 3384
3448 3385 if not(self.isConfig):
3449 3386 self.setup(**kwargs)
3450 3387 self.isConfig = True
3451 3388
3452 3389 self.getData() No newline at end of file
@@ -1,1720 +1,1917
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 try:
16 16 import cfunctions
17 17 except:
18 18 pass
19 19
20 20 class ProcessingUnit:
21 21
22 22 """
22 22 This is the base class for data processing.
23 23 
24 24 It provides the "call" method to invoke operations. An operation can be:
25 25 - an internal method (callMethod)
26 26 - an object of type Operation (callObject); these objects must first be
27 27 registered with the "addOperation" method.
29 29
30 30 """
31 31 # input data object (Voltage, Spectra or Correlation)
32 32 dataIn = None
33 33
34 34 # output data object (Voltage, Spectra or Correlation)
35 35 dataOut = None
36 36
37 37
38 38 objectDict = None
39 39
40 40 def __init__(self):
41 41
42 42 self.objectDict = {}
43 43
44 44 def init(self):
45 45
46 46 raise ValueError, "Not implemented"
47 47
48 48 def addOperation(self, object, objId):
49 49
50 50 """
51 51 Adds the object "object" to the object dictionary "self.objectDict" and returns
52 52 the identifier associated with it.
53 53 
54 54 Input:
55 55 
56 56 object : object of class "Operation"
57 57 
58 58 Return:
59 59 
60 60 objId : object identifier, needed to execute the operation
61 61 """
62 62
63 63 self.objectDict[objId] = object
64 64
65 65 return objId
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
70 70 Operation applied directly to the data (dataOut.data). The attribute values of
71 71 the dataOut object must be updated accordingly.
72 72 
73 73 Input:
74 74 
75 75 **kwargs : dictionary of arguments for the function to execute
76 76 """
77 77
78 78 raise ValueError, "ImplementedError"
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
83 83 Executes this class's own method named "name" with the arguments **kwargs.
84 84 
85 85 Input:
86 86 name : name of the method to execute
87 87 
88 88 **kwargs : dictionary with the argument names and values for the function to execute.
89 89
90 90 """
91 91 if name != 'run':
92 92
93 93 if name == 'init' and self.dataIn.isEmpty():
94 94 self.dataOut.flagNoData = True
95 95 return False
96 96
97 97 if name != 'init' and self.dataOut.isEmpty():
98 98 return False
99 99
100 100 methodToCall = getattr(self, name)
101 101
102 102 methodToCall(**kwargs)
103 103
104 104 if name != 'run':
105 105 return True
106 106
107 107 if self.dataOut.isEmpty():
108 108 return False
109 109
110 110 return True
111 111
112 112 def callObject(self, objId, **kwargs):
113 113
114 114 """
115 115 Executes the operation associated with the object identifier "objId".
116 116 
117 117 Input:
118 118 
119 119 objId : identifier of the object to execute
120 120 
121 121 **kwargs : dictionary with the argument names and values for the function to execute.
122 122
123 123 Return:
124 124
125 125 None
126 126 """
127 127
128 128 if self.dataOut.isEmpty():
129 129 return False
130 130
131 131 object = self.objectDict[objId]
132 132
133 133 object.run(self.dataOut, **kwargs)
134 134
135 135 return True
136 136
137 137 def call(self, operationConf, **kwargs):
138 138
139 139 """
140 140 Returns True if the operation "operationConf.name" is executed with the
141 141 arguments "**kwargs", and False if the operation was not executed.
142 142 The operation can be of two types:
143 143 
144 144 1. A method of this class itself:
145 145 
146 146 operation.type = "self"
147 147 
148 148 2. The "run" method of an Operation object, or of a class derived from it:
149 149 operation.type = "other"
150 150 
151 151 Such an Operation object must have been registered beforehand with the
152 152 "addOperation" method and is identified by its operation.id.
153 153 
154 154 
155 155 
156 156 
157 157 Input:
158 158 
159 159 operationConf : operation object with the attributes: name, type and id.
160 160
161 161 """
162 162
163 163 if operationConf.type == 'self':
164 164 sts = self.callMethod(operationConf.name, **kwargs)
165 165
166 166 if operationConf.type == 'other':
167 167 sts = self.callObject(operationConf.id, **kwargs)
168 168
169 169 return sts
170 170
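A hypothetical sketch of the two dispatch paths handled by call(); the OpConf class and the argument values below are assumptions for illustration, not part of the library:

    class OpConf:
        # stand-in for the configuration object expected by call()
        def __init__(self, name, type, id=None):
            self.name = name
            self.type = type
            self.id = id

    # procUnit = SpectraProc()
    # objId = procUnit.addOperation(IncohInt(), objId=1)
    # procUnit.call(OpConf('init', 'self'), nProfiles=16, nFFTPoints=16)   # own method
    # procUnit.call(OpConf('run', 'other', id=objId), n=10)                # registered Operation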
171 171 def setInput(self, dataIn):
172 172
173 173 self.dataIn = dataIn
174 174
175 175 def getOutput(self):
176 176
177 177 return self.dataOut
178 178
179 179 class Operation():
180 180
181 181 """
182 182 Clase base para definir las operaciones adicionales que se pueden agregar a la clase ProcessingUnit
183 183 y necesiten acumular informacion previa de los datos a procesar. De preferencia usar un buffer de
184 184 acumulacion dentro de esta clase
185 185
186 186 Ejemplo: Integraciones coherentes, necesita la informacion previa de los n perfiles anteriores (bufffer)
187 187
188 188 """
189 189
190 190 __buffer = None
191 191 __isConfig = False
192 192
193 193 def __init__(self):
194 194
195 195 pass
196 196
197 197 def run(self, dataIn, **kwargs):
198 198
199 199 """
200 200 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
201 201 
202 202 Input:
203 203 
204 204 dataIn : object of type JROData
205 205 
206 206 Return:
207 207 
208 208 None
209 209 
210 210 Affected:
211 211 __buffer : data reception buffer.
212 212
213 213 """
214 214
215 215 raise ValueError, "ImplementedError"
216 216
217 217 class VoltageProc(ProcessingUnit):
218 218
219 219
220 220 def __init__(self):
221 221
222 222 self.objectDict = {}
223 223 self.dataOut = Voltage()
224 224 self.flip = 1
225 225
226 226 def init(self):
227 227
228 228 self.dataOut.copy(self.dataIn)
229 229 # There is no need to copy the dataIn attributes on every init();
230 230 # the copy should be made only for each new data block
231 231
232 232 def selectChannels(self, channelList):
233 233
234 234 channelIndexList = []
235 235
236 236 for channel in channelList:
237 237 index = self.dataOut.channelList.index(channel)
238 238 channelIndexList.append(index)
239 239
240 240 self.selectChannelsByIndex(channelIndexList)
241 241
242 242 def selectChannelsByIndex(self, channelIndexList):
243 243 """
244 244 Selects a block of data by channel, according to channelIndexList.
245 245 
246 246 Input:
247 247 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
248 248
249 249 Affected:
250 250 self.dataOut.data
251 251 self.dataOut.channelIndexList
252 252 self.dataOut.nChannels
253 253 self.dataOut.m_ProcessingHeader.totalSpectra
254 254 self.dataOut.systemHeaderObj.numChannels
255 255 self.dataOut.m_ProcessingHeader.blockSize
256 256
257 257 Return:
258 258 None
259 259 """
260 260
261 261 for channelIndex in channelIndexList:
262 262 if channelIndex not in self.dataOut.channelIndexList:
263 263 print channelIndexList
264 264 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
265 265
266 266 nChannels = len(channelIndexList)
267 267
268 268 data = self.dataOut.data[channelIndexList,:]
269 269
270 270 self.dataOut.data = data
271 271 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
272 272 # self.dataOut.nChannels = nChannels
273 273
274 274 return 1
275 275
276 276 def selectHeights(self, minHei=None, maxHei=None):
277 277 """
278 278 Selects a block of data by height values, within the range
279 279 minHei <= height <= maxHei
280 280 
281 281 Input:
282 282 minHei : minimum height to consider
283 283 maxHei : maximum height to consider
284 284 
285 285 Affected:
286 286 Several values are changed indirectly through the selectHeightsByIndex method
287 287 
288 288 Return:
289 289 1 if the method ran successfully, 0 otherwise
290 290 """
291 291
292 292 if minHei == None:
293 293 minHei = self.dataOut.heightList[0]
294 294
295 295 if maxHei == None:
296 296 maxHei = self.dataOut.heightList[-1]
297 297
298 298 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
299 299 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
300 300
301 301
302 302 if (maxHei > self.dataOut.heightList[-1]):
303 303 maxHei = self.dataOut.heightList[-1]
304 304 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
305 305
306 306 minIndex = 0
307 307 maxIndex = 0
308 308 heights = self.dataOut.heightList
309 309
310 310 inda = numpy.where(heights >= minHei)
311 311 indb = numpy.where(heights <= maxHei)
312 312
313 313 try:
314 314 minIndex = inda[0][0]
315 315 except:
316 316 minIndex = 0
317 317
318 318 try:
319 319 maxIndex = indb[0][-1]
320 320 except:
321 321 maxIndex = len(heights)
322 322
323 323 self.selectHeightsByIndex(minIndex, maxIndex)
324 324
325 325 return 1
326 326
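A quick numpy sketch of the index lookup that selectHeights() performs; the height list and limits are assumed values:

    import numpy

    heights = numpy.arange(80.0, 200.0, 15.0)       # assumed height list in km
    minHei, maxHei = 100.0, 160.0
    minIndex = numpy.where(heights >= minHei)[0][0]
    maxIndex = numpy.where(heights <= maxHei)[0][-1]
    print minIndex, maxIndex                        # -> 2 5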
327 327
328 328 def selectHeightsByIndex(self, minIndex, maxIndex):
329 329 """
330 330 Selects a block of data by height indexes, within the range
331 331 minIndex <= index <= maxIndex
332 332 
333 333 Input:
334 334 minIndex : minimum height index to consider
335 335 maxIndex : maximum height index to consider
336 336 
337 337 Affected:
338 338 self.dataOut.data
339 339 self.dataOut.heightList
340 340 
341 341 Return:
342 342 1 if the method ran successfully, 0 otherwise
343 343 """
344 344
345 345 if (minIndex < 0) or (minIndex > maxIndex):
346 346 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
347 347
348 348 if (maxIndex >= self.dataOut.nHeights):
349 349 maxIndex = self.dataOut.nHeights-1
350 350 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
351 351
352 352 nHeights = maxIndex - minIndex + 1
353 353
354 354 #voltage
355 355 data = self.dataOut.data[:,minIndex:maxIndex+1]
356 356
357 357 firstHeight = self.dataOut.heightList[minIndex]
358 358
359 359 self.dataOut.data = data
360 360 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
361 361
362 362 return 1
363 363
364 364
365 365 def filterByHeights(self, window):
366 366 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
367 367
368 368 if window == None:
369 369 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
370 370
371 371 newdelta = deltaHeight * window
372 372 r = self.dataOut.data.shape[1] % window
373 373 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
374 374 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
375 375 buffer = numpy.sum(buffer,2)
376 376 self.dataOut.data = buffer
377 377 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*(self.dataOut.nHeights-r)/window,newdelta)
378 378 self.dataOut.windowOfFilter = window
379 379
380 380 def deFlip(self):
381 381 self.dataOut.data *= self.flip
382 382 self.flip *= -1.
383 383
384 384 def setRadarFrequency(self, frequency=None):
385 385 if frequency != None:
386 386 self.dataOut.frequency = frequency
387 387
388 388 return 1
389 389
390 390 class CohInt(Operation):
391 391
392 392 __isConfig = False
393 393
394 394 __profIndex = 0
395 395 __withOverapping = False
396 396
397 397 __byTime = False
398 398 __initime = None
399 399 __lastdatatime = None
400 400 __integrationtime = None
401 401
402 402 __buffer = None
403 403
404 404 __dataReady = False
405 405
406 406 n = None
407 407
408 408
409 409 def __init__(self):
410 410
411 411 self.__isConfig = False
412 412
413 413 def setup(self, n=None, timeInterval=None, overlapping=False):
414 414 """
415 415 Set the parameters of the integration class.
416 416
417 417 Inputs:
418 418
419 419 n : Number of coherent integrations
420 420 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
421 421 overlapping :
422 422
423 423 """
424 424
425 425 self.__initime = None
426 426 self.__lastdatatime = 0
427 427 self.__buffer = None
428 428 self.__dataReady = False
429 429
430 430
431 431 if n == None and timeInterval == None:
432 432 raise ValueError, "n or timeInterval should be specified ..."
433 433
434 434 if n != None:
435 435 self.n = n
436 436 self.__byTime = False
437 437 else:
438 438 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
439 439 self.n = 9999
440 440 self.__byTime = True
441 441
442 442 if overlapping:
443 443 self.__withOverapping = True
444 444 self.__buffer = None
445 445 else:
446 446 self.__withOverapping = False
447 447 self.__buffer = 0
448 448
449 449 self.__profIndex = 0
450 450
451 451 def putData(self, data):
452 452
453 453 """
454 454 Add a profile to the __buffer and increase in one the __profileIndex
455 455
456 456 """
457 457
458 458 if not self.__withOverapping:
459 459 self.__buffer += data.copy()
460 460 self.__profIndex += 1
461 461 return
462 462
463 463 #Overlapping data
464 464 nChannels, nHeis = data.shape
465 465 data = numpy.reshape(data, (1, nChannels, nHeis))
466 466
467 467 #If the buffer is empty then it takes the data value
468 468 if self.__buffer == None:
469 469 self.__buffer = data
470 470 self.__profIndex += 1
471 471 return
472 472
473 473 #If the buffer length is lower than n then stack the data value
474 474 if self.__profIndex < self.n:
475 475 self.__buffer = numpy.vstack((self.__buffer, data))
476 476 self.__profIndex += 1
477 477 return
478 478
479 479 #If the buffer length is equal to n then replacing the last buffer value with the data value
480 480 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
481 481 self.__buffer[self.n-1] = data
482 482 self.__profIndex = self.n
483 483 return
484 484
485 485
486 486 def pushData(self):
487 487 """
488 488 Return the sum of the last profiles and the profiles used in the sum.
489 489
490 490 Affected:
491 491
492 492 self.__profileIndex
493 493
494 494 """
495 495
496 496 if not self.__withOverapping:
497 497 data = self.__buffer
498 498 n = self.__profIndex
499 499
500 500 self.__buffer = 0
501 501 self.__profIndex = 0
502 502
503 503 return data, n
504 504
505 505 #Integration with Overlapping
506 506 data = numpy.sum(self.__buffer, axis=0)
507 507 n = self.__profIndex
508 508
509 509 return data, n
510 510
511 511 def byProfiles(self, data):
512 512
513 513 self.__dataReady = False
514 514 avgdata = None
515 515 n = None
516 516
517 517 self.putData(data)
518 518
519 519 if self.__profIndex == self.n:
520 520
521 521 avgdata, n = self.pushData()
522 522 self.__dataReady = True
523 523
524 524 return avgdata
525 525
526 526 def byTime(self, data, datatime):
527 527
528 528 self.__dataReady = False
529 529 avgdata = None
530 530 n = None
531 531
532 532 self.putData(data)
533 533
534 534 if (datatime - self.__initime) >= self.__integrationtime:
535 535 avgdata, n = self.pushData()
536 536 self.n = n
537 537 self.__dataReady = True
538 538
539 539 return avgdata
540 540
541 541 def integrate(self, data, datatime=None):
542 542
543 543 if self.__initime == None:
544 544 self.__initime = datatime
545 545
546 546 if self.__byTime:
547 547 avgdata = self.byTime(data, datatime)
548 548 else:
549 549 avgdata = self.byProfiles(data)
550 550
551 551
552 552 self.__lastdatatime = datatime
553 553
554 554 if avgdata == None:
555 555 return None, None
556 556
557 557 avgdatatime = self.__initime
558 558
559 559 deltatime = datatime -self.__lastdatatime
560 560
561 561 if not self.__withOverapping:
562 562 self.__initime = datatime
563 563 else:
564 564 self.__initime += deltatime
565 565
566 566 return avgdata, avgdatatime
567 567
568 568 def run(self, dataOut, **kwargs):
569 569
570 570 if not self.__isConfig:
571 571 self.setup(**kwargs)
572 572 self.__isConfig = True
573 573
574 574 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
575 575
576 576 # dataOut.timeInterval *= n
577 577 dataOut.flagNoData = True
578 578
579 579 if self.__dataReady:
580 580 dataOut.data = avgdata
581 581 dataOut.nCohInt *= self.n
582 582 dataOut.utctime = avgdatatime
583 583 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
584 584 dataOut.flagNoData = False
585 585
586 586
587 587 class Decoder(Operation):
588 588
589 589 __isConfig = False
590 590 __profIndex = 0
591 591
592 592 code = None
593 593
594 594 nCode = None
595 595 nBaud = None
596 596
597 597 def __init__(self):
598 598
599 599 self.__isConfig = False
600 600
601 601 def setup(self, code, shape):
602 602
603 603 self.__profIndex = 0
604 604
605 605 self.code = code
606 606
607 607 self.nCode = len(code)
608 608 self.nBaud = len(code[0])
609 609
610 610 self.__nChannels, self.__nHeis = shape
611 611
612 612 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
613 613
614 614 __codeBuffer[:,0:self.nBaud] = self.code
615 615
616 616 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
617 617
618 618 self.ndatadec = self.__nHeis - self.nBaud + 1
619 619
620 620 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
621 621
622 622 def convolutionInFreq(self, data):
623 623
624 624 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
625 625
626 626 fft_data = numpy.fft.fft(data, axis=1)
627 627
628 628 conv = fft_data*fft_code
629 629
630 630 data = numpy.fft.ifft(conv,axis=1)
631 631
632 632 datadec = data[:,:-self.nBaud+1]
633 633
634 634 return datadec
635 635
636 636 def convolutionInFreqOpt(self, data):
637 637
638 638 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
639 639
640 640 data = cfunctions.decoder(fft_code, data)
641 641
642 642 datadec = data[:,:-self.nBaud+1]
643 643
644 644 return datadec
645 645
646 646 def convolutionInTime(self, data):
647 647
648 648 code = self.code[self.__profIndex]
649 649
650 650 for i in range(self.__nChannels):
651 651 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='valid')
652 652
653 653 return self.datadecTime
654 654
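A standalone sketch of the time-domain decoding above: the received profile is correlated against the transmitted code with mode='valid'; the 5-baud code and echo position are assumptions:

    import numpy

    code = numpy.array([1, 1, 1, -1, 1])             # assumed 5-baud code
    profile = numpy.zeros(12, dtype=complex)
    profile[3:3+code.size] = code                    # echo starting at range gate 3
    decoded = numpy.correlate(profile, code, mode='valid')
    print numpy.argmax(numpy.abs(decoded))           # -> 3 (the compressed gate)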
655 655 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0):
656 656
657 657 if not self.__isConfig:
658 658
659 659 if code == None:
660 660 code = dataOut.code
661 661 else:
662 662 code = numpy.array(code).reshape(nCode,nBaud)
663 663 dataOut.code = code
664 664 dataOut.nCode = nCode
665 665 dataOut.nBaud = nBaud
666 666
667 667 if code == None:
668 668 return 1
669 669
670 670 self.setup(code, dataOut.data.shape)
671 671 self.__isConfig = True
672 672
673 673 if mode == 0:
674 674 datadec = self.convolutionInTime(dataOut.data)
675 675
676 676 if mode == 1:
677 677 datadec = self.convolutionInFreq(dataOut.data)
678 678
679 679 if mode == 2:
680 680 datadec = self.convolutionInFreqOpt(dataOut.data)
681 681
682 682 dataOut.data = datadec
683 683
684 684 dataOut.heightList = dataOut.heightList[0:self.ndatadec]
685 685
686 686 dataOut.flagDecodeData = True # the data has now been decoded
687 687
688 688 if self.__profIndex == self.nCode-1:
689 689 self.__profIndex = 0
690 690 return 1
691 691
692 692 self.__profIndex += 1
693 693
694 694 return 1
695 695 # dataOut.flagDeflipData = True #asumo q la data no esta sin flip
696 696
697 697
698 698
699 699 class SpectraProc(ProcessingUnit):
700 700
701 701 def __init__(self):
702 702
703 703 self.objectDict = {}
704 704 self.buffer = None
705 705 self.firstdatatime = None
706 706 self.profIndex = 0
707 707 self.dataOut = Spectra()
708 708
709 709 def __updateObjFromInput(self):
710 710
711 711 self.dataOut.timeZone = self.dataIn.timeZone
712 712 self.dataOut.dstFlag = self.dataIn.dstFlag
713 713 self.dataOut.errorCount = self.dataIn.errorCount
714 714 self.dataOut.useLocalTime = self.dataIn.useLocalTime
715 715
716 716 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
717 717 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
718 718 self.dataOut.channelList = self.dataIn.channelList
719 719 self.dataOut.heightList = self.dataIn.heightList
720 720 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
721 721 # self.dataOut.nHeights = self.dataIn.nHeights
722 722 # self.dataOut.nChannels = self.dataIn.nChannels
723 723 self.dataOut.nBaud = self.dataIn.nBaud
724 724 self.dataOut.nCode = self.dataIn.nCode
725 725 self.dataOut.code = self.dataIn.code
726 726 self.dataOut.nProfiles = self.dataOut.nFFTPoints
727 727 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
728 728 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
729 729 self.dataOut.utctime = self.firstdatatime
730 730 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assumes the data is already decoded
731 731 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assumes the data has no flip applied
732 732 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
733 733 self.dataOut.nCohInt = self.dataIn.nCohInt
734 734 self.dataOut.nIncohInt = 1
735 735 self.dataOut.ippSeconds = self.dataIn.ippSeconds
736 736 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
737 737
738 738 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
739 739 self.dataOut.frequency = self.dataIn.frequency
740 740 self.dataOut.realtime = self.dataIn.realtime
741 741
742 742 def __getFft(self):
743 743 """
744 744 Converts Voltage values into Spectra
745 745
746 746 Affected:
747 747 self.dataOut.data_spc
748 748 self.dataOut.data_cspc
749 749 self.dataOut.data_dc
750 750 self.dataOut.heightList
751 751 self.profIndex
752 752 self.buffer
753 753 self.dataOut.flagNoData
754 754 """
755 755 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
756 756 fft_volt = fft_volt.astype(numpy.dtype('complex'))
757 757 dc = fft_volt[:,0,:]
758 758
759 759 # self-spectra computation
760 760 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
761 761 spc = fft_volt * numpy.conjugate(fft_volt)
762 762 spc = spc.real
763 763
764 764 blocksize = 0
765 765 blocksize += dc.size
766 766 blocksize += spc.size
767 767
768 768 cspc = None
769 769 pairIndex = 0
770 770 if self.dataOut.pairsList != None:
771 771 # cross-spectra computation
772 772 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
773 773 for pair in self.dataOut.pairsList:
774 774 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
775 775 pairIndex += 1
776 776 blocksize += cspc.size
777 777
778 778 self.dataOut.data_spc = spc
779 779 self.dataOut.data_cspc = cspc
780 780 self.dataOut.data_dc = dc
781 781 self.dataOut.blockSize = blocksize
782 782 self.dataOut.flagShiftFFT = False
783 783
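A minimal numpy sketch of the spectra computation in __getFft(): FFT along the profile axis, DC extraction, fftshift, then the power (self-spectra); the block dimensions are assumptions:

    import numpy

    nChannels, nProfiles, nHeights = 2, 16, 5        # assumed block dimensions
    volts = (numpy.random.randn(nChannels, nProfiles, nHeights) +
             1j*numpy.random.randn(nChannels, nProfiles, nHeights))

    fft_volt = numpy.fft.fft(volts, n=nProfiles, axis=1)
    dc = fft_volt[:, 0, :]                           # zero-frequency (DC) component
    fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
    spc = (fft_volt * numpy.conjugate(fft_volt)).real
    print spc.shape                                  # -> (2, 16, 5)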
784 def init(self, nProfiles=None, nFFTPoints=None, pairsList=None):
784 def init(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None):
785 785
786 786 self.dataOut.flagNoData = True
787 787
788 788 if self.dataIn.type == "Spectra":
789 789 self.dataOut.copy(self.dataIn)
790 790 return
791 791
792 792 if self.dataIn.type == "Voltage":
793 793
794 794 if nFFTPoints == None:
795 795 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
796 796
797 797 if pairsList == None:
798 798 nPairs = 0
799 799 else:
800 800 nPairs = len(pairsList)
801 801
802 if ippFactor == None:
803 ippFactor = 1
804 self.dataOut.ippFactor = ippFactor
805
802 806 self.dataOut.nFFTPoints = nFFTPoints
803 807 self.dataOut.pairsList = pairsList
804 808 self.dataOut.nPairs = nPairs
805 809
806 810 if self.buffer == None:
807 811 self.buffer = numpy.zeros((self.dataIn.nChannels,
808 812 nProfiles,
809 813 self.dataIn.nHeights),
810 814 dtype='complex')
811 815
812 816
813 817 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
814 818 self.profIndex += 1
815 819
816 820 if self.firstdatatime == None:
817 821 self.firstdatatime = self.dataIn.utctime
818 822
819 823 if self.profIndex == nProfiles:
820 824 self.__updateObjFromInput()
821 825 self.__getFft()
822 826
823 827 self.dataOut.flagNoData = False
824 828
825 829 self.buffer = None
826 830 self.firstdatatime = None
827 831 self.profIndex = 0
828 832
829 833 return
830 834
831 raise ValuError, "The type object %s is not valid"%(self.dataIn.type)
835 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
832 836
833 837 def selectChannels(self, channelList):
834 838
835 839 channelIndexList = []
836 840
837 841 for channel in channelList:
838 842 index = self.dataOut.channelList.index(channel)
839 843 channelIndexList.append(index)
840 844
841 845 self.selectChannelsByIndex(channelIndexList)
842 846
843 847 def selectChannelsByIndex(self, channelIndexList):
844 848 """
845 849 Selects a block of data by channel, according to channelIndexList.
846 850 
847 851 Input:
848 852 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
849 853
850 854 Affected:
851 855 self.dataOut.data_spc
852 856 self.dataOut.channelIndexList
853 857 self.dataOut.nChannels
854 858
855 859 Return:
856 860 None
857 861 """
858 862
859 863 for channelIndex in channelIndexList:
860 864 if channelIndex not in self.dataOut.channelIndexList:
861 865 print channelIndexList
862 866 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
863 867
864 868 nChannels = len(channelIndexList)
865 869
866 870 data_spc = self.dataOut.data_spc[channelIndexList,:]
867 871
868 872 self.dataOut.data_spc = data_spc
869 873 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
870 874 # self.dataOut.nChannels = nChannels
871 875
872 876 return 1
873 877
874 878 def selectHeights(self, minHei, maxHei):
875 879 """
876 880 Selects a block of data by height values, within the range
877 881 minHei <= height <= maxHei
878 882 
879 883 Input:
880 884 minHei : minimum height to consider
881 885 maxHei : maximum height to consider
882 886 
883 887 Affected:
884 888 Several values are changed indirectly through the selectHeightsByIndex method
885 889 
886 890 Return:
887 891 1 if the method ran successfully, 0 otherwise
888 892 """
889 893 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
890 894 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
891 895
892 896 if (maxHei > self.dataOut.heightList[-1]):
893 897 maxHei = self.dataOut.heightList[-1]
894 898 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
895 899
896 900 minIndex = 0
897 901 maxIndex = 0
898 902 heights = self.dataOut.heightList
899 903
900 904 inda = numpy.where(heights >= minHei)
901 905 indb = numpy.where(heights <= maxHei)
902 906
903 907 try:
904 908 minIndex = inda[0][0]
905 909 except:
906 910 minIndex = 0
907 911
908 912 try:
909 913 maxIndex = indb[0][-1]
910 914 except:
911 915 maxIndex = len(heights)
912 916
913 917 self.selectHeightsByIndex(minIndex, maxIndex)
914 918
915 919 return 1
916 920
917 921
918 922 def selectHeightsByIndex(self, minIndex, maxIndex):
919 923 """
920 924 Selecciona un bloque de datos en base a un grupo indices de alturas segun el rango
921 925 minIndex <= index <= maxIndex
922 926
923 927 Input:
924 928 minIndex : valor de indice minimo de altura a considerar
925 929 maxIndex : valor de indice maximo de altura a considerar
926 930
927 931 Affected:
928 932 self.dataOut.data_spc
929 933 self.dataOut.data_cspc
930 934 self.dataOut.data_dc
931 935 self.dataOut.heightList
932 936
933 937 Return:
934 938 1 si el metodo se ejecuto con exito caso contrario devuelve 0
935 939 """
936 940
937 941 if (minIndex < 0) or (minIndex > maxIndex):
938 942 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
939 943
940 944 if (maxIndex >= self.dataOut.nHeights):
941 945 maxIndex = self.dataOut.nHeights-1
942 946 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
943 947
944 948 nHeights = maxIndex - minIndex + 1
945 949
946 950 #Spectra
947 951 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
948 952
949 953 data_cspc = None
950 954 if self.dataOut.data_cspc != None:
951 955 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
952 956
953 957 data_dc = None
954 958 if self.dataOut.data_dc != None:
955 959 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
956 960
957 961 self.dataOut.data_spc = data_spc
958 962 self.dataOut.data_cspc = data_cspc
959 963 self.dataOut.data_dc = data_dc
960 964
961 965 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
962 966
963 967 return 1
964 968
965 def removeDC(self, mode = 1):
969 def removeDC(self, mode = 2):
970 jspectra = self.dataOut.data_spc
971 jcspectra = self.dataOut.data_cspc
966 972
967 dc_index = 0
968 freq_index = numpy.array([-2,-1,1,2])
969 data_spc = self.dataOut.data_spc
970 data_cspc = self.dataOut.data_cspc
971 data_dc = self.dataOut.data_dc
973
974 num_chan = jspectra.shape[0]
975 num_hei = jspectra.shape[2]
976
977 if jcspectra != None:
978 jcspectraExist = True
979 num_pairs = jcspectra.shape[0]
980 else: jcspectraExist = False
981
982 freq_dc = jspectra.shape[1]/2
983 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
984
985 if ind_vel[0]<0:
986 ind_vel[range(0,1)] = ind_vel[range(0,1)] + jspectra.shape[1] # wrap by the number of profiles (self.num_prof is undefined here)
987
988 if mode == 1:
989 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 # corrected indexing
990 
991 if jcspectraExist:
992 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
972 993
973 if self.dataOut.flagShiftFFT:
974 dc_index += self.dataOut.nFFTPoints/2
975 freq_index += self.dataOut.nFFTPoints/2
994 if mode == 2:
976 995
977 if mode == 1:
978 data_spc[dc_index] = (data_spc[:,freq_index[1],:] + data_spc[:,freq_index[2],:])/2
979 if data_cspc != None:
980 data_cspc[dc_index] = (data_cspc[:,freq_index[1],:] + data_cspc[:,freq_index[2],:])/2
981 return 1
996 vel = numpy.array([-2,-1,1,2])
997 xx = numpy.zeros([4,4])
998
999 for fil in range(4):
1000 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1001
1002 xx_inv = numpy.linalg.inv(xx)
1003 xx_aux = xx_inv[0,:]
1004
1005 for ich in range(num_chan):
1006 yy = jspectra[ich,ind_vel,:]
1007 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1008
1009 junkid = jspectra[ich,freq_dc,:]<=0
1010 cjunkid = sum(junkid)
1011
1012 if cjunkid.any():
1013 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1014
1015 if jcspectraExist:
1016 for ip in range(num_pairs):
1017 yy = jcspectra[ip,ind_vel,:]
1018 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
982 1019
983 if mode == 2:
984 pass
985 1020
986 if mode == 3:
987 pass
988
989 raise ValueError, "mode parameter has to be 1, 2 or 3"
1021 self.dataOut.data_spc = jspectra
1022 self.dataOut.data_cspc = jcspectra
1023
1024 return 1
990 1025
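A worked sketch of the mode-2 DC correction above: a cubic is fitted through the spectral samples at velocity bins -2, -1, +1, +2 and evaluated at 0; the first row of the inverse Vandermonde matrix gives exactly those weights (the sample values below are assumptions):

    import numpy

    vel = numpy.array([-2, -1, 1, 2])
    xx = numpy.zeros([4, 4])
    for fil in range(4):
        xx[fil, :] = vel[fil]**numpy.asarray(range(4))

    xx_aux = numpy.linalg.inv(xx)[0, :]              # weights that evaluate the fit at v = 0
    yy = numpy.array([4.0, 1.0, 1.0, 4.0])           # assumed spectrum samples (a v**2 trend)
    print numpy.dot(xx_aux, yy)                      # -> ~0.0, the interpolated DC value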
991 def removeInterference(self):
1026 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
1027
1028 jspectra = self.dataOut.data_spc
1029 jcspectra = self.dataOut.data_cspc
1030 jnoise = self.dataOut.getNoise()
1031 num_incoh = self.dataOut.nIncohInt
1032
1033 num_channel = jspectra.shape[0]
1034 num_prof = jspectra.shape[1]
1035 num_hei = jspectra.shape[2]
1036
1037 #hei_interf
1038 if hei_interf == None:
1039 count_hei = num_hei/2 #Como es entero no importa
1040 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
1041 hei_interf = numpy.asarray(hei_interf)[0]
1042 #nhei_interf
1043 if (nhei_interf == None):
1044 nhei_interf = 5
1045 if (nhei_interf < 1):
1046 nhei_interf = 1
1047 if (nhei_interf > count_hei):
1048 nhei_interf = count_hei
1049 if (offhei_interf == None):
1050 offhei_interf = 0
1051
1052 ind_hei = range(num_hei)
1053 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1054 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1055 mask_prof = numpy.asarray(range(num_prof))
1056 num_mask_prof = mask_prof.size
1057 comp_mask_prof = [0, num_prof/2]
1058
1059
1060 #noise_exist: determines whether jnoise has been defined and contains the noise level of each channel
1061 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1062 jnoise = numpy.nan*numpy.ones(num_channel) # keep it indexable for the check below
1063 noise_exist = jnoise[0] < numpy.Inf
1064
1065 #Interference removal subroutine
1066 for ich in range(num_channel):
1067 #Sort the spectra by power (lowest to highest)
1068 power = jspectra[ich,mask_prof,:]
1069 power = power[:,hei_interf]
1070 power = power.sum(axis = 0)
1071 psort = power.ravel().argsort()
1072
1073 #Estimate the average interference in the power spectra using the lowest-power heights
1074 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1075
1076 if noise_exist:
1077 # tmp_noise = jnoise[ich] / num_prof
1078 tmp_noise = jnoise[ich]
1079 junkspc_interf = junkspc_interf - tmp_noise
1080 #junkspc_interf[:,comp_mask_prof] = 0
1081
1082 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
1083 jspc_interf = jspc_interf.transpose()
1084 #Compute the average interference spectrum
1085 noiseid = numpy.where(jspc_interf <= tmp_noise/ math.sqrt(num_incoh))
1086 noiseid = noiseid[0]
1087 cnoiseid = noiseid.size
1088 interfid = numpy.where(jspc_interf > tmp_noise/ math.sqrt(num_incoh))
1089 interfid = interfid[0]
1090 cinterfid = interfid.size
1091
1092 if (cnoiseid > 0): jspc_interf[noiseid] = 0
1093
1094 #Expand the set of profiles to be cleaned
1095 if (cinterfid > 0):
1096 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
1097 new_interfid = numpy.asarray(new_interfid)
1098 new_interfid = {x for x in new_interfid}
1099 new_interfid = numpy.array(list(new_interfid))
1100 new_cinterfid = new_interfid.size
1101 else: new_cinterfid = 0
1102
1103 for ip in range(new_cinterfid):
1104 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
1105 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
1106
1107
1108 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf # TODO: check indexes
1109
1110 #Remove the interference at the profile with the strongest interference
1111 ListAux = jspc_interf[mask_prof].tolist()
1112 maxid = ListAux.index(max(ListAux))
1113
1114
1115 if cinterfid > 0:
1116 for ip in range(cinterfid*(interf == 2) - 1):
1117 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/math.sqrt(num_incoh))).nonzero()
1118 cind = len(ind)
1119
1120 if (cind > 0):
1121 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/math.sqrt(num_incoh))
1122
1123 ind = numpy.array([-2,-1,1,2])
1124 xx = numpy.zeros([4,4])
1125
1126 for id1 in range(4):
1127 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1128
1129 xx_inv = numpy.linalg.inv(xx)
1130 xx = xx_inv[:,0]
1131 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1132 yy = jspectra[ich,mask_prof[ind],:]
1133 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1134
1135
1136 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/math.sqrt(num_incoh))).nonzero()
1137 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/math.sqrt(num_incoh))
1138
1139 #Interference removal in the cross-spectra
1140 if jcspectra == None: return jspectra, jcspectra
1141 num_pairs = jcspectra.size/(num_prof*num_hei)
1142 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
992 1143
993 pass
1144 for ip in range(num_pairs):
1145
1146 #-------------------------------------------
1147
1148 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
1149 cspower = cspower[:,hei_interf]
1150 cspower = cspower.sum(axis = 0)
1151
1152 cspsort = cspower.ravel().argsort()
1153 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1154 junkcspc_interf = junkcspc_interf.transpose()
1155 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
1156
1157 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1158
1159 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1160 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1161 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
1162
1163 for iprof in range(num_prof):
1164 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
1165 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
1166
1167 #Remove the interference
1168 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
1169
1170 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1171 maxid = ListAux.index(max(ListAux))
1172
1173 ind = numpy.array([-2,-1,1,2])
1174 xx = numpy.zeros([4,4])
1175
1176 for id1 in range(4):
1177 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1178
1179 xx_inv = numpy.linalg.inv(xx)
1180 xx = xx_inv[:,0]
1181
1182 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1183 yy = jcspectra[ip,mask_prof[ind],:]
1184 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1185
1186 #Save the results
1187 self.dataOut.data_spc = jspectra
1188 self.dataOut.data_cspc = jcspectra
1189
1190 return 1
994 1191
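A small sketch of the height-selection step above: heights are ranked by their summed power and the quietest nhei_interf of them are used to estimate the interference spectrum (the power values are assumptions):

    import numpy

    power = numpy.array([5.0, 2.0, 9.0, 1.0, 4.0])   # assumed summed power per height
    nhei_interf, offhei_interf = 2, 0
    psort = power.ravel().argsort()
    quietest = psort[range(offhei_interf, nhei_interf + offhei_interf)]
    print quietest                                   # -> [3 1]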
995 1192 def setRadarFrequency(self, frequency=None):
996 1193 if frequency != None:
997 1194 self.dataOut.frequency = frequency
998 1195
999 1196 return 1
1000 1197
1001 1198
1002 1199 class IncohInt(Operation):
1003 1200
1004 1201
1005 1202 __profIndex = 0
1006 1203 __withOverapping = False
1007 1204
1008 1205 __byTime = False
1009 1206 __initime = None
1010 1207 __lastdatatime = None
1011 1208 __integrationtime = None
1012 1209
1013 1210 __buffer_spc = None
1014 1211 __buffer_cspc = None
1015 1212 __buffer_dc = None
1016 1213
1017 1214 __dataReady = False
1018 1215
1019 1216 __timeInterval = None
1020 1217
1021 1218 n = None
1022 1219
1023 1220
1024 1221
1025 1222 def __init__(self):
1026 1223
1027 1224 self.__isConfig = False
1028 1225
1029 1226 def setup(self, n=None, timeInterval=None, overlapping=False):
1030 1227 """
1031 1228 Set the parameters of the integration class.
1032 1229
1033 1230 Inputs:
1034 1231
1035 1232 n : Number of incoherent integrations
1036 1233 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
1037 1234 overlapping :
1038 1235
1039 1236 """
1040 1237
1041 1238 self.__initime = None
1042 1239 self.__lastdatatime = 0
1043 1240 self.__buffer_spc = None
1044 1241 self.__buffer_cspc = None
1045 1242 self.__buffer_dc = None
1046 1243 self.__dataReady = False
1047 1244
1048 1245
1049 1246 if n == None and timeInterval == None:
1050 1247 raise ValueError, "n or timeInterval should be specified ..."
1051 1248
1052 1249 if n != None:
1053 1250 self.n = n
1054 1251 self.__byTime = False
1055 1252 else:
1056 1253 self.__integrationtime = timeInterval #if (type(timeInterval)!=integer) -> change this line
1057 1254 self.n = 9999
1058 1255 self.__byTime = True
1059 1256
1060 1257 if overlapping:
1061 1258 self.__withOverapping = True
1062 1259 else:
1063 1260 self.__withOverapping = False
1064 1261 self.__buffer_spc = 0
1065 1262 self.__buffer_cspc = 0
1066 1263 self.__buffer_dc = 0
1067 1264
1068 1265 self.__profIndex = 0
1069 1266
1070 1267 def putData(self, data_spc, data_cspc, data_dc):
1071 1268
1072 1269 """
1073 1270 Add a profile to the __buffer_spc and increase in one the __profileIndex
1074 1271
1075 1272 """
1076 1273
1077 1274 if not self.__withOverapping:
1078 1275 self.__buffer_spc += data_spc
1079 1276
1080 1277 if data_cspc == None:
1081 1278 self.__buffer_cspc = None
1082 1279 else:
1083 1280 self.__buffer_cspc += data_cspc
1084 1281
1085 1282 if data_dc == None:
1086 1283 self.__buffer_dc = None
1087 1284 else:
1088 1285 self.__buffer_dc += data_dc
1089 1286
1090 1287 self.__profIndex += 1
1091 1288 return
1092 1289
1093 1290 #Overlapping data
1094 1291 nChannels, nFFTPoints, nHeis = data_spc.shape
1095 1292 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
1096 1293 if data_cspc != None:
1097 1294 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
1098 1295 if data_dc != None:
1099 1296 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
1100 1297
1101 1298 #If the buffer is empty then it takes the data value
1102 1299 if self.__buffer_spc == None:
1103 1300 self.__buffer_spc = data_spc
1104 1301
1105 1302 if data_cspc == None:
1106 1303 self.__buffer_cspc = None
1107 1304 else:
1108 1305 self.__buffer_cspc += data_cspc
1109 1306
1110 1307 if data_dc == None:
1111 1308 self.__buffer_dc = None
1112 1309 else:
1113 1310 self.__buffer_dc += data_dc
1114 1311
1115 1312 self.__profIndex += 1
1116 1313 return
1117 1314
1118 1315 #If the buffer length is lower than n then stack the data value
1119 1316 if self.__profIndex < self.n:
1120 1317 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
1121 1318
1122 1319 if data_cspc != None:
1123 1320 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
1124 1321
1125 1322 if data_dc != None:
1126 1323 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
1127 1324
1128 1325 self.__profIndex += 1
1129 1326 return
1130 1327
1131 1328 #If the buffer length is equal to n then replacing the last buffer value with the data value
1132 1329 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
1133 1330 self.__buffer_spc[self.n-1] = data_spc
1134 1331
1135 1332 if data_cspc != None:
1136 1333 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
1137 1334 self.__buffer_cspc[self.n-1] = data_cspc
1138 1335
1139 1336 if data_dc != None:
1140 1337 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
1141 1338 self.__buffer_dc[self.n-1] = data_dc
1142 1339
1143 1340 self.__profIndex = self.n
1144 1341 return
1145 1342
1146 1343
1147 1344 def pushData(self):
1148 1345 """
1149 1346 Return the sum of the last profiles and the profiles used in the sum.
1150 1347
1151 1348 Affected:
1152 1349
1153 1350 self.__profileIndex
1154 1351
1155 1352 """
1156 1353 data_spc = None
1157 1354 data_cspc = None
1158 1355 data_dc = None
1159 1356
1160 1357 if not self.__withOverapping:
1161 1358 data_spc = self.__buffer_spc
1162 1359 data_cspc = self.__buffer_cspc
1163 1360 data_dc = self.__buffer_dc
1164 1361
1165 1362 n = self.__profIndex
1166 1363
1167 1364 self.__buffer_spc = 0
1168 1365 self.__buffer_cspc = 0
1169 1366 self.__buffer_dc = 0
1170 1367 self.__profIndex = 0
1171 1368
1172 1369 return data_spc, data_cspc, data_dc, n
1173 1370
1174 1371 #Integration with Overlapping
1175 1372 data_spc = numpy.sum(self.__buffer_spc, axis=0)
1176 1373
1177 1374 if self.__buffer_cspc != None:
1178 1375 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
1179 1376
1180 1377 if self.__buffer_dc != None:
1181 1378 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1182 1379
1183 1380 n = self.__profIndex
1184 1381
1185 1382 return data_spc, data_cspc, data_dc, n
1186 1383
1187 1384 def byProfiles(self, *args):
1188 1385
1189 1386 self.__dataReady = False
1190 1387 avgdata_spc = None
1191 1388 avgdata_cspc = None
1192 1389 avgdata_dc = None
1193 1390 n = None
1194 1391
1195 1392 self.putData(*args)
1196 1393
1197 1394 if self.__profIndex == self.n:
1198 1395
1199 1396 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1200 1397 self.__dataReady = True
1201 1398
1202 1399 return avgdata_spc, avgdata_cspc, avgdata_dc
1203 1400
1204 1401 def byTime(self, datatime, *args):
1205 1402
1206 1403 self.__dataReady = False
1207 1404 avgdata_spc = None
1208 1405 avgdata_cspc = None
1209 1406 avgdata_dc = None
1210 1407 n = None
1211 1408
1212 1409 self.putData(*args)
1213 1410
1214 1411 if (datatime - self.__initime) >= self.__integrationtime:
1215 1412 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1216 1413 self.n = n
1217 1414 self.__dataReady = True
1218 1415
1219 1416 return avgdata_spc, avgdata_cspc, avgdata_dc
1220 1417
1221 1418 def integrate(self, datatime, *args):
1222 1419
1223 1420 if self.__initime == None:
1224 1421 self.__initime = datatime
1225 1422
1226 1423 if self.__byTime:
1227 1424 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1228 1425 else:
1229 1426 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1230 1427
1231 1428 self.__lastdatatime = datatime
1232 1429
1233 1430 if avgdata_spc == None:
1234 1431 return None, None, None, None
1235 1432
1236 1433 avgdatatime = self.__initime
1237 1434 try:
1238 1435 self.__timeInterval = (self.__lastdatatime - self.__initime)/(self.n - 1)
1239 1436 except:
1240 1437 self.__timeInterval = self.__lastdatatime - self.__initime
1241 1438
1242 1439 deltatime = datatime -self.__lastdatatime
1243 1440
1244 1441 if not self.__withOverapping:
1245 1442 self.__initime = datatime
1246 1443 else:
1247 1444 self.__initime += deltatime
1248 1445
1249 1446 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1250 1447
1251 1448 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1252 1449
1253 1450 if n==1:
1254 1451 dataOut.flagNoData = False
1255 1452 return
1256 1453
1257 1454 if not self.__isConfig:
1258 1455 self.setup(n, timeInterval, overlapping)
1259 1456 self.__isConfig = True
1260 1457
1261 1458 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1262 1459 dataOut.data_spc,
1263 1460 dataOut.data_cspc,
1264 1461 dataOut.data_dc)
1265 1462
1266 1463 # dataOut.timeInterval *= n
1267 1464 dataOut.flagNoData = True
1268 1465
1269 1466 if self.__dataReady:
1270 1467
1271 1468 dataOut.data_spc = avgdata_spc
1272 1469 dataOut.data_cspc = avgdata_cspc
1273 1470 dataOut.data_dc = avgdata_dc
1274 1471
1275 1472 dataOut.nIncohInt *= self.n
1276 1473 dataOut.utctime = avgdatatime
1277 1474 #dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1278 1475 dataOut.timeInterval = self.__timeInterval*self.n
1279 1476 dataOut.flagNoData = False
1280 1477
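# Hedged usage sketch for the incoherent-integration Operation whose run() method ends just above.
# The Operation's class name is not visible in this hunk, so an instance is passed in as
# incohIntOp; the dataOut attributes used here (utctime, data_spc, flagNoData) are the ones
# that run() itself reads and writes.

def example_incoherent_integration(incohIntOp, spectraBlocks):
    # incohIntOp : instance of the integration Operation defined above
    # spectraBlocks : iterable of Spectra-like objects carrying data_spc/data_cspc/data_dc/utctime
    for dataOut in spectraBlocks:
        incohIntOp.run(dataOut, n=10)                   # accumulate 10 spectra, non-overlapping
        if not dataOut.flagNoData:                      # only True when an integrated block is ready
            yield dataOut.utctime, dataOut.data_spc
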
1281 1478 class ProfileConcat(Operation):
1282 1479
1283 1480 __isConfig = False
1284 1481 buffer = None
1285 1482
1286 1483 def __init__(self):
1287 1484
1288 1485 self.profileIndex = 0
1289 1486
1290 1487 def reset(self):
1291 1488 self.buffer = numpy.zeros_like(self.buffer)
1292 1489 self.start_index = 0
1293 1490 self.times = 1
1294 1491
1295 1492 def setup(self, data, m, n=1):
1296 1493 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
1297 1494 self.profiles = data.shape[1]
1298 1495 self.start_index = 0
1299 1496 self.times = 1
1300 1497
1301 1498 def concat(self, data):
1302 1499
1303 1500 self.buffer[:,self.start_index:self.profiles*self.times] = data.copy()
1304 1501 self.start_index = self.start_index + self.profiles
1305 1502
1306 1503 def run(self, dataOut, m):
1307 1504
1308 1505 dataOut.flagNoData = True
1309 1506
1310 1507 if not self.__isConfig:
1311 1508 self.setup(dataOut.data, m, 1)
1312 1509 self.__isConfig = True
1313 1510
1314 1511 self.concat(dataOut.data)
1315 1512 self.times += 1
1316 1513 if self.times > m:
1317 1514 dataOut.data = self.buffer
1318 1515 self.reset()
1319 1516 dataOut.flagNoData = False
1320 1517             # more header and dataOut properties should be updated here, e.g. the heights
1321 1518 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1322 1519 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * 5
1323 1520 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
1324 1521
1325 1522
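# Minimal sketch of the concatenation pattern used by ProfileConcat above: m consecutive
# profile blocks of shape (nChannels, nHeights) are packed side by side into a single block
# of shape (nChannels, nHeights*m). The helper name and shapes are illustrative assumptions.
import numpy

def concat_profiles_sketch(blocks, m):
    # blocks : list with at least m arrays, each of shape (nChannels, nHeights)
    nChannels, nHeights = blocks[0].shape
    out = numpy.zeros((nChannels, nHeights * m), dtype=blocks[0].dtype)
    for k in range(m):
        out[:, k * nHeights:(k + 1) * nHeights] = blocks[k]   # same slice bookkeeping as concat()
    return out

# e.g. concat_profiles_sketch([numpy.ones((4, 100))] * 3, m=3).shape -> (4, 300)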
1326 1523
1327 1524 class ProfileSelector(Operation):
1328 1525
1329 1526 profileIndex = None
1330 1527     # Total number of profiles
1331 1528 nProfiles = None
1332 1529
1333 1530 def __init__(self):
1334 1531
1335 1532 self.profileIndex = 0
1336 1533
1337 1534 def incIndex(self):
1338 1535 self.profileIndex += 1
1339 1536
1340 1537 if self.profileIndex >= self.nProfiles:
1341 1538 self.profileIndex = 0
1342 1539
1343 1540 def isProfileInRange(self, minIndex, maxIndex):
1344 1541
1345 1542 if self.profileIndex < minIndex:
1346 1543 return False
1347 1544
1348 1545 if self.profileIndex > maxIndex:
1349 1546 return False
1350 1547
1351 1548 return True
1352 1549
1353 1550 def isProfileInList(self, profileList):
1354 1551
1355 1552 if self.profileIndex not in profileList:
1356 1553 return False
1357 1554
1358 1555 return True
1359 1556
1360 1557 def run(self, dataOut, profileList=None, profileRangeList=None):
1361 1558
1362 1559 dataOut.flagNoData = True
1363 1560 self.nProfiles = dataOut.nProfiles
1364 1561
1365 1562 if profileList != None:
1366 1563 if self.isProfileInList(profileList):
1367 1564 dataOut.flagNoData = False
1368 1565
1369 1566 self.incIndex()
1370 1567 return 1
1371 1568
1372 1569
1373 1570 elif profileRangeList != None:
1374 1571 minIndex = profileRangeList[0]
1375 1572 maxIndex = profileRangeList[1]
1376 1573 if self.isProfileInRange(minIndex, maxIndex):
1377 1574 dataOut.flagNoData = False
1378 1575
1379 1576 self.incIndex()
1380 1577 return 1
1381 1578
1382 1579 else:
1383 1580 raise ValueError, "ProfileSelector needs profileList or profileRangeList"
1384 1581
1385 1582 return 0
1386 1583
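# Hedged usage sketch for ProfileSelector above: it keeps an internal profile counter and only
# clears flagNoData for the requested profiles. The surrounding loop and the Voltage-like
# objects are assumptions about how the Operation is typically driven.

def example_profile_selection(selector, voltageBlocks):
    # selector : ProfileSelector instance
    # voltageBlocks : iterable of Voltage-like objects with nProfiles and flagNoData
    for dataOut in voltageBlocks:
        selector.run(dataOut, profileRangeList=[0, 127])   # keep only profiles 0..127 of each cycle
        if not dataOut.flagNoData:
            yield dataOut
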
1387 1584 class SpectraHeisProc(ProcessingUnit):
1388 1585 def __init__(self):
1389 1586 self.objectDict = {}
1390 1587 # self.buffer = None
1391 1588 # self.firstdatatime = None
1392 1589 # self.profIndex = 0
1393 1590 self.dataOut = SpectraHeis()
1394 1591
1395 1592 def __updateObjFromInput(self):
1396 1593 self.dataOut.timeZone = self.dataIn.timeZone
1397 1594 self.dataOut.dstFlag = self.dataIn.dstFlag
1398 1595 self.dataOut.errorCount = self.dataIn.errorCount
1399 1596 self.dataOut.useLocalTime = self.dataIn.useLocalTime
1400 1597
1401 1598 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
1402 1599 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
1403 1600 self.dataOut.channelList = self.dataIn.channelList
1404 1601 self.dataOut.heightList = self.dataIn.heightList
1405 1602 # self.dataOut.dtype = self.dataIn.dtype
1406 1603 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
1407 1604 # self.dataOut.nHeights = self.dataIn.nHeights
1408 1605 # self.dataOut.nChannels = self.dataIn.nChannels
1409 1606 self.dataOut.nBaud = self.dataIn.nBaud
1410 1607 self.dataOut.nCode = self.dataIn.nCode
1411 1608 self.dataOut.code = self.dataIn.code
1412 1609 # self.dataOut.nProfiles = 1
1413 1610 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
1414 1611 self.dataOut.nFFTPoints = self.dataIn.nHeights
1415 1612 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
1416 1613 # self.dataOut.flagNoData = self.dataIn.flagNoData
1417 1614 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
1418 1615 self.dataOut.utctime = self.dataIn.utctime
1419 1616 # self.dataOut.utctime = self.firstdatatime
1420 1617         self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
1421 1618         self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data has no flip applied
1422 1619 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
1423 1620 self.dataOut.nCohInt = self.dataIn.nCohInt
1424 1621 self.dataOut.nIncohInt = 1
1425 1622 self.dataOut.ippSeconds= self.dataIn.ippSeconds
1426 1623 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
1427 1624
1428 1625 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
1429 1626 # self.dataOut.set=self.dataIn.set
1430 1627 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
1431 1628
1432 1629
1433 1630 def __updateObjFromFits(self):
1434 1631 self.dataOut.utctime = self.dataIn.utctime
1435 1632 self.dataOut.channelIndexList = self.dataIn.channelIndexList
1436 1633
1437 1634 self.dataOut.channelList = self.dataIn.channelList
1438 1635 self.dataOut.heightList = self.dataIn.heightList
1439 1636 self.dataOut.data_spc = self.dataIn.data
1440 1637 self.dataOut.timeInterval = self.dataIn.timeInterval
1441 1638 self.dataOut.timeZone = self.dataIn.timeZone
1442 1639 self.dataOut.useLocalTime = True
1443 1640 # self.dataOut.
1444 1641 # self.dataOut.
1445 1642
1446 1643 def __getFft(self):
1447 1644
1448 1645 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
1449 1646 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
1450 1647 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
1451 1648 self.dataOut.data_spc = spc
1452 1649
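# Standalone sketch of the spectral estimate computed by __getFft() above: an FFT along the
# sample axis, shifted so zero frequency is centred, then magnitude squared normalised by the
# number of FFT points. The function name and array shapes are illustrative assumptions.
import numpy

def power_spectrum_sketch(volts):
    # volts : complex array of shape (nChannels, nSamples); returns one spectrum per channel
    nFFTPoints = volts.shape[1]
    fft_volt = numpy.fft.fftshift(numpy.fft.fft(volts, axis=1), axes=(1,))
    return numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / nFFTPoints

# e.g. power_spectrum_sketch(numpy.random.randn(2, 1024) + 1j*numpy.random.randn(2, 1024)).shape -> (2, 1024)
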
1453 1650 def init(self):
1454 1651
1455 1652 self.dataOut.flagNoData = True
1456 1653
1457 1654 if self.dataIn.type == "Fits":
1458 1655 self.__updateObjFromFits()
1459 1656 self.dataOut.flagNoData = False
1460 1657 return
1461 1658
1462 1659 if self.dataIn.type == "SpectraHeis":
1463 1660 self.dataOut.copy(self.dataIn)
1464 1661 return
1465 1662
1466 1663 if self.dataIn.type == "Voltage":
1467 1664 self.__updateObjFromInput()
1468 1665 self.__getFft()
1469 1666 self.dataOut.flagNoData = False
1470 1667
1471 1668 return
1472 1669
1473 raise ValuError, "The type object %s is not valid"%(self.dataIn.type)
1670 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
1474 1671
1475 1672
1476 1673 def selectChannels(self, channelList):
1477 1674
1478 1675 channelIndexList = []
1479 1676
1480 1677 for channel in channelList:
1481 1678 index = self.dataOut.channelList.index(channel)
1482 1679 channelIndexList.append(index)
1483 1680
1484 1681 self.selectChannelsByIndex(channelIndexList)
1485 1682
1486 1683 def selectChannelsByIndex(self, channelIndexList):
1487 1684 """
1488 1685         Selects a block of data by channel according to channelIndexList
1489 1686
1490 1687 Input:
1491 1688             channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
1492 1689
1493 1690 Affected:
1494 1691 self.dataOut.data
1495 1692 self.dataOut.channelIndexList
1496 1693 self.dataOut.nChannels
1497 1694 self.dataOut.m_ProcessingHeader.totalSpectra
1498 1695 self.dataOut.systemHeaderObj.numChannels
1499 1696 self.dataOut.m_ProcessingHeader.blockSize
1500 1697
1501 1698 Return:
1502 1699 None
1503 1700 """
1504 1701
1505 1702 for channelIndex in channelIndexList:
1506 1703 if channelIndex not in self.dataOut.channelIndexList:
1507 1704 print channelIndexList
1508 1705 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
1509 1706
1510 1707 nChannels = len(channelIndexList)
1511 1708
1512 1709 data_spc = self.dataOut.data_spc[channelIndexList,:]
1513 1710
1514 1711 self.dataOut.data_spc = data_spc
1515 1712 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
1516 1713
1517 1714 return 1
1518 1715
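# Minimal sketch of the index-based channel selection done in selectChannelsByIndex() above:
# fancy indexing on the channel axis plus a rebuilt channelList. Names are illustrative.

def select_channels_sketch(data_spc, channelList, channelIndexList):
    # data_spc : numpy array of shape (nChannels, ...); channelList : physical channel numbers
    for channelIndex in channelIndexList:
        if channelIndex not in range(len(channelList)):
            raise ValueError("The value %d in channelIndexList is not valid" % channelIndex)
    return data_spc[channelIndexList, :], [channelList[i] for i in channelIndexList]
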
1519 1716 class IncohInt4SpectraHeis(Operation):
1520 1717
1521 1718 __isConfig = False
1522 1719
1523 1720 __profIndex = 0
1524 1721 __withOverapping = False
1525 1722
1526 1723 __byTime = False
1527 1724 __initime = None
1528 1725 __lastdatatime = None
1529 1726 __integrationtime = None
1530 1727
1531 1728 __buffer = None
1532 1729
1533 1730 __dataReady = False
1534 1731
1535 1732 n = None
1536 1733
1537 1734
1538 1735 def __init__(self):
1539 1736
1540 1737 self.__isConfig = False
1541 1738
1542 1739 def setup(self, n=None, timeInterval=None, overlapping=False):
1543 1740 """
1544 1741 Set the parameters of the integration class.
1545 1742
1546 1743 Inputs:
1547 1744
1548 1745             n : Number of incoherent integrations
1549 1746             timeInterval : Integration time in seconds. Ignored when the parameter "n" is given
1550 1747             overlapping : If True, consecutive outputs share buffered profiles (sliding-window integration)
1551 1748
1552 1749 """
1553 1750
1554 1751 self.__initime = None
1555 1752 self.__lastdatatime = 0
1556 1753 self.__buffer = None
1557 1754 self.__dataReady = False
1558 1755
1559 1756
1560 1757 if n == None and timeInterval == None:
1561 1758 raise ValueError, "n or timeInterval should be specified ..."
1562 1759
1563 1760 if n != None:
1564 1761 self.n = n
1565 1762 self.__byTime = False
1566 1763 else:
1567 1764 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
1568 1765 self.n = 9999
1569 1766 self.__byTime = True
1570 1767
1571 1768 if overlapping:
1572 1769 self.__withOverapping = True
1573 1770 self.__buffer = None
1574 1771 else:
1575 1772 self.__withOverapping = False
1576 1773 self.__buffer = 0
1577 1774
1578 1775 self.__profIndex = 0
1579 1776
1580 1777 def putData(self, data):
1581 1778
1582 1779 """
1583 1780         Add a profile to the __buffer and increase __profIndex by one
1584 1781
1585 1782 """
1586 1783
1587 1784 if not self.__withOverapping:
1588 1785 self.__buffer += data.copy()
1589 1786 self.__profIndex += 1
1590 1787 return
1591 1788
1592 1789 #Overlapping data
1593 1790 nChannels, nHeis = data.shape
1594 1791 data = numpy.reshape(data, (1, nChannels, nHeis))
1595 1792
1596 1793         #If the buffer is empty, start it with the incoming data
1597 1794 if self.__buffer == None:
1598 1795 self.__buffer = data
1599 1796 self.__profIndex += 1
1600 1797 return
1601 1798
1602 1799         #If the buffer holds fewer than n profiles, stack the new data onto it
1603 1800 if self.__profIndex < self.n:
1604 1801 self.__buffer = numpy.vstack((self.__buffer, data))
1605 1802 self.__profIndex += 1
1606 1803 return
1607 1804
1608 1805         #If the buffer already holds n profiles, discard the oldest one and append the new data
1609 1806 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
1610 1807 self.__buffer[self.n-1] = data
1611 1808 self.__profIndex = self.n
1612 1809 return
1613 1810
1614 1811
1615 1812 def pushData(self):
1616 1813 """
1617 1814         Return the sum of the buffered spectra and the number of profiles used in the sum.
1618 1815
1619 1816 Affected:
1620 1817
1621 1818         self.__profIndex
1622 1819
1623 1820 """
1624 1821
1625 1822 if not self.__withOverapping:
1626 1823 data = self.__buffer
1627 1824 n = self.__profIndex
1628 1825
1629 1826 self.__buffer = 0
1630 1827 self.__profIndex = 0
1631 1828
1632 1829 return data, n
1633 1830
1634 1831 #Integration with Overlapping
1635 1832 data = numpy.sum(self.__buffer, axis=0)
1636 1833 n = self.__profIndex
1637 1834
1638 1835 return data, n
1639 1836
1640 1837 def byProfiles(self, data):
1641 1838
1642 1839 self.__dataReady = False
1643 1840 avgdata = None
1644 1841 n = None
1645 1842
1646 1843 self.putData(data)
1647 1844
1648 1845 if self.__profIndex == self.n:
1649 1846
1650 1847 avgdata, n = self.pushData()
1651 1848 self.__dataReady = True
1652 1849
1653 1850 return avgdata
1654 1851
1655 1852 def byTime(self, data, datatime):
1656 1853
1657 1854 self.__dataReady = False
1658 1855 avgdata = None
1659 1856 n = None
1660 1857
1661 1858 self.putData(data)
1662 1859
1663 1860 if (datatime - self.__initime) >= self.__integrationtime:
1664 1861 avgdata, n = self.pushData()
1665 1862 self.n = n
1666 1863 self.__dataReady = True
1667 1864
1668 1865 return avgdata
1669 1866
1670 1867 def integrate(self, data, datatime=None):
1671 1868
1672 1869 if self.__initime == None:
1673 1870 self.__initime = datatime
1674 1871
1675 1872 if self.__byTime:
1676 1873 avgdata = self.byTime(data, datatime)
1677 1874 else:
1678 1875 avgdata = self.byProfiles(data)
1679 1876
1680 1877
1681 1878 self.__lastdatatime = datatime
1682 1879
1683 1880 if avgdata == None:
1684 1881 return None, None
1685 1882
1686 1883 avgdatatime = self.__initime
1687 1884
1688 1885 deltatime = datatime -self.__lastdatatime
1689 1886
1690 1887 if not self.__withOverapping:
1691 1888 self.__initime = datatime
1692 1889 else:
1693 1890 self.__initime += deltatime
1694 1891
1695 1892 return avgdata, avgdatatime
1696 1893
1697 1894 def run(self, dataOut, **kwargs):
1698 1895
1699 1896 if not self.__isConfig:
1700 1897 self.setup(**kwargs)
1701 1898 self.__isConfig = True
1702 1899
1703 1900 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
1704 1901
1705 1902 # dataOut.timeInterval *= n
1706 1903 dataOut.flagNoData = True
1707 1904
1708 1905 if self.__dataReady:
1709 1906 dataOut.data_spc = avgdata
1710 1907 dataOut.nIncohInt *= self.n
1711 1908 # dataOut.nCohInt *= self.n
1712 1909 dataOut.utctime = avgdatatime
1713 1910 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
1714 1911 # dataOut.timeInterval = self.__timeInterval*self.n
1715 1912 dataOut.flagNoData = False
1716 1913
1717 1914
1718 1915
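# Hedged usage sketch for IncohInt4SpectraHeis above, exercising the time-driven branch: when
# timeInterval is given instead of n, an integrated spectrum is released once the elapsed
# utctime exceeds that interval (seconds are assumed, since __integrationtime is compared
# directly against utctime differences).

def example_heis_time_integration(heisBlocks, seconds=10):
    # heisBlocks : iterable of SpectraHeis-like objects with utctime, data_spc, nIncohInt, ippSeconds
    op = IncohInt4SpectraHeis()
    for dataOut in heisBlocks:
        op.run(dataOut, timeInterval=seconds)           # no n, so integration is driven by time
        if not dataOut.flagNoData:
            yield dataOut.utctime, dataOut.data_spc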
1719 1916
1720 1917 No newline at end of file