new changes to identify and select beams
Daniel Valdez -
r477:9729744d0cb2
@@ -1,690 +1,702
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11 import time
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14
15 15 def hildebrand_sekhon(data, navg):
16 16
17 17 data = data.copy()
18 18
19 19 sortdata = numpy.sort(data,axis=None)
20 20 lenOfData = len(sortdata)
21 21 nums_min = lenOfData/10
22 22
23 23 if (lenOfData/10) > 2:
24 24 nums_min = lenOfData/10
25 25 else:
26 26 nums_min = 2
27 27
28 28 sump = 0.
29 29
30 30 sumq = 0.
31 31
32 32 j = 0
33 33
34 34 cont = 1
35 35
36 36 while((cont==1)and(j<lenOfData)):
37 37
38 38 sump += sortdata[j]
39 39
40 40 sumq += sortdata[j]**2
41 41
42 42 j += 1
43 43
44 44 if j > nums_min:
45 45 rtest = float(j)/(j-1) + 1.0/navg
46 46 if ((sumq*j) > (rtest*sump**2)):
47 47 j = j - 1
48 48 sump = sump - sortdata[j]
49 49 sumq = sumq - sortdata[j]**2
50 50 cont = 0
51 51
52 52 lnoise = sump /j
53 53 stdv = numpy.sqrt((sumq - lnoise**2)/(j - 1))
54 54 return lnoise
55 55
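A minimal usage sketch for hildebrand_sekhon, assuming a synthetic noise-only spectrum and an illustrative navg; the array shape and values below are assumptions, not data from this changeset.

    import numpy

    # Illustrative only: chi-square distributed power samples emulate a
    # noise-only spectrum of shape (profiles, heights) after navg
    # incoherent integrations.
    navg = 8
    spectrum = numpy.random.chisquare(2*navg, size=(128, 100)) / (2.*navg)

    noise_level = hildebrand_sekhon(spectrum, navg)
    print "Estimated noise level: %f" % noise_level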
56 56 class JROData:
57 57
58 58 # m_BasicHeader = BasicHeader()
59 59 # m_ProcessingHeader = ProcessingHeader()
60 60
61 61 systemHeaderObj = SystemHeader()
62 62
63 63 radarControllerHeaderObj = RadarControllerHeader()
64 64
65 65 # data = None
66 66
67 67 type = None
68 68
69 69 dtype = None
70 70
71 71 # nChannels = None
72 72
73 73 # nHeights = None
74 74
75 75 nProfiles = None
76 76
77 77 heightList = None
78 78
79 79 channelList = None
80 80
81 81 flagNoData = True
82 82
83 83 flagTimeBlock = False
84 84
85 85 useLocalTime = False
86 86
87 87 utctime = None
88 88
89 89 timeZone = None
90 90
91 91 dstFlag = None
92 92
93 93 errorCount = None
94 94
95 95 blocksize = None
96 96
97 97 nCode = None
98 98
99 99 nBaud = None
100 100
101 101 code = None
102 102
103 103 flagDecodeData = False #assume the data is not decoded
104 104
105 105 flagDeflipData = False #assume the data has not been deflipped
106 106
107 107 flagShiftFFT = False
108 108
109 109 ippSeconds = None
110 110
111 111 timeInterval = None
112 112
113 113 nCohInt = None
114 114
115 115 noise = None
116 116
117 117 windowOfFilter = 1
118 118
119 119 #Speed of light
120 120 C = 3e8
121 121
122 122 frequency = 49.92e6
123 123
124 124 realtime = False
125 125
126 126 beacon_heiIndexList = None
127 127
128 128 last_block = None
129 129
130 130 blocknow = None
131 131
132 132 def __init__(self):
133 133
134 134 raise ValueError, "This class has not been implemented"
135 135
136 136 def copy(self, inputObj=None):
137 137
138 138 if inputObj == None:
139 139 return copy.deepcopy(self)
140 140
141 141 for key in inputObj.__dict__.keys():
142 142 self.__dict__[key] = inputObj.__dict__[key]
143 143
144 144 def deepcopy(self):
145 145
146 146 return copy.deepcopy(self)
147 147
148 148 def isEmpty(self):
149 149
150 150 return self.flagNoData
151 151
152 152 def getNoise(self):
153 153
154 154 raise ValueError, "Not implemented"
155 155
156 156 def getNChannels(self):
157 157
158 158 return len(self.channelList)
159 159
160 160 def getChannelIndexList(self):
161 161
162 162 return range(self.nChannels)
163 163
164 164 def getNHeights(self):
165 165
166 166 return len(self.heightList)
167 167
168 168 def getHeiRange(self, extrapoints=0):
169 169
170 170 heis = self.heightList
171 171 # deltah = self.heightList[1] - self.heightList[0]
172 172 #
173 173 # heis.append(self.heightList[-1])
174 174
175 175 return heis
176 176
177 177 def getltctime(self):
178 178
179 179 if self.useLocalTime:
180 180 return self.utctime - self.timeZone*60
181 181
182 182 return self.utctime
183 183
184 184 def getDatatime(self):
185 185
186 186 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
187 187 return datatime
188 188
189 189 def getTimeRange(self):
190 190
191 191 datatime = []
192 192
193 193 datatime.append(self.ltctime)
194 194 datatime.append(self.ltctime + self.timeInterval)
195 195
196 196 datatime = numpy.array(datatime)
197 197
198 198 return datatime
199 199
200 200 def getFmax(self):
201 201
202 202 PRF = 1./(self.ippSeconds * self.nCohInt)
203 203
204 204 fmax = PRF/2.
205 205
206 206 return fmax
207 207
208 208 def getVmax(self):
209 209
210 210 _lambda = self.C/self.frequency
211 211
212 212 vmax = self.getFmax() * _lambda
213 213
214 214 return vmax
215 215
216 216 nChannels = property(getNChannels, "I'm the 'nChannels' property.")
217 217 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
218 218 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
219 219 noise = property(getNoise, "I'm the 'noise' property.")
220 220 datatime = property(getDatatime, "I'm the 'datatime' property")
221 221 ltctime = property(getltctime, "I'm the 'ltctime' property")
222 222
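For orientation, a small worked sketch of the relations behind getFmax and getVmax above (PRF = 1/(ippSeconds*nCohInt), fmax = PRF/2, vmax = fmax*C/frequency); the numbers are illustrative and not read from any header.

    # Illustrative values: 1 ms IPP, 2 coherent integrations, 49.92 MHz.
    C = 3e8
    frequency = 49.92e6
    ippSeconds = 0.001
    nCohInt = 2

    PRF = 1./(ippSeconds*nCohInt)     # 500 Hz
    fmax = PRF/2.                     # 250 Hz, Nyquist frequency
    vmax = fmax*(C/frequency)         # ~1502 m/s, aliasing velocity
    print "fmax = %.1f Hz, vmax = %.1f m/s" % (fmax, vmax)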
223 223 class Voltage(JROData):
224 224
225 225 #data is a 2-dimensional numpy array (channels, heights)
226 226 data = None
227 227
228 228 def __init__(self):
229 229 '''
230 230 Constructor
231 231 '''
232 232
233 233 self.radarControllerHeaderObj = RadarControllerHeader()
234 234
235 235 self.systemHeaderObj = SystemHeader()
236 236
237 237 self.type = "Voltage"
238 238
239 239 self.data = None
240 240
241 241 self.dtype = None
242 242
243 243 # self.nChannels = 0
244 244
245 245 # self.nHeights = 0
246 246
247 247 self.nProfiles = None
248 248
249 249 self.heightList = None
250 250
251 251 self.channelList = None
252 252
253 253 # self.channelIndexList = None
254 254
255 255 self.flagNoData = True
256 256
257 257 self.flagTimeBlock = False
258 258
259 259 self.utctime = None
260 260
261 261 self.timeZone = None
262 262
263 263 self.dstFlag = None
264 264
265 265 self.errorCount = None
266 266
267 267 self.nCohInt = None
268 268
269 269 self.blocksize = None
270 270
271 271 self.flagDecodeData = False #assume the data is not decoded
272 272
273 273 self.flagDeflipData = False #assume the data has not been deflipped
274 274
275 275 self.flagShiftFFT = False
276 276
277 277
278 278 def getNoisebyHildebrand(self):
279 279 """
280 280 Determines the noise level using the Hildebrand-Sekhon method
281 281
282 282 Return:
283 283 noiselevel
284 284 """
285 285
286 286 for channel in range(self.nChannels):
287 287 daux = self.data_spc[channel,:,:]
288 288 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
289 289
290 290 return self.noise
291 291
292 292 def getNoise(self, type = 1):
293 293
294 294 self.noise = numpy.zeros(self.nChannels)
295 295
296 296 if type == 1:
297 297 noise = self.getNoisebyHildebrand()
298 298
299 299 return 10*numpy.log10(noise)
300 300
301 301 class Spectra(JROData):
302 302
303 303 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
304 304 data_spc = None
305 305
306 306 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
307 307 data_cspc = None
308 308
309 309 #data_dc is a 2-dimensional numpy array (channels, heights)
310 310 data_dc = None
311 311
312 312 nFFTPoints = None
313 313
314 314 nPairs = None
315 315
316 316 pairsList = None
317 317
318 318 nIncohInt = None
319 319
320 320 wavelength = None #Needed to compute the velocity range from the frequency
321 321
322 322 nCohInt = None #required to determine the timeInterval value
323 323
324 324 ippFactor = None
325 325
326 326 def __init__(self):
327 327 '''
328 328 Constructor
329 329 '''
330 330
331 331 self.radarControllerHeaderObj = RadarControllerHeader()
332 332
333 333 self.systemHeaderObj = SystemHeader()
334 334
335 335 self.type = "Spectra"
336 336
337 337 # self.data = None
338 338
339 339 self.dtype = None
340 340
341 341 # self.nChannels = 0
342 342
343 343 # self.nHeights = 0
344 344
345 345 self.nProfiles = None
346 346
347 347 self.heightList = None
348 348
349 349 self.channelList = None
350 350
351 351 # self.channelIndexList = None
352 352
353 353 self.flagNoData = True
354 354
355 355 self.flagTimeBlock = False
356 356
357 357 self.utctime = None
358 358
359 359 self.nCohInt = None
360 360
361 361 self.nIncohInt = None
362 362
363 363 self.blocksize = None
364 364
365 365 self.nFFTPoints = None
366 366
367 367 self.wavelength = None
368 368
369 369 self.flagDecodeData = False #assume the data is not decoded
370 370
371 371 self.flagDeflipData = False #assume the data has not been deflipped
372 372
373 373 self.flagShiftFFT = False
374 374
375 375 self.ippFactor = 1
376 376
377 377 self.noise = None
378 378
379 379 self.beacon_heiIndexList = []
380 380
381 381
382 382 def getNoisebyHildebrand(self):
383 383 """
384 384 Determines the noise level using the Hildebrand-Sekhon method
385 385
386 386 Return:
387 387 noiselevel
388 388 """
389 389 noise = numpy.zeros(self.nChannels)
390 390 for channel in range(self.nChannels):
391 391 daux = self.data_spc[channel,:,:]
392 392 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
393 393
394 394 return noise
395 395
396 396 def getNoise(self):
397 397 if self.noise != None:
398 398 return self.noise
399 399 else:
400 400 noise = self.getNoisebyHildebrand()
401 401 return noise
402 402
403 403
404 404 def getFreqRange(self, extrapoints=0):
405 405
406 406 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
407 407 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
408 408
409 409 return freqrange
410 410
411 411 def getVelRange(self, extrapoints=0):
412 412
413 413 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
414 414 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
415 415
416 416 return velrange
417 417
418 418 def getNPairs(self):
419 419
420 420 return len(self.pairsList)
421 421
422 422 def getPairsIndexList(self):
423 423
424 424 return range(self.nPairs)
425 425
426 426 def getNormFactor(self):
427 427 pwcode = 1
428 428 if self.flagDecodeData:
429 429 pwcode = numpy.sum(self.code[0]**2)
430 430 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
431 431 normFactor = self.nProfiles*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
432 432
433 433 return normFactor
434 434
435 435 def getFlagCspc(self):
436 436
437 437 if self.data_cspc == None:
438 438 return True
439 439
440 440 return False
441 441
442 442 def getFlagDc(self):
443 443
444 444 if self.data_dc == None:
445 445 return True
446 446
447 447 return False
448 448
449 449 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
450 450 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
451 451 normFactor = property(getNormFactor, "I'm the 'normFactor' property.")
452 452 flag_cspc = property(getFlagCspc)
453 453 flag_dc = property(getFlagDc)
454 454
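A hedged sketch of how getVelRange and getNormFactor of the Spectra class above are typically combined to scale a spectrum for display; every attribute value below is an illustrative assumption (in practice they come from the processing headers).

    spcObj = Spectra()
    spcObj.nProfiles = 128
    spcObj.nFFTPoints = 128
    spcObj.nIncohInt = 10
    spcObj.nCohInt = 1
    spcObj.windowOfFilter = 1
    spcObj.flagDecodeData = False
    spcObj.ippFactor = 1
    spcObj.ippSeconds = 0.001
    spcObj.frequency = 49.92e6

    velaxis = spcObj.getVelRange()    # velocity axis, nFFTPoints samples
    factor = spcObj.getNormFactor()   # nProfiles*nIncohInt*nCohInt*pwcode*window
    # powerdB = 10*numpy.log10(spcObj.data_spc[0]/factor)  # per-channel scaling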
455 455 class SpectraHeis(JROData):
456 456
457 457 data_spc = None
458 458
459 459 data_cspc = None
460 460
461 461 data_dc = None
462 462
463 463 nFFTPoints = None
464 464
465 465 nPairs = None
466 466
467 467 pairsList = None
468 468
469 469 nIncohInt = None
470 470
471 471 def __init__(self):
472 472
473 473 self.radarControllerHeaderObj = RadarControllerHeader()
474 474
475 475 self.systemHeaderObj = SystemHeader()
476 476
477 477 self.type = "SpectraHeis"
478 478
479 479 self.dtype = None
480 480
481 481 # self.nChannels = 0
482 482
483 483 # self.nHeights = 0
484 484
485 485 self.nProfiles = None
486 486
487 487 self.heightList = None
488 488
489 489 self.channelList = None
490 490
491 491 # self.channelIndexList = None
492 492
493 493 self.flagNoData = True
494 494
495 495 self.flagTimeBlock = False
496 496
497 497 self.nPairs = 0
498 498
499 499 self.utctime = None
500 500
501 501 self.blocksize = None
502 502
503 503 class Fits:
504 504
505 505 heightList = None
506 506
507 507 channelList = None
508 508
509 509 flagNoData = True
510 510
511 511 flagTimeBlock = False
512 512
513 513 useLocalTime = False
514 514
515 515 utctime = None
516 516
517 517 timeZone = None
518 518
519 519 ippSeconds = None
520 520
521 521 timeInterval = None
522 522
523 523 nCohInt = None
524 524
525 525 nIncohInt = None
526 526
527 527 noise = None
528 528
529 529 windowOfFilter = 1
530 530
531 531 #Speed of light
532 532 C = 3e8
533 533
534 534 frequency = 49.92e6
535 535
536 536 realtime = False
537 537
538 538
539 539 def __init__(self):
540 540
541 541 self.type = "Fits"
542 542
543 543 self.nProfiles = None
544 544
545 545 self.heightList = None
546 546
547 547 self.channelList = None
548 548
549 549 # self.channelIndexList = None
550 550
551 551 self.flagNoData = True
552 552
553 553 self.utctime = None
554 554
555 555 self.nCohInt = None
556 556
557 557 self.nIncohInt = None
558 558
559 559 self.useLocalTime = True
560 560
561 561 # self.utctime = None
562 562 # self.timeZone = None
563 563 # self.ltctime = None
564 564 # self.timeInterval = None
565 565 # self.header = None
566 566 # self.data_header = None
567 567 # self.data = None
568 568 # self.datatime = None
569 569 # self.flagNoData = False
570 570 # self.expName = ''
571 571 # self.nChannels = None
572 572 # self.nSamples = None
573 573 # self.dataBlocksPerFile = None
574 574 # self.comments = ''
575 575 #
576 576
577 577
578 578 def getltctime(self):
579 579
580 580 if self.useLocalTime:
581 581 return self.utctime - self.timeZone*60
582 582
583 583 return self.utctime
584 584
585 585 def getDatatime(self):
586 586
587 587 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
588 588 return datatime
589 589
590 590 def getTimeRange(self):
591 591
592 592 datatime = []
593 593
594 594 datatime.append(self.ltctime)
595 595 datatime.append(self.ltctime + self.timeInterval)
596 596
597 597 datatime = numpy.array(datatime)
598 598
599 599 return datatime
600 600
601 601 def getHeiRange(self):
602 602
603 603 heis = self.heightList
604 604
605 605 return heis
606 606
607 607 def isEmpty(self):
608 608
609 609 return self.flagNoData
610 610
611 611 def getNHeights(self):
612 612
613 613 return len(self.heightList)
614 614
615 615 def getNChannels(self):
616 616
617 617 return len(self.channelList)
618 618
619 619 def getChannelIndexList(self):
620 620
621 621 return range(self.nChannels)
622 622
623 623 def getNoise(self, type = 1):
624 624
625 625 self.noise = numpy.zeros(self.nChannels)
626 626
627 627 if type == 1:
628 628 noise = self.getNoisebyHildebrand()
629 629
630 630 if type == 2:
631 631 noise = self.getNoisebySort()
632 632
633 633 if type == 3:
634 634 noise = self.getNoisebyWindow()
635 635
636 636 return noise
637 637
638 638 datatime = property(getDatatime, "I'm the 'datatime' property")
639 639 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
640 640 nChannels = property(getNChannels, "I'm the 'nChannels' property.")
641 641 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
642 642 noise = property(getNoise, "I'm the 'noise' property.")
643 643 datatime = property(getDatatime, "I'm the 'datatime' property")
644 644 ltctime = property(getltctime, "I'm the 'ltctime' property")
645 645
646 646 ltctime = property(getltctime, "I'm the 'ltctime' property")
647 647
648 648 class AMISR:
649 649 def __init__(self):
650 650 self.flagNoData = True
651 651 self.data = None
652 652 self.utctime = None
653 653 self.type = "AMISR"
654 654
655 655 #properties for compatibility with Voltage objects
656 self.timeZone = 300#timezone like jroheader, difference in minutes between UTC and localtime
656 self.timeZone = 0#timezone like jroheader, difference in minutes between UTC and localtime
657 657 self.dstFlag = 0#self.dataIn.dstFlag
658 658 self.errorCount = 0#self.dataIn.errorCount
659 659 self.useLocalTime = True#self.dataIn.useLocalTime
660 660
661 661 self.radarControllerHeaderObj = None#self.dataIn.radarControllerHeaderObj.copy()
662 662 self.systemHeaderObj = None#self.dataIn.systemHeaderObj.copy()
663 663 self.channelList = [0]#self.dataIn.channelList this only applies to the AMISR case
664 664 self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
665 665
666 666 self.flagTimeBlock = None#self.dataIn.flagTimeBlock
667 667 #self.utctime = #self.firstdatatime
668 668 self.flagDecodeData = None#self.dataIn.flagDecodeData #assume the data is decoded
669 669 self.flagDeflipData = None#self.dataIn.flagDeflipData #assume the data has been deflipped
670 670
671 671 self.nCohInt = 1#self.dataIn.nCohInt
672 672 self.nIncohInt = 1
673 673 self.ippSeconds = 0.004#self.dataIn.ippSeconds, according to the filename/Setup/Tufile
674 674 self.windowOfFilter = None#self.dataIn.windowOfFilter
675 675
676 676 self.timeInterval = None#self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
677 677 self.frequency = 20000000#self.dataIn.frequency
678 678 self.realtime = 0#self.dataIn.realtime
679 679
680 680 #update these when reading the data
681 681 self.heightList = None#self.dataIn.heightList
682 682 self.nProfiles = None#self.dataOut.nFFTPoints
683 683 self.nBaud = None#self.dataIn.nBaud
684 684 self.nCode = None#self.dataIn.nCode
685 685 self.code = None#self.dataIn.code
686 686
687 #handling for the beams
688 self.beamCodeDict = None
689 self.beamRangeDict = None
690
691 def copy(self, inputObj=None):
692
693 if inputObj == None:
694 return copy.deepcopy(self)
695
696 for key in inputObj.__dict__.keys():
697 self.__dict__[key] = inputObj.__dict__[key]
698
687 699
688 700 def isEmpty(self):
689 701
690 702 return self.flagNoData No newline at end of file
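Regarding the beam attributes added above (beamCodeDict and beamRangeDict), a hypothetical sketch of how a reader might identify and select a beam; the dictionary layout, beam codes and the selectBeam helper are illustrative assumptions, not the committed implementation.

    def selectBeam(amisrObj, beamCode):
        # Assumed layout: beamCodeDict maps a beam index to its code and
        # beamRangeDict maps the same index to that beam's profile range.
        for beamIndex, code in amisrObj.beamCodeDict.items():
            if code == beamCode:
                return amisrObj.beamRangeDict[beamIndex]
        return None

    amisrObj = AMISR()
    amisrObj.beamCodeDict = {0: 64016, 1: 64157}                   # illustrative codes
    amisrObj.beamRangeDict = {0: range(0, 128), 1: range(128, 256)}
    profileRange = selectBeam(amisrObj, 64016)                     # -> range(0, 128)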
@@ -1,3836 +1,3863
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 import h5py
14 14 from xml.etree.ElementTree import Element, SubElement, ElementTree
15 15 try:
16 16 import pyfits
17 17 except:
18 18 print "pyfits module has not been imported, it should be installed to save files in fits format"
19 19
20 20 from jrodata import *
21 21 from jroheaderIO import *
22 22 from jroprocessing import *
23 23
24 24 LOCALTIME = True #-18000
25 25
26 26 def isNumber(str):
27 27 """
28 28 Checks whether the characters of a string can be converted to a number.
29 29
30 30 Exceptions:
31 31 If a given string cannot be converted to a number
32 32 Input:
33 33 str, string to analyze to determine whether it can be converted to a number or not
34 34
35 35 Return:
36 36 True : if the string is numeric
37 37 False : if the string is not numeric
38 38 """
39 39 try:
40 40 float( str )
41 41 return True
42 42 except:
43 43 return False
44 44
45 45 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
46 46 """
47 47 This function determines whether a data file falls within the specified date range.
48 48
49 49 Inputs:
50 50 filename : full name of the data file in Jicamarca format (.r)
51 51
52 52 startUTSeconds : start date of the selected range, given in
53 53 seconds counted from 01/01/1970.
54 54 endUTSeconds : end date of the selected range, given in
55 55 seconds counted from 01/01/1970.
56 56
57 57 Return:
58 58 Boolean : returns True if the data file contains data within the specified
59 59 date range, otherwise returns False.
60 60
61 61 Exceptions:
62 62 If the file does not exist or cannot be opened
63 63 If the header cannot be read.
64 64
65 65 """
66 66 basicHeaderObj = BasicHeader(LOCALTIME)
67 67
68 68 try:
69 69 fp = open(filename,'rb')
70 70 except:
71 71 raise IOError, "The file %s can't be opened" %(filename)
72 72
73 73 sts = basicHeaderObj.read(fp)
74 74 fp.close()
75 75
76 76 if not(sts):
77 77 print "Skipping the file %s because it has not a valid header" %(filename)
78 78 return 0
79 79
80 80 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
81 81 return 0
82 82
83 83 return 1
84 84
85 85 def isFileinThisTime(filename, startTime, endTime):
86 86 """
87 87 Returns the file datetime if the data file falls within the specified time range.
88 88
89 89 Inputs:
90 90 filename : full name of the data file in Jicamarca format (.r)
91 91
92 92 startTime : start time of the selected range, as a datetime.time
93 93
94 94 endTime : end time of the selected range, as a datetime.time
95 95
96 96 Return:
97 97 datetime : returns the basic-header datetime if the file contains data within
98 98 the specified time range, otherwise returns None.
99 99
100 100 Exceptions:
101 101 If the file does not exist or cannot be opened
102 102 If the header cannot be read.
103 103
104 104 """
105 105
106 106
107 107 try:
108 108 fp = open(filename,'rb')
109 109 except:
110 110 raise IOError, "The file %s can't be opened" %(filename)
111 111
112 112 basicHeaderObj = BasicHeader(LOCALTIME)
113 113 sts = basicHeaderObj.read(fp)
114 114 fp.close()
115 115
116 116 thisDatetime = basicHeaderObj.datatime
117 117 thisTime = basicHeaderObj.datatime.time()
118 118
119 119 if not(sts):
120 120 print "Skipping the file %s because it has not a valid header" %(filename)
121 121 return None
122 122
123 123 if not ((startTime <= thisTime) and (endTime > thisTime)):
124 124 return None
125 125
126 126 return thisDatetime
127 127
128 128 def getFileFromSet(path,ext,set):
129 129 validFilelist = []
130 130 fileList = os.listdir(path)
131 131
132 132 # 0 1234 567 89A BCDE
133 133 # H YYYY DDD SSS .ext
134 134
135 135 for file in fileList:
136 136 try:
137 137 year = int(file[1:5])
138 138 doy = int(file[5:8])
139 139
140 140
141 141 except:
142 142 continue
143 143
144 144 if (os.path.splitext(file)[-1].lower() != ext.lower()):
145 145 continue
146 146
147 147 validFilelist.append(file)
148 148
149 149 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
150 150
151 151 if len(myfile)!= 0:
152 152 return myfile[0]
153 153 else:
154 154 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
155 155 print 'the filename %s does not exist'%filename
156 156 print '...going to the last file: '
157 157
158 158 if validFilelist:
159 159 validFilelist = sorted( validFilelist, key=str.lower )
160 160 return validFilelist[-1]
161 161
162 162 return None
163 163
164 164
165 165 def getlastFileFromPath(path, ext):
166 166 """
167 167 Filters the file list, keeping only the files that match the "PYYYYDDDSSS.ext" format,
168 168 and returns the last file of the filtered list.
169 169
170 170 Input:
171 171 path : folder containing the files (without path) to be filtered
172 172 ext : extension of the files contained in the folder
173 173
174 174 Return:
175 175 The last file of the folder; the path is not included.
176 176 """
177 177 validFilelist = []
178 178 fileList = os.listdir(path)
179 179
180 180 # 0 1234 567 89A BCDE
181 181 # H YYYY DDD SSS .ext
182 182
183 183 for file in fileList:
184 184 try:
185 185 year = int(file[1:5])
186 186 doy = int(file[5:8])
187 187
188 188
189 189 except:
190 190 continue
191 191
192 192 if (os.path.splitext(file)[-1].lower() != ext.lower()):
193 193 continue
194 194
195 195 validFilelist.append(file)
196 196
197 197 if validFilelist:
198 198 validFilelist = sorted( validFilelist, key=str.lower )
199 199 return validFilelist[-1]
200 200
201 201 return None
202 202
203 203 def checkForRealPath(path, foldercounter, year, doy, set, ext):
204 204 """
205 205 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
206 206 It tries several upper/lower-case name combinations to determine
207 207 the exact path of a given file.
208 208
209 209 Example :
210 210 the correct file name is .../.../D2009307/P2009307367.ext
211 211
212 212 The function then tries the following combinations
213 213 .../.../y2009307367.ext
214 214 .../.../Y2009307367.ext
215 215 .../.../x2009307/y2009307367.ext
216 216 .../.../x2009307/Y2009307367.ext
217 217 .../.../X2009307/y2009307367.ext
218 218 .../.../X2009307/Y2009307367.ext
219 219 in this case the last letter combination is identical to the file being searched for
220 220
221 221 Return:
222 222 If the right combination is found, it returns the full path and the file name;
223 223 otherwise it returns None as the path and the last upper-case name combination
224 224 as the filename
225 225 """
226 226 fullfilename = None
227 227 find_flag = False
228 228 filename = None
229 229
230 230 prefixDirList = [None,'d','D']
231 231 if ext.lower() == ".r": #voltage
232 232 prefixFileList = ['d','D']
233 233 elif ext.lower() == ".pdata": #spectra
234 234 prefixFileList = ['p','P']
235 235 else:
236 236 return None, filename
237 237
238 238 #sweep over the possible combinations
239 239 for prefixDir in prefixDirList:
240 240 thispath = path
241 241 if prefixDir != None:
242 242 #build the directory name xYYYYDDD (x=d or x=D)
243 243 if foldercounter == 0:
244 244 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
245 245 else:
246 246 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
247 247 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
248 248 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
249 249 fullfilename = os.path.join( thispath, filename ) #build the full path
250 250
251 251 if os.path.exists( fullfilename ): #verify that it exists
252 252 find_flag = True
253 253 break
254 254 if find_flag:
255 255 break
256 256
257 257 if not(find_flag):
258 258 return None, filename
259 259
260 260 return fullfilename, filename
261 261
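As a reference for the xYYYYDDDSSS.ext naming convention handled by getFileFromSet, getlastFileFromPath and checkForRealPath, a small sketch of parsing and rebuilding such a name; the sample filename is illustrative.

    # Illustrative Jicamarca filename: prefix, year, day of year, set, extension.
    filename = "D2009307367.r"

    prefix = filename[0]                    # 'D'/'d' voltage, 'P'/'p' spectra
    year = int(filename[1:5])               # 2009
    doy = int(filename[5:8])                # 307
    fileset = int(filename[8:11])           # 367
    ext = os.path.splitext(filename)[-1]    # '.r'

    rebuilt = "%s%4.4d%3.3d%3.3d%s" % (prefix, year, doy, fileset, ext)
    assert rebuilt == filename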
262 262 def isDoyFolder(folder):
263 263 try:
264 264 year = int(folder[1:5])
265 265 except:
266 266 return 0
267 267
268 268 try:
269 269 doy = int(folder[5:8])
270 270 except:
271 271 return 0
272 272
273 273 return 1
274 274
275 275 class JRODataIO:
276 276
277 277 c = 3E8
278 278
279 279 isConfig = False
280 280
281 281 basicHeaderObj = BasicHeader(LOCALTIME)
282 282
283 283 systemHeaderObj = SystemHeader()
284 284
285 285 radarControllerHeaderObj = RadarControllerHeader()
286 286
287 287 processingHeaderObj = ProcessingHeader()
288 288
289 289 online = 0
290 290
291 291 dtype = None
292 292
293 293 pathList = []
294 294
295 295 filenameList = []
296 296
297 297 filename = None
298 298
299 299 ext = None
300 300
301 301 flagIsNewFile = 1
302 302
303 303 flagTimeBlock = 0
304 304
305 305 flagIsNewBlock = 0
306 306
307 307 fp = None
308 308
309 309 firstHeaderSize = 0
310 310
311 311 basicHeaderSize = 24
312 312
313 313 versionFile = 1103
314 314
315 315 fileSize = None
316 316
317 317 ippSeconds = None
318 318
319 319 fileSizeByHeader = None
320 320
321 321 fileIndex = None
322 322
323 323 profileIndex = None
324 324
325 325 blockIndex = None
326 326
327 327 nTotalBlocks = None
328 328
329 329 maxTimeStep = 30
330 330
331 331 lastUTTime = None
332 332
333 333 datablock = None
334 334
335 335 dataOut = None
336 336
337 337 blocksize = None
338 338
339 339 def __init__(self):
340 340
341 341 raise ValueError, "Not implemented"
342 342
343 343 def run(self):
344 344
345 345 raise ValueError, "Not implemented"
346 346
347 347 def getOutput(self):
348 348
349 349 return self.dataOut
350 350
351 351 class JRODataReader(JRODataIO, ProcessingUnit):
352 352
353 353 nReadBlocks = 0
354 354
355 355 delay = 10 #number of seconds to wait for a new file
356 356
357 357 nTries = 3 #number of tries
358 358
359 359 nFiles = 3 #number of files to search
360 360
361 361 path = None
362 362
363 363 foldercounter = 0
364 364
365 365 flagNoMoreFiles = 0
366 366
367 367 datetimeList = []
368 368
369 369 __isFirstTimeOnline = 1
370 370
371 371 __printInfo = True
372 372
373 373 profileIndex = None
374 374
375 375 def __init__(self):
376 376
377 377 """
378 378
379 379 """
380 380
381 381 raise ValueError, "This method has not been implemented"
382 382
383 383
384 384 def createObjByDefault(self):
385 385 """
386 386
387 387 """
388 388 raise ValueError, "This method has not been implemented"
389 389
390 390 def getBlockDimension(self):
391 391
392 392 raise ValueError, "Not implemented"
393 393
394 394 def __searchFilesOffLine(self,
395 395 path,
396 396 startDate,
397 397 endDate,
398 398 startTime=datetime.time(0,0,0),
399 399 endTime=datetime.time(23,59,59),
400 400 set=None,
401 401 expLabel='',
402 402 ext='.r',
403 403 walk=True):
404 404
405 405 pathList = []
406 406
407 407 if not walk:
408 408 #pathList.append(path)
409 409 multi_path = path.split(',')
410 410 for single_path in multi_path:
411 411 pathList.append(single_path)
412 412
413 413 else:
414 414 #dirList = []
415 415 multi_path = path.split(',')
416 416 for single_path in multi_path:
417 417 dirList = []
418 418 for thisPath in os.listdir(single_path):
419 419 if not os.path.isdir(os.path.join(single_path,thisPath)):
420 420 continue
421 421 if not isDoyFolder(thisPath):
422 422 continue
423 423
424 424 dirList.append(thisPath)
425 425
426 426 if not(dirList):
427 427 return None, None
428 428
429 429 thisDate = startDate
430 430
431 431 while(thisDate <= endDate):
432 432 year = thisDate.timetuple().tm_year
433 433 doy = thisDate.timetuple().tm_yday
434 434
435 435 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
436 436 if len(matchlist) == 0:
437 437 thisDate += datetime.timedelta(1)
438 438 continue
439 439 for match in matchlist:
440 440 pathList.append(os.path.join(single_path,match,expLabel))
441 441
442 442 thisDate += datetime.timedelta(1)
443 443
444 444 if pathList == []:
445 445 print "No folder was found for the date range: %s-%s" %(startDate, endDate)
446 446 return None, None
447 447
448 448 print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
449 449
450 450 filenameList = []
451 451 datetimeList = []
452 452 pathDict = {}
453 453 filenameList_to_sort = []
454 454
455 455 for i in range(len(pathList)):
456 456
457 457 thisPath = pathList[i]
458 458
459 459 fileList = glob.glob1(thisPath, "*%s" %ext)
460 460 fileList.sort()
461 461 pathDict.setdefault(fileList[0])
462 462 pathDict[fileList[0]] = i
463 463 filenameList_to_sort.append(fileList[0])
464 464
465 465 filenameList_to_sort.sort()
466 466
467 467 for file in filenameList_to_sort:
468 468 thisPath = pathList[pathDict[file]]
469 469
470 470 fileList = glob.glob1(thisPath, "*%s" %ext)
471 471 fileList.sort()
472 472
473 473 for file in fileList:
474 474
475 475 filename = os.path.join(thisPath,file)
476 476 thisDatetime = isFileinThisTime(filename, startTime, endTime)
477 477
478 478 if not(thisDatetime):
479 479 continue
480 480
481 481 filenameList.append(filename)
482 482 datetimeList.append(thisDatetime)
483 483
484 484 if not(filenameList):
485 485 print "No file was found for the time range %s - %s" %(startTime, endTime)
486 486 return None, None
487 487
488 488 print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
489 489 print
490 490
491 491 for i in range(len(filenameList)):
492 492 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
493 493
494 494 self.filenameList = filenameList
495 495 self.datetimeList = datetimeList
496 496
497 497 return pathList, filenameList
498 498
499 499 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
500 500
501 501 """
502 502 Searches for the last file of the last folder (whether or not it is determined by startDateTime) and
503 503 returns the file found along with other data.
504 504
505 505 Input:
506 506 path : folder containing the files with data
507 507
508 508 expLabel : name of the sub-experiment (subfolder)
509 509
510 510 ext : extension of the files
511 511
512 512 walk : if enabled, the doy subdirectories (doypath) are searched
513 513
514 514 Return:
515 515 directory : the directory where the found file is located
516 516 filename : the last file of the folder
517 517 year : the year
518 518 doy : the day-of-year number
519 519 set : the file set number
520 520
521 521
522 522 """
523 523 dirList = []
524 524
525 525 if not walk:
526 526 fullpath = path
527 527 foldercounter = 0
528 528 else:
529 529 #Keep only the directories
530 530 for thisPath in os.listdir(path):
531 531 if not os.path.isdir(os.path.join(path,thisPath)):
532 532 continue
533 533 if not isDoyFolder(thisPath):
534 534 continue
535 535
536 536 dirList.append(thisPath)
537 537
538 538 if not(dirList):
539 539 return None, None, None, None, None, None
540 540
541 541 dirList = sorted( dirList, key=str.lower )
542 542
543 543 doypath = dirList[-1]
544 544 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
545 545 fullpath = os.path.join(path, doypath, expLabel)
546 546
547 547
548 548 print "%s folder was found: " %(fullpath )
549 549
550 550 if set == None:
551 551 filename = getlastFileFromPath(fullpath, ext)
552 552 else:
553 553 filename = getFileFromSet(fullpath, ext, set)
554 554
555 555 if not(filename):
556 556 return None, None, None, None, None, None
557 557
558 558 print "%s file was found" %(filename)
559 559
560 560 if not(self.__verifyFile(os.path.join(fullpath, filename))):
561 561 return None, None, None, None, None, None
562 562
563 563 year = int( filename[1:5] )
564 564 doy = int( filename[5:8] )
565 565 set = int( filename[8:11] )
566 566
567 567 return fullpath, foldercounter, filename, year, doy, set
568 568
569 569 def __setNextFileOffline(self):
570 570
571 571 idFile = self.fileIndex
572 572
573 573 while (True):
574 574 idFile += 1
575 575 if not(idFile < len(self.filenameList)):
576 576 self.flagNoMoreFiles = 1
577 577 print "No more Files"
578 578 return 0
579 579
580 580 filename = self.filenameList[idFile]
581 581
582 582 if not(self.__verifyFile(filename)):
583 583 continue
584 584
585 585 fileSize = os.path.getsize(filename)
586 586 fp = open(filename,'rb')
587 587 break
588 588
589 589 self.flagIsNewFile = 1
590 590 self.fileIndex = idFile
591 591 self.filename = filename
592 592 self.fileSize = fileSize
593 593 self.fp = fp
594 594
595 595 print "Setting the file: %s"%self.filename
596 596
597 597 return 1
598 598
599 599 def __setNextFileOnline(self):
600 600 """
601 601 Searches for the next file with enough data to be read inside a specific folder; if
602 602 no valid file is found it waits a given time and then searches the next n possible
603 603 files.
604 604
605 605 Affected:
606 606 self.flagIsNewFile
607 607 self.filename
608 608 self.fileSize
609 609 self.fp
610 610 self.set
611 611 self.flagNoMoreFiles
612 612
613 613 Return:
614 614 0 : if, after searching for the next valid file, it could not be found
615 615 1 : if the file was opened successfully and is ready to be read
616 616
617 617 Exceptions:
618 618 If a given file cannot be opened
619 619 """
620 620 nFiles = 0
621 621 fileOk_flag = False
622 622 firstTime_flag = True
623 623
624 624 self.set += 1
625 625
626 626 if self.set > 999:
627 627 self.set = 0
628 628 self.foldercounter += 1
629 629
630 630 #search for the first available file
631 631 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
632 632 if fullfilename:
633 633 if self.__verifyFile(fullfilename, False):
634 634 fileOk_flag = True
635 635
636 636 #if no file is found, wait and search again
637 637 if not(fileOk_flag):
638 638 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
639 639
640 640 if firstTime_flag: #if it is the first time, run the loop self.nTries times
641 641 tries = self.nTries
642 642 else:
643 643 tries = 1 #if it is not the first time, do it only once
644 644
645 645 for nTries in range( tries ):
646 646 if firstTime_flag:
647 647 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
648 648 time.sleep( self.delay )
649 649 else:
650 650 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
651 651
652 652 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
653 653 if fullfilename:
654 654 if self.__verifyFile(fullfilename):
655 655 fileOk_flag = True
656 656 break
657 657
658 658 if fileOk_flag:
659 659 break
660 660
661 661 firstTime_flag = False
662 662
663 663 print "\tSkipping the file \"%s\" because this file doesn't exist" % filename
664 664 self.set += 1
665 665
666 666 if nFiles == (self.nFiles-1): #if the requested file is not found, change folder and search in the next one
667 667 self.set = 0
668 668 self.doy += 1
669 669 self.foldercounter = 0
670 670
671 671 if fileOk_flag:
672 672 self.fileSize = os.path.getsize( fullfilename )
673 673 self.filename = fullfilename
674 674 self.flagIsNewFile = 1
675 675 if self.fp != None: self.fp.close()
676 676 self.fp = open(fullfilename, 'rb')
677 677 self.flagNoMoreFiles = 0
678 678 print 'Setting the file: %s' % fullfilename
679 679 else:
680 680 self.fileSize = 0
681 681 self.filename = None
682 682 self.flagIsNewFile = 0
683 683 self.fp = None
684 684 self.flagNoMoreFiles = 1
685 685 print 'No more Files'
686 686
687 687 return fileOk_flag
688 688
689 689
690 690 def setNextFile(self):
691 691 if self.fp != None:
692 692 self.fp.close()
693 693
694 694 if self.online:
695 695 newFile = self.__setNextFileOnline()
696 696 else:
697 697 newFile = self.__setNextFileOffline()
698 698
699 699 if not(newFile):
700 700 return 0
701 701
702 702 self.__readFirstHeader()
703 703 self.nReadBlocks = 0
704 704 return 1
705 705
706 706 def __waitNewBlock(self):
707 707 """
708 708 Returns 1 if a new data block was found, 0 otherwise.
709 709
710 710 If the reading mode is offline it always returns 0
711 711 """
712 712 if not self.online:
713 713 return 0
714 714
715 715 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
716 716 return 0
717 717
718 718 currentPointer = self.fp.tell()
719 719
720 720 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
721 721
722 722 for nTries in range( self.nTries ):
723 723
724 724 self.fp.close()
725 725 self.fp = open( self.filename, 'rb' )
726 726 self.fp.seek( currentPointer )
727 727
728 728 self.fileSize = os.path.getsize( self.filename )
729 729 currentSize = self.fileSize - currentPointer
730 730
731 731 if ( currentSize >= neededSize ):
732 732 self.__rdBasicHeader()
733 733 return 1
734 734
735 735 if self.fileSize == self.fileSizeByHeader:
736 736 # self.flagEoF = True
737 737 return 0
738 738
739 739 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
740 740 time.sleep( self.delay )
741 741
742 742
743 743 return 0
744 744
745 745 def waitDataBlock(self,pointer_location):
746 746
747 747 currentPointer = pointer_location
748 748
749 749 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
750 750
751 751 for nTries in range( self.nTries ):
752 752 self.fp.close()
753 753 self.fp = open( self.filename, 'rb' )
754 754 self.fp.seek( currentPointer )
755 755
756 756 self.fileSize = os.path.getsize( self.filename )
757 757 currentSize = self.fileSize - currentPointer
758 758
759 759 if ( currentSize >= neededSize ):
760 760 return 1
761 761
762 762 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
763 763 time.sleep( self.delay )
764 764
765 765 return 0
766 766
767 767
768 768 def __jumpToLastBlock(self):
769 769
770 770 if not(self.__isFirstTimeOnline):
771 771 return
772 772
773 773 csize = self.fileSize - self.fp.tell()
774 774 blocksize = self.processingHeaderObj.blockSize
775 775
776 776 #skip the first data block
777 777 if csize > self.processingHeaderObj.blockSize:
778 778 self.fp.seek(self.fp.tell() + blocksize)
779 779 else:
780 780 return
781 781
782 782 csize = self.fileSize - self.fp.tell()
783 783 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
784 784 while True:
785 785
786 786 if self.fp.tell()<self.fileSize:
787 787 self.fp.seek(self.fp.tell() + neededsize)
788 788 else:
789 789 self.fp.seek(self.fp.tell() - neededsize)
790 790 break
791 791
792 792 # csize = self.fileSize - self.fp.tell()
793 793 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
794 794 # factor = int(csize/neededsize)
795 795 # if factor > 0:
796 796 # self.fp.seek(self.fp.tell() + factor*neededsize)
797 797
798 798 self.flagIsNewFile = 0
799 799 self.__isFirstTimeOnline = 0
800 800
801 801
802 802 def __setNewBlock(self):
803 803
804 804 if self.fp == None:
805 805 return 0
806 806
807 807 if self.online:
808 808 self.__jumpToLastBlock()
809 809
810 810 if self.flagIsNewFile:
811 811 return 1
812 812
813 813 self.lastUTTime = self.basicHeaderObj.utc
814 814 currentSize = self.fileSize - self.fp.tell()
815 815 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
816 816
817 817 if (currentSize >= neededSize):
818 818 self.__rdBasicHeader()
819 819 return 1
820 820
821 821 if self.__waitNewBlock():
822 822 return 1
823 823
824 824 if not(self.setNextFile()):
825 825 return 0
826 826
827 827 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
828 828
829 829 self.flagTimeBlock = 0
830 830
831 831 if deltaTime > self.maxTimeStep:
832 832 self.flagTimeBlock = 1
833 833
834 834 return 1
835 835
836 836
837 837 def readNextBlock(self):
838 838 if not(self.__setNewBlock()):
839 839 return 0
840 840
841 841 if not(self.readBlock()):
842 842 return 0
843 843
844 844 return 1
845 845
846 846 def __rdProcessingHeader(self, fp=None):
847 847 if fp == None:
848 848 fp = self.fp
849 849
850 850 self.processingHeaderObj.read(fp)
851 851
852 852 def __rdRadarControllerHeader(self, fp=None):
853 853 if fp == None:
854 854 fp = self.fp
855 855
856 856 self.radarControllerHeaderObj.read(fp)
857 857
858 858 def __rdSystemHeader(self, fp=None):
859 859 if fp == None:
860 860 fp = self.fp
861 861
862 862 self.systemHeaderObj.read(fp)
863 863
864 864 def __rdBasicHeader(self, fp=None):
865 865 if fp == None:
866 866 fp = self.fp
867 867
868 868 self.basicHeaderObj.read(fp)
869 869
870 870
871 871 def __readFirstHeader(self):
872 872 self.__rdBasicHeader()
873 873 self.__rdSystemHeader()
874 874 self.__rdRadarControllerHeader()
875 875 self.__rdProcessingHeader()
876 876
877 877 self.firstHeaderSize = self.basicHeaderObj.size
878 878
879 879 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
880 880 if datatype == 0:
881 881 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
882 882 elif datatype == 1:
883 883 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
884 884 elif datatype == 2:
885 885 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
886 886 elif datatype == 3:
887 887 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
888 888 elif datatype == 4:
889 889 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
890 890 elif datatype == 5:
891 891 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
892 892 else:
893 893 raise ValueError, 'Data type was not defined'
894 894
895 895 self.dtype = datatype_str
896 896 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
897 897 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
898 898 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
899 899 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
900 900 self.getBlockDimension()
901 901
902 902
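A worked number for the IPP conversion done in __readFirstHeader above: the radar controller header stores the IPP in kilometres, so ippSeconds = 2*1000*ipp/c is the two-way travel time; the IPP value below is illustrative.

    c = 3E8                              # speed of light in m/s
    ipp_km = 300.0                       # illustrative IPP, not from a header
    ippSeconds = 2 * 1000 * ipp_km / c   # 0.002 s, i.e. a 500 Hz PRF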
903 903 def __verifyFile(self, filename, msgFlag=True):
904 904 msg = None
905 905 try:
906 906 fp = open(filename, 'rb')
907 907 currentPosition = fp.tell()
908 908 except:
909 909 if msgFlag:
910 910 print "The file %s can't be opened" % (filename)
911 911 return False
912 912
913 913 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
914 914
915 915 if neededSize == 0:
916 916 basicHeaderObj = BasicHeader(LOCALTIME)
917 917 systemHeaderObj = SystemHeader()
918 918 radarControllerHeaderObj = RadarControllerHeader()
919 919 processingHeaderObj = ProcessingHeader()
920 920
921 921 try:
922 922 if not( basicHeaderObj.read(fp) ): raise IOError
923 923 if not( systemHeaderObj.read(fp) ): raise IOError
924 924 if not( radarControllerHeaderObj.read(fp) ): raise IOError
925 925 if not( processingHeaderObj.read(fp) ): raise IOError
926 926 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
927 927
928 928 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
929 929
930 930 except:
931 931 if msgFlag:
932 932 print "\tThe file %s is empty or doesn't have enough data" % filename
933 933
934 934 fp.close()
935 935 return False
936 936 else:
937 937 msg = "\tSkipping the file %s because it doesn't have enough data" %filename
938 938
939 939 fp.close()
940 940 fileSize = os.path.getsize(filename)
941 941 currentSize = fileSize - currentPosition
942 942 if currentSize < neededSize:
943 943 if msgFlag and (msg != None):
944 944 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
945 945 return False
946 946
947 947 return True
948 948
949 949 def setup(self,
950 950 path=None,
951 951 startDate=None,
952 952 endDate=None,
953 953 startTime=datetime.time(0,0,0),
954 954 endTime=datetime.time(23,59,59),
955 955 set=None,
956 956 expLabel = "",
957 957 ext = None,
958 958 online = False,
959 959 delay = 60,
960 960 walk = True):
961 961
962 962 if path == None:
963 963 raise ValueError, "The path is not valid"
964 964
965 965 if ext == None:
966 966 ext = self.ext
967 967
968 968 if online:
969 969 print "Searching files in online mode..."
970 970
971 971 for nTries in range( self.nTries ):
972 972 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
973 973
974 974 if fullpath:
975 975 break
976 976
977 977 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
978 978 time.sleep( self.delay )
979 979
980 980 if not(fullpath):
981 981 print "There are no valid files in %s" % path
982 982 return None
983 983
984 984 self.year = year
985 985 self.doy = doy
986 986 self.set = set - 1
987 987 self.path = path
988 988 self.foldercounter = foldercounter
989 989 last_set = None
990 990
991 991 else:
992 992 print "Searching files in offline mode ..."
993 993 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
994 994 startTime=startTime, endTime=endTime,
995 995 set=set, expLabel=expLabel, ext=ext,
996 996 walk=walk)
997 997
998 998 if not(pathList):
999 999 print "No *%s files in the folder %s \nfor the range: %s - %s"%(ext, path,
1000 1000 datetime.datetime.combine(startDate,startTime).ctime(),
1001 1001 datetime.datetime.combine(endDate,endTime).ctime())
1002 1002
1003 1003 sys.exit(-1)
1004 1004
1005 1005
1006 1006 self.fileIndex = -1
1007 1007 self.pathList = pathList
1008 1008 self.filenameList = filenameList
1009 1009 file_name = os.path.basename(filenameList[-1])
1010 1010 basename, ext = os.path.splitext(file_name)
1011 1011 last_set = int(basename[-3:])
1012 1012
1013 1013 self.online = online
1014 1014 self.delay = delay
1015 1015 ext = ext.lower()
1016 1016 self.ext = ext
1017 1017
1018 1018 if not(self.setNextFile()):
1019 1019 if (startDate!=None) and (endDate!=None):
1020 1020 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1021 1021 elif startDate != None:
1022 1022 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1023 1023 else:
1024 1024 print "No files"
1025 1025
1026 1026 sys.exit(-1)
1027 1027
1028 1028 # self.updateDataHeader()
1029 1029 if last_set != None:
1030 1030 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1031 1031 return self.dataOut
1032 1032
1033 1033 def getBasicHeader(self):
1034 1034
1035 1035 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1036 1036
1037 1037 self.dataOut.flagTimeBlock = self.flagTimeBlock
1038 1038
1039 1039 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1040 1040
1041 1041 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1042 1042
1043 1043 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1044 1044
1045 1045 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1046 1046
1047 1047 def getFirstHeader(self):
1048 1048
1049 1049 raise ValueError, "This method has not been implemented"
1050 1050
1051 1051 def getData():
1052 1052
1053 1053 raise ValueError, "This method has not been implemented"
1054 1054
1055 1055 def hasNotDataInBuffer():
1056 1056
1057 1057 raise ValueError, "This method has not been implemented"
1058 1058
1059 1059 def readBlock():
1060 1060
1061 1061 raise ValueError, "This method has not been implemented"
1062 1062
1063 1063 def isEndProcess(self):
1064 1064
1065 1065 return self.flagNoMoreFiles
1066 1066
1067 1067 def printReadBlocks(self):
1068 1068
1069 1069 print "Number of read blocks per file %04d" %self.nReadBlocks
1070 1070
1071 1071 def printTotalBlocks(self):
1072 1072
1073 1073 print "Number of read blocks %04d" %self.nTotalBlocks
1074 1074
1075 1075 def printNumberOfBlock(self):
1076 1076
1077 1077 if self.flagIsNewBlock:
1078 1078 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1079 1079 self.dataOut.blocknow = self.basicHeaderObj.dataBlock
1080 1080 def printInfo(self):
1081 1081
1082 1082 if self.__printInfo == False:
1083 1083 return
1084 1084
1085 1085 self.basicHeaderObj.printInfo()
1086 1086 self.systemHeaderObj.printInfo()
1087 1087 self.radarControllerHeaderObj.printInfo()
1088 1088 self.processingHeaderObj.printInfo()
1089 1089
1090 1090 self.__printInfo = False
1091 1091
1092 1092
1093 1093 def run(self, **kwargs):
1094 1094
1095 1095 if not(self.isConfig):
1096 1096
1097 1097 # self.dataOut = dataOut
1098 1098 self.setup(**kwargs)
1099 1099 self.isConfig = True
1100 1100
1101 1101 self.getData()
1102 1102
1103 1103 class JRODataWriter(JRODataIO, Operation):
1104 1104
1105 1105 """
1106 1106 This class writes data to processed files (.r or .pdata). Data is
1107 1107 always written in blocks.
1108 1108 """
1109 1109
1110 1110 blockIndex = 0
1111 1111
1112 1112 path = None
1113 1113
1114 1114 setFile = None
1115 1115
1116 1116 profilesPerBlock = None
1117 1117
1118 1118 blocksPerFile = None
1119 1119
1120 1120 nWriteBlocks = 0
1121 1121
1122 1122 def __init__(self, dataOut=None):
1123 1123 raise ValueError, "Not implemented"
1124 1124
1125 1125
1126 1126 def hasAllDataInBuffer(self):
1127 1127 raise ValueError, "Not implemented"
1128 1128
1129 1129
1130 1130 def setBlockDimension(self):
1131 1131 raise ValueError, "Not implemented"
1132 1132
1133 1133
1134 1134 def writeBlock(self):
1135 1135 raise ValueError, "Not implemented"
1136 1136
1137 1137
1138 1138 def putData(self):
1139 1139 raise ValueError, "Not implemented"
1140 1140
1141 1141
1142 1142 def setBasicHeader(self):
1143 1143
1144 1144 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1145 1145 self.basicHeaderObj.version = self.versionFile
1146 1146 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1147 1147
1148 1148 utc = numpy.floor(self.dataOut.utctime)
1149 1149 milisecond = (self.dataOut.utctime - utc)* 1000.0
1150 1150
1151 1151 self.basicHeaderObj.utc = utc
1152 1152 self.basicHeaderObj.miliSecond = milisecond
1153 1153 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1154 1154 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1155 1155 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1156 1156
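The integer/fractional split performed by setBasicHeader, with an illustrative timestamp (not taken from real data):

    import numpy

    utctime = 1353423981.375
    utc = numpy.floor(utctime)            # 1353423981.0 s stored as utc
    milisecond = (utctime - utc)*1000.0   # 375.0 ms stored as miliSecond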
1157 1157 def setFirstHeader(self):
1158 1158 """
1159 1159 Gets a copy of the First Header
1160 1160
1161 1161 Affected:
1162 1162
1163 1163 self.basicHeaderObj
1164 1164 self.systemHeaderObj
1165 1165 self.radarControllerHeaderObj
1166 1166 self.processingHeaderObj
1167 1167
1168 1168 Return:
1169 1169 None
1170 1170 """
1171 1171
1172 1172 raise ValueError, "Not implemented"
1173 1173
1174 1174 def __writeFirstHeader(self):
1175 1175 """
1176 1176 Writes the first header of the file, that is, the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1177 1177
1178 1178 Affected:
1179 1179 __dataType
1180 1180
1181 1181 Return:
1182 1182 None
1183 1183 """
1184 1184
1185 1185 # COMPUTE PARAMETERS
1186 1186
1187 1187 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1188 1188 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1189 1189
1190 1190 self.basicHeaderObj.write(self.fp)
1191 1191 self.systemHeaderObj.write(self.fp)
1192 1192 self.radarControllerHeaderObj.write(self.fp)
1193 1193 self.processingHeaderObj.write(self.fp)
1194 1194
1195 1195 self.dtype = self.dataOut.dtype
1196 1196
1197 1197 def __setNewBlock(self):
1198 1198 """
1199 1199 If it is a new file it writes the First Header, otherwise it writes only the Basic Header
1200 1200
1201 1201 Return:
1202 1202 0 : if nothing could be written
1203 1203 1 : if the Basic or the First Header was written
1204 1204 """
1205 1205 if self.fp == None:
1206 1206 self.setNextFile()
1207 1207
1208 1208 if self.flagIsNewFile:
1209 1209 return 1
1210 1210
1211 1211 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1212 1212 self.basicHeaderObj.write(self.fp)
1213 1213 return 1
1214 1214
1215 1215 if not( self.setNextFile() ):
1216 1216 return 0
1217 1217
1218 1218 return 1
1219 1219
1220 1220
1221 1221 def writeNextBlock(self):
1222 1222 """
1223 1223 Selects the next block of data and writes it to a file
1224 1224
1225 1225 Return:
1226 1226 0 : if the data block could not be written
1227 1227 1 : if the data block was written
1228 1228 """
1229 1229 if not( self.__setNewBlock() ):
1230 1230 return 0
1231 1231
1232 1232 self.writeBlock()
1233 1233
1234 1234 return 1
1235 1235
1236 1236 def setNextFile(self):
1237 1237 """
1238 1238 Determines the next file to be written
1239 1239
1240 1240 Affected:
1241 1241 self.filename
1242 1242 self.subfolder
1243 1243 self.fp
1244 1244 self.setFile
1245 1245 self.flagIsNewFile
1246 1246
1247 1247 Return:
1248 1248 0 : if the file cannot be written
1249 1249 1 : if the file is ready to be written
1250 1250 """
1251 1251 ext = self.ext
1252 1252 path = self.path
1253 1253
1254 1254 if self.fp != None:
1255 1255 self.fp.close()
1256 1256
1257 1257 timeTuple = time.localtime( self.dataOut.utctime)
1258 1258 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1259 1259
1260 1260 fullpath = os.path.join( path, subfolder )
1261 1261 if not( os.path.exists(fullpath) ):
1262 1262 os.mkdir(fullpath)
1263 1263 self.setFile = -1 #initialize the set counter
1264 1264 else:
1265 1265 filesList = os.listdir( fullpath )
1266 1266 if len( filesList ) > 0:
1267 1267 filesList = sorted( filesList, key=str.lower )
1268 1268 filen = filesList[-1]
1269 1269 # the filename must have the following format
1270 1270 # 0 1234 567 89A BCDE (hex)
1271 1271 # x YYYY DDD SSS .ext
1272 1272 if isNumber( filen[8:11] ):
1273 1273 self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file
1274 1274 else:
1275 1275 self.setFile = -1
1276 1276 else:
1277 1277 self.setFile = -1 #initialize the set counter
1278 1278
1279 1279 setFile = self.setFile
1280 1280 setFile += 1
1281 1281
1282 1282 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1283 1283 timeTuple.tm_year,
1284 1284 timeTuple.tm_yday,
1285 1285 setFile,
1286 1286 ext )
1287 1287
1288 1288 filename = os.path.join( path, subfolder, file )
1289 1289
1290 1290 fp = open( filename,'wb' )
1291 1291
1292 1292 self.blockIndex = 0
1293 1293
1294 1294 #save attributes
1295 1295 self.filename = filename
1296 1296 self.subfolder = subfolder
1297 1297 self.fp = fp
1298 1298 self.setFile = setFile
1299 1299 self.flagIsNewFile = 1
1300 1300
1301 1301 self.setFirstHeader()
1302 1302
1303 1303 print 'Writing the file: %s'%self.filename
1304 1304
1305 1305 self.__writeFirstHeader()
1306 1306
1307 1307 return 1
1308 1308
1309 1309 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
1310 1310 """
1311 1311 Sets the format in which the data will be saved and writes the First Header
1312 1312
1313 1313 Inputs:
1314 1314 path : destination path where the output files will be written
1315 1315 ext : extension of the files to be created (e.g. ".r")
1316 1316 set : set number of the file
1317 1317
1318 1318 Return:
1319 1319 0 : if the setup was not successful
1320 1320 1 : if the setup was successful
1321 1321 """
1322 1322
1323 1323 if ext == None:
1324 1324 ext = self.ext
1325 1325
1326 1326 ext = ext.lower()
1327 1327
1328 1328 self.ext = ext
1329 1329
1330 1330 self.path = path
1331 1331
1332 1332 self.setFile = set - 1
1333 1333
1334 1334 self.blocksPerFile = blocksPerFile
1335 1335
1336 1336 self.profilesPerBlock = profilesPerBlock
1337 1337
1338 1338 self.dataOut = dataOut
1339 1339
1340 1340 if not(self.setNextFile()):
1341 1341 print "There isn't a next file"
1342 1342 return 0
1343 1343
1344 1344 self.setBlockDimension()
1345 1345
1346 1346 return 1
1347 1347
1348 1348 def run(self, dataOut, **kwargs):
1349 1349
1350 1350 if not(self.isConfig):
1351 1351
1352 1352 self.setup(dataOut, **kwargs)
1353 1353 self.isConfig = True
1354 1354
1355 1355 self.putData()
1356 1356
1357 1357 class VoltageReader(JRODataReader):
1358 1358 """
1359 1359 This class reads voltage data from files in rawdata format (.r). Data are always
1360 1360 read in blocks. The data read (a 3-dimensional array:
1361 1361 profiles*heights*channels) are stored in the "buffer" variable.
1362 1362
1363 1363 profiles * heights * channels
1364 1364
1365 1365 This class contains instances (objects) of the BasicHeader, SystemHeader,
1366 1366 RadarControllerHeader and Voltage classes. The first three store the data
1367 1367 header information (metadata), and the fourth (Voltage) obtains and stores one
1368 1368 profile of data from the "buffer" each time the "getData" method is executed.
1369 1369
1370 1370 Example:
1371 1371
1372 1372 dpath = "/home/myuser/data"
1373 1373
1374 1374 startTime = datetime.datetime(2010,1,20,0,0,0)
1375 1375
1376 1376 endTime = datetime.datetime(2010,1,21,23,59,59)
1377 1377
1378 1378 readerObj = VoltageReader()
1379 1379
1380 1380 readerObj.setup(dpath, startTime, endTime)
1381 1381
1382 1382 while(True):
1383 1383
1384 1384 #to get one profile
1385 1385 profile = readerObj.getData()
1386 1386
1387 1387 #print the profile
1388 1388 print profile
1389 1389
1390 1390 #If you want to see the whole data block
1391 1391 print readerObj.datablock
1392 1392
1393 1393 if readerObj.flagNoMoreFiles:
1394 1394 break
1395 1395
1396 1396 """
1397 1397
1398 1398 ext = ".r"
1399 1399
1400 1400 optchar = "D"
1401 1401 dataOut = None
1402 1402
1403 1403
1404 1404 def __init__(self):
1405 1405 """
1406 1406 Initializer of the VoltageReader class for reading voltage data.
1407 1407
1408 1408 Input:
1409 1409 dataOut : Object of the Voltage class. This object will be used to
1410 1410 store one profile of data every time a request is made
1411 1411 (getData). The profile will be obtained from the data buffer;
1412 1412 if the buffer is empty, a new data block will be read from
1413 1413 file.
1414 1414 If this parameter is not given, one is created internally.
1415 1415
1416 1416 Affected variables:
1417 1417 self.dataOut
1418 1418
1419 1419 Return:
1420 1420 None
1421 1421 """
1422 1422
1423 1423 self.isConfig = False
1424 1424
1425 1425 self.datablock = None
1426 1426
1427 1427 self.utc = 0
1428 1428
1429 1429 self.ext = ".r"
1430 1430
1431 1431 self.optchar = "D"
1432 1432
1433 1433 self.basicHeaderObj = BasicHeader(LOCALTIME)
1434 1434
1435 1435 self.systemHeaderObj = SystemHeader()
1436 1436
1437 1437 self.radarControllerHeaderObj = RadarControllerHeader()
1438 1438
1439 1439 self.processingHeaderObj = ProcessingHeader()
1440 1440
1441 1441 self.online = 0
1442 1442
1443 1443 self.fp = None
1444 1444
1445 1445 self.idFile = None
1446 1446
1447 1447 self.dtype = None
1448 1448
1449 1449 self.fileSizeByHeader = None
1450 1450
1451 1451 self.filenameList = []
1452 1452
1453 1453 self.filename = None
1454 1454
1455 1455 self.fileSize = None
1456 1456
1457 1457 self.firstHeaderSize = 0
1458 1458
1459 1459 self.basicHeaderSize = 24
1460 1460
1461 1461 self.pathList = []
1462 1462
1463 1463 self.filenameList = []
1464 1464
1465 1465 self.lastUTTime = 0
1466 1466
1467 1467 self.maxTimeStep = 30
1468 1468
1469 1469 self.flagNoMoreFiles = 0
1470 1470
1471 1471 self.set = 0
1472 1472
1473 1473 self.path = None
1474 1474
1475 1475 self.profileIndex = 2**32-1
1476 1476
1477 1477 self.delay = 3 #seconds
1478 1478
1479 1479 self.nTries = 3 #number of retries
1480 1480
1481 1481 self.nFiles = 3 #number of files for searching
1482 1482
1483 1483 self.nReadBlocks = 0
1484 1484
1485 1485 self.flagIsNewFile = 1
1486 1486
1487 1487 self.__isFirstTimeOnline = 1
1488 1488
1489 1489 self.ippSeconds = 0
1490 1490
1491 1491 self.flagTimeBlock = 0
1492 1492
1493 1493 self.flagIsNewBlock = 0
1494 1494
1495 1495 self.nTotalBlocks = 0
1496 1496
1497 1497 self.blocksize = 0
1498 1498
1499 1499 self.dataOut = self.createObjByDefault()
1500 1500
1501 1501 def createObjByDefault(self):
1502 1502
1503 1503 dataObj = Voltage()
1504 1504
1505 1505 return dataObj
1506 1506
1507 1507 def __hasNotDataInBuffer(self):
1508 1508 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1509 1509 return 1
1510 1510 return 0
1511 1511
1512 1512
1513 1513 def getBlockDimension(self):
1514 1514 """
1515 1515 Gets the number of points to read for each data block
1516 1516
1517 1517 Affected:
1518 1518 self.blocksize
1519 1519
1520 1520 Return:
1521 1521 None
1522 1522 """
1523 1523 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1524 1524 self.blocksize = pts2read
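# Illustrative example (hypothetical header values): with 64 profiles per block,
# 100 heights and 2 channels, pts2read = 64 * 100 * 2 = 12800 complex samples,
# which is the number of items of self.dtype that readBlock() reads per block.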
1525 1525
1526 1526
1527 1527 def readBlock(self):
1528 1528 """
1529 1529 readBlock reads the data block from the current position of the file pointer
1530 1530 (self.fp) and updates all the parameters related to the data block
1531 1531 (metadata + data). The data read is stored in the buffer and the buffer
1532 1532 counter is reset to 0
1533 1533
1534 1534 Inputs:
1535 1535 None
1536 1536
1537 1537 Return:
1538 1538 None
1539 1539
1540 1540 Affected:
1541 1541 self.profileIndex
1542 1542 self.datablock
1543 1543 self.flagIsNewFile
1544 1544 self.flagIsNewBlock
1545 1545 self.nTotalBlocks
1546 1546
1547 1547 Exceptions:
1548 1548 If a block that was read is not a valid block
1549 1549 """
1550 1550 current_pointer_location = self.fp.tell()
1551 1551 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1552 1552
1553 1553 try:
1554 1554 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1555 1555 except:
1556 1556 #print "The read block (%3d) has not enough data" %self.nReadBlocks
1557 1557
1558 1558 if self.waitDataBlock(pointer_location=current_pointer_location):
1559 1559 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1560 1560 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1561 1561 # return 0
1562 1562
1563 1563 junk = numpy.transpose(junk, (2,0,1))
1564 1564 self.datablock = junk['real'] + junk['imag']*1j
1565 1565
1566 1566 self.profileIndex = 0
1567 1567
1568 1568 self.flagIsNewFile = 0
1569 1569 self.flagIsNewBlock = 1
1570 1570
1571 1571 self.nTotalBlocks += 1
1572 1572 self.nReadBlocks += 1
1573 1573
1574 1574 return 1
1575 1575
1576 1576 def getFirstHeader(self):
1577 1577
1578 1578 self.dataOut.dtype = self.dtype
1579 1579
1580 1580 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1581 1581
1582 1582 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1583 1583
1584 1584 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1585 1585
1586 1586 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1587 1587
1588 1588 self.dataOut.ippSeconds = self.ippSeconds
1589 1589
1590 1590 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1591 1591
1592 1592 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1593 1593
1594 1594 self.dataOut.flagShiftFFT = False
1595 1595
1596 1596 if self.radarControllerHeaderObj.code != None:
1597 1597
1598 1598 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1599 1599
1600 1600 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1601 1601
1602 1602 self.dataOut.code = self.radarControllerHeaderObj.code
1603 1603
1604 1604 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1605 1605
1606 1606 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1607 1607
1608 1608 self.dataOut.flagDecodeData = False #assume the data has not been decoded
1609 1609
1610 1610 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1611 1611
1612 1612 self.dataOut.flagShiftFFT = False
1613 1613
1614 1614 def getData(self):
1615 1615 """
1616 1616 getData takes one unit of data from the read buffer and copies it to the "Voltage" class
1617 1617 together with all its associated parameters (metadata). When there is no data left in the
1618 1618 read buffer, a new data block is read using "readNextBlock"
1619 1619
1620 1620 It also increments the buffer counter by 1.
1621 1621
1622 1622 Return:
1623 1623 data : returns one profile of voltages (channels * heights) copied from the
1624 1624 buffer. If there are no more files to read it returns 0.
1625 1625
1626 1626 Affected variables:
1627 1627 self.dataOut
1628 1628 self.profileIndex
1629 1629
1630 1630 Affected:
1631 1631 self.dataOut
1632 1632 self.profileIndex
1633 1633 self.flagTimeBlock
1634 1634 self.flagIsNewBlock
1635 1635 """
1636 1636
1637 1637 if self.flagNoMoreFiles:
1638 1638 self.dataOut.flagNoData = True
1639 1639 print 'Process finished'
1640 1640 return 0
1641 1641
1642 1642 self.flagTimeBlock = 0
1643 1643 self.flagIsNewBlock = 0
1644 1644
1645 1645 if self.__hasNotDataInBuffer():
1646 1646
1647 1647 if not( self.readNextBlock() ):
1648 1648 return 0
1649 1649
1650 1650 self.getFirstHeader()
1651 1651
1652 1652 if self.datablock == None:
1653 1653 self.dataOut.flagNoData = True
1654 1654 return 0
1655 1655
1656 1656 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1657 1657
1658 1658 self.dataOut.flagNoData = False
1659 1659
1660 1660 self.getBasicHeader()
1661 1661
1662 1662 self.profileIndex += 1
1663 1663
1664 1664 self.dataOut.realtime = self.online
1665 1665
1666 1666 return self.dataOut.data
1667 1667
1668 1668
1669 1669 class VoltageWriter(JRODataWriter):
1670 1670 """
1671 1671 This class writes voltage data to raw data files (.r). Data are always
1672 1672 written in blocks.
1673 1673 """
1674 1674
1675 1675 ext = ".r"
1676 1676
1677 1677 optchar = "D"
1678 1678
1679 1679 shapeBuffer = None
1680 1680
1681 1681
1682 1682 def __init__(self):
1683 1683 """
1684 1684 Initializer of the VoltageWriter class for writing voltage data.
1685 1685
1686 1686 Affected:
1687 1687 self.dataOut
1688 1688
1689 1689 Return: None
1690 1690 """
1691 1691
1692 1692 self.nTotalBlocks = 0
1693 1693
1694 1694 self.profileIndex = 0
1695 1695
1696 1696 self.isConfig = False
1697 1697
1698 1698 self.fp = None
1699 1699
1700 1700 self.flagIsNewFile = 1
1701 1701
1702 1702 self.nTotalBlocks = 0
1703 1703
1704 1704 self.flagIsNewBlock = 0
1705 1705
1706 1706 self.setFile = None
1707 1707
1708 1708 self.dtype = None
1709 1709
1710 1710 self.path = None
1711 1711
1712 1712 self.filename = None
1713 1713
1714 1714 self.basicHeaderObj = BasicHeader(LOCALTIME)
1715 1715
1716 1716 self.systemHeaderObj = SystemHeader()
1717 1717
1718 1718 self.radarControllerHeaderObj = RadarControllerHeader()
1719 1719
1720 1720 self.processingHeaderObj = ProcessingHeader()
1721 1721
1722 1722 def hasAllDataInBuffer(self):
1723 1723 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1724 1724 return 1
1725 1725 return 0
1726 1726
1727 1727
1728 1728 def setBlockDimension(self):
1729 1729 """
1730 1730 Gets the dimensional shapes of the data sub-blocks that make up a block
1731 1731
1732 1732 Affected:
1733 1733 self.shapeBuffer
1734 1734 self.datablock
1736 1736
1737 1737 Return: None
1738 1738 """
1739 1739 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1740 1740 self.processingHeaderObj.nHeights,
1741 1741 self.systemHeaderObj.nChannels)
1742 1742
1743 1743 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1744 1744 self.processingHeaderObj.profilesPerBlock,
1745 1745 self.processingHeaderObj.nHeights),
1746 1746 dtype=numpy.dtype('complex64'))
1747 1747
1748 1748
1749 1749 def writeBlock(self):
1750 1750 """
1751 1751 Writes the buffer to the designated file
1752 1752
1753 1753 Affected:
1754 1754 self.profileIndex
1755 1755 self.flagIsNewFile
1756 1756 self.flagIsNewBlock
1757 1757 self.nTotalBlocks
1758 1758 self.blockIndex
1759 1759
1760 1760 Return: None
1761 1761 """
1762 1762 data = numpy.zeros( self.shapeBuffer, self.dtype )
1763 1763
1764 1764 junk = numpy.transpose(self.datablock, (1,2,0))
1765 1765
1766 1766 data['real'] = junk.real
1767 1767 data['imag'] = junk.imag
1768 1768
1769 1769 data = data.reshape( (-1) )
1770 1770
1771 1771 data.tofile( self.fp )
1772 1772
1773 1773 self.datablock.fill(0)
1774 1774
1775 1775 self.profileIndex = 0
1776 1776 self.flagIsNewFile = 0
1777 1777 self.flagIsNewBlock = 1
1778 1778
1779 1779 self.blockIndex += 1
1780 1780 self.nTotalBlocks += 1
1781 1781
1782 1782 def putData(self):
1783 1783 """
1784 1784 Adds the current data to the block buffer and writes the block to a file when the buffer is full
1785 1785
1786 1786 Affected:
1787 1787 self.flagIsNewBlock
1788 1788 self.profileIndex
1789 1789
1790 1790 Return:
1791 1791 0 : if there is no data or no more files can be written
1792 1792 1 : if the data was buffered (and the block written once the buffer filled up)
1793 1793 """
1794 1794 if self.dataOut.flagNoData:
1795 1795 return 0
1796 1796
1797 1797 self.flagIsNewBlock = 0
1798 1798
1799 1799 if self.dataOut.flagTimeBlock:
1800 1800
1801 1801 self.datablock.fill(0)
1802 1802 self.profileIndex = 0
1803 1803 self.setNextFile()
1804 1804
1805 1805 if self.profileIndex == 0:
1806 1806 self.setBasicHeader()
1807 1807
1808 1808 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1809 1809
1810 1810 self.profileIndex += 1
1811 1811
1812 1812 if self.hasAllDataInBuffer():
1813 1813 #if self.flagIsNewFile:
1814 1814 self.writeNextBlock()
1815 1815 # self.setFirstHeader()
1816 1816
1817 1817 return 1
1818 1818
1819 1819 def __getProcessFlags(self):
1820 1820
1821 1821 processFlags = 0
1822 1822
1823 1823 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1824 1824 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1825 1825 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1826 1826 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1827 1827 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1828 1828 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1829 1829
1830 1830 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1831 1831
1832 1832
1833 1833
1834 1834 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1835 1835 PROCFLAG.DATATYPE_SHORT,
1836 1836 PROCFLAG.DATATYPE_LONG,
1837 1837 PROCFLAG.DATATYPE_INT64,
1838 1838 PROCFLAG.DATATYPE_FLOAT,
1839 1839 PROCFLAG.DATATYPE_DOUBLE]
1840 1840
1841 1841
1842 1842 for index in range(len(dtypeList)):
1843 1843 if self.dataOut.dtype == dtypeList[index]:
1844 1844 dtypeValue = datatypeValueList[index]
1845 1845 break
1846 1846
1847 1847 processFlags += dtypeValue
1848 1848
1849 1849 if self.dataOut.flagDecodeData:
1850 1850 processFlags += PROCFLAG.DECODE_DATA
1851 1851
1852 1852 if self.dataOut.flagDeflipData:
1853 1853 processFlags += PROCFLAG.DEFLIP_DATA
1854 1854
1855 1855 if self.dataOut.code != None:
1856 1856 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1857 1857
1858 1858 if self.dataOut.nCohInt > 1:
1859 1859 processFlags += PROCFLAG.COHERENT_INTEGRATION
1860 1860
1861 1861 return processFlags
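# Illustrative example (hypothetical dataOut settings): for 2-byte integer samples
# with decoding and deflipping disabled, no code defined and nCohInt > 1, the
# returned value would be PROCFLAG.DATATYPE_SHORT + PROCFLAG.COHERENT_INTEGRATION.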
1862 1862
1863 1863
1864 1864 def __getBlockSize(self):
1865 1865 '''
1866 1866 This method determines the number of bytes in a Voltage-type data block
1867 1867 '''
1868 1868
1869 1869 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1870 1870 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1871 1871 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1872 1872 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1873 1873 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1874 1874 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1875 1875
1876 1876 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1877 1877 datatypeValueList = [1,2,4,8,4,8]
1878 1878 for index in range(len(dtypeList)):
1879 1879 if self.dataOut.dtype == dtypeList[index]:
1880 1880 datatypeValue = datatypeValueList[index]
1881 1881 break
1882 1882
1883 1883 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
1884 1884
1885 1885 return blocksize
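# Illustrative example (hypothetical values): 100 heights, 2 channels, 64 profiles
# per block and 2-byte integer samples give
# blocksize = 100 * 2 * 64 * 2 * 2 = 51200 bytes
# (the last factor of 2 accounts for the real and imaginary parts).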
1886 1886
1887 1887 def setFirstHeader(self):
1888 1888
1889 1889 """
1890 1890 Builds a copy of the First Header from the output data object
1891 1891
1892 1892 Affected:
1893 1893 self.systemHeaderObj
1894 1894 self.radarControllerHeaderObj
1895 1895 self.dtype
1896 1896
1897 1897 Return:
1898 1898 None
1899 1899 """
1900 1900
1901 1901 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1902 1902 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1903 1903 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1904 1904
1905 1905 self.setBasicHeader()
1906 1906
1907 1907 processingHeaderSize = 40 # bytes
1908 1908 self.processingHeaderObj.dtype = 0 # Voltage
1909 1909 self.processingHeaderObj.blockSize = self.__getBlockSize()
1910 1910 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1911 1911 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1912 1912 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1913 1913 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1914 1914 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1915 1915 self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
1916 1916 self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
1917 1917
1918 1918 # if self.dataOut.code != None:
1919 1919 # self.processingHeaderObj.code = self.dataOut.code
1920 1920 # self.processingHeaderObj.nCode = self.dataOut.nCode
1921 1921 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
1922 1922 # codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1923 1923 # processingHeaderSize += codesize
1924 1924
1925 1925 if self.processingHeaderObj.nWindows != 0:
1926 1926 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1927 1927 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1928 1928 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1929 1929 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1930 1930 processingHeaderSize += 12
1931 1931
1932 1932 self.processingHeaderObj.size = processingHeaderSize
1933 1933
1934 1934 class SpectraReader(JRODataReader):
1935 1935 """
1936 1936 This class reads spectra data from processed files (.pdata). Data are always
1937 1937 read in blocks. The data read (3-dimensional arrays) are stored in three
1938 1938 buffers: Self Spectra, Cross Spectra and DC Channels.
1939 1939
1940 1940 equalChannelPairs * heights * profiles (Self Spectra)
1941 1941 differentChannelPairs * heights * profiles (Cross Spectra)
1942 1942 channels * heights (DC Channels)
1943 1943
1944 1944 This class contains instances (objects) of the BasicHeader, SystemHeader,
1945 1945 RadarControllerHeader and Spectra classes. The first three store the data
1946 1946 header information (metadata), and the fourth (Spectra) obtains and stores one
1947 1947 block of data from the "buffer" each time the "getData" method is executed.
1948 1948
1949 1949 Example:
1950 1950 dpath = "/home/myuser/data"
1951 1951
1952 1952 startTime = datetime.datetime(2010,1,20,0,0,0)
1953 1953
1954 1954 endTime = datetime.datetime(2010,1,21,23,59,59)
1955 1955
1956 1956 readerObj = SpectraReader()
1957 1957
1958 1958 readerObj.setup(dpath, startTime, endTime)
1959 1959
1960 1960 while(True):
1961 1961
1962 1962 readerObj.getData()
1963 1963
1964 1964 print readerObj.data_spc
1965 1965
1966 1966 print readerObj.data_cspc
1967 1967
1968 1968 print readerObj.data_dc
1969 1969
1970 1970 if readerObj.flagNoMoreFiles:
1971 1971 break
1972 1972
1973 1973 """
1974 1974
1975 1975 pts2read_SelfSpectra = 0
1976 1976
1977 1977 pts2read_CrossSpectra = 0
1978 1978
1979 1979 pts2read_DCchannels = 0
1980 1980
1981 1981 ext = ".pdata"
1982 1982
1983 1983 optchar = "P"
1984 1984
1985 1985 dataOut = None
1986 1986
1987 1987 nRdChannels = None
1988 1988
1989 1989 nRdPairs = None
1990 1990
1991 1991 rdPairList = []
1992 1992
1993 1993 def __init__(self):
1994 1994 """
1995 1995 Initializer of the SpectraReader class for reading spectra data.
1996 1996
1997 1997 Inputs:
1998 1998 dataOut : Object of the Spectra class. This object will be used to
1999 1999 store one block of data every time a request is made
2000 2000 (getData). The data will be obtained from the data buffer;
2001 2001 if the buffer is empty, a new data block will be read from
2002 2002 file.
2003 2003 If this parameter is not given, one is created internally.
2004 2004
2005 2005 Affected:
2006 2006 self.dataOut
2007 2007
2008 2008 Return : None
2009 2009 """
2010 2010
2011 2011 self.isConfig = False
2012 2012
2013 2013 self.pts2read_SelfSpectra = 0
2014 2014
2015 2015 self.pts2read_CrossSpectra = 0
2016 2016
2017 2017 self.pts2read_DCchannels = 0
2018 2018
2019 2019 self.datablock = None
2020 2020
2021 2021 self.utc = None
2022 2022
2023 2023 self.ext = ".pdata"
2024 2024
2025 2025 self.optchar = "P"
2026 2026
2027 2027 self.basicHeaderObj = BasicHeader(LOCALTIME)
2028 2028
2029 2029 self.systemHeaderObj = SystemHeader()
2030 2030
2031 2031 self.radarControllerHeaderObj = RadarControllerHeader()
2032 2032
2033 2033 self.processingHeaderObj = ProcessingHeader()
2034 2034
2035 2035 self.online = 0
2036 2036
2037 2037 self.fp = None
2038 2038
2039 2039 self.idFile = None
2040 2040
2041 2041 self.dtype = None
2042 2042
2043 2043 self.fileSizeByHeader = None
2044 2044
2045 2045 self.filenameList = []
2046 2046
2047 2047 self.filename = None
2048 2048
2049 2049 self.fileSize = None
2050 2050
2051 2051 self.firstHeaderSize = 0
2052 2052
2053 2053 self.basicHeaderSize = 24
2054 2054
2055 2055 self.pathList = []
2056 2056
2057 2057 self.lastUTTime = 0
2058 2058
2059 2059 self.maxTimeStep = 30
2060 2060
2061 2061 self.flagNoMoreFiles = 0
2062 2062
2063 2063 self.set = 0
2064 2064
2065 2065 self.path = None
2066 2066
2067 2067 self.delay = 60 #seconds
2068 2068
2069 2069 self.nTries = 3 #number of retries
2070 2070
2071 2071 self.nFiles = 3 #number of files for searching
2072 2072
2073 2073 self.nReadBlocks = 0
2074 2074
2075 2075 self.flagIsNewFile = 1
2076 2076
2077 2077 self.__isFirstTimeOnline = 1
2078 2078
2079 2079 self.ippSeconds = 0
2080 2080
2081 2081 self.flagTimeBlock = 0
2082 2082
2083 2083 self.flagIsNewBlock = 0
2084 2084
2085 2085 self.nTotalBlocks = 0
2086 2086
2087 2087 self.blocksize = 0
2088 2088
2089 2089 self.dataOut = self.createObjByDefault()
2090 2090
2091 2091 self.profileIndex = 1 #Always
2092 2092
2093 2093
2094 2094 def createObjByDefault(self):
2095 2095
2096 2096 dataObj = Spectra()
2097 2097
2098 2098 return dataObj
2099 2099
2100 2100 def __hasNotDataInBuffer(self):
2101 2101 return 1
2102 2102
2103 2103
2104 2104 def getBlockDimension(self):
2105 2105 """
2106 2106 Gets the number of points to read for each data block
2107 2107
2108 2108 Affected:
2109 2109 self.nRdChannels
2110 2110 self.nRdPairs
2111 2111 self.pts2read_SelfSpectra
2112 2112 self.pts2read_CrossSpectra
2113 2113 self.pts2read_DCchannels
2114 2114 self.blocksize
2115 2115 self.dataOut.nChannels
2116 2116 self.dataOut.nPairs
2117 2117
2118 2118 Return:
2119 2119 None
2120 2120 """
2121 2121 self.nRdChannels = 0
2122 2122 self.nRdPairs = 0
2123 2123 self.rdPairList = []
2124 2124
2125 2125 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2126 2126 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2127 2127 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectrum)
2128 2128 else:
2129 2129 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectrum)
2130 2130 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2131 2131
2132 2132 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2133 2133
2134 2134 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2135 2135 self.blocksize = self.pts2read_SelfSpectra
2136 2136
2137 2137 if self.processingHeaderObj.flag_cspc:
2138 2138 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2139 2139 self.blocksize += self.pts2read_CrossSpectra
2140 2140
2141 2141 if self.processingHeaderObj.flag_dc:
2142 2142 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2143 2143 self.blocksize += self.pts2read_DCchannels
2144 2144
2145 2145 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
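# Illustrative example (hypothetical header values): with 2 self-spectra channels,
# 1 cross pair, 100 heights and 128 profiles per block, pts2read = 100 * 128 = 12800,
# so blocksize = 2*12800 self-spectra points, plus 1*12800 cross-spectra points if
# flag_cspc is set, plus 2*100 DC points if flag_dc is set.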
2146 2146
2147 2147
2148 2148 def readBlock(self):
2149 2149 """
2150 2150 Reads the data block from the current position of the file pointer
2151 2151 (self.fp) and updates all the parameters related to the data block
2152 2152 (metadata + data). The data read is stored in the buffer and the buffer
2153 2153 counter is reset to 0
2154 2154
2155 2155 Return: 1 if the block was read
2156 2156
2157 2157 Affected variables:
2158 2158
2159 2159 self.flagIsNewFile
2160 2160 self.flagIsNewBlock
2161 2161 self.nTotalBlocks
2162 2162 self.data_spc
2163 2163 self.data_cspc
2164 2164 self.data_dc
2165 2165
2166 2166 Exceptions:
2167 2167 If a block that was read is not a valid block
2168 2168 """
2169 2169 blockOk_flag = False
2170 2170 fpointer = self.fp.tell()
2171 2171
2172 2172 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2173 2173 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2174 2174
2175 2175 if self.processingHeaderObj.flag_cspc:
2176 2176 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2177 2177 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2178 2178
2179 2179 if self.processingHeaderObj.flag_dc:
2180 2180 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2181 2181 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2182 2182
2183 2183
2184 2184 if not(self.processingHeaderObj.shif_fft):
2185 2185 #shift to the right along axis 2 by half a block
2186 2186 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2187 2187 spc = numpy.roll( spc, shift , axis=2 )
2188 2188
2189 2189 if self.processingHeaderObj.flag_cspc:
2190 2190 #shift to the right along axis 2 by half a block
2191 2191 cspc = numpy.roll( cspc, shift, axis=2 )
2192 2192
2193 2193 # self.processingHeaderObj.shif_fft = True
2194 2194
2195 2195 spc = numpy.transpose( spc, (0,2,1) )
2196 2196 self.data_spc = spc
2197 2197
2198 2198 if self.processingHeaderObj.flag_cspc:
2199 2199 cspc = numpy.transpose( cspc, (0,2,1) )
2200 2200 self.data_cspc = cspc['real'] + cspc['imag']*1j
2201 2201 else:
2202 2202 self.data_cspc = None
2203 2203
2204 2204 if self.processingHeaderObj.flag_dc:
2205 2205 self.data_dc = dc['real'] + dc['imag']*1j
2206 2206 else:
2207 2207 self.data_dc = None
2208 2208
2209 2209 self.flagIsNewFile = 0
2210 2210 self.flagIsNewBlock = 1
2211 2211
2212 2212 self.nTotalBlocks += 1
2213 2213 self.nReadBlocks += 1
2214 2214
2215 2215 return 1
2216 2216
2217 2217 def getFirstHeader(self):
2218 2218
2219 2219 self.dataOut.dtype = self.dtype
2220 2220
2221 2221 self.dataOut.nPairs = self.nRdPairs
2222 2222
2223 2223 self.dataOut.pairsList = self.rdPairList
2224 2224
2225 2225 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2226 2226
2227 2227 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2228 2228
2229 2229 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2230 2230
2231 2231 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2232 2232
2233 2233 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2234 2234
2235 2235 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2236 2236
2237 2237 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2238 2238
2239 2239 self.dataOut.ippSeconds = self.ippSeconds
2240 2240
2241 2241 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2242 2242
2243 2243 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2244 2244
2245 2245 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2246 2246
2247 2247 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2248 2248
2249 2249 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2250 2250
2251 2251 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2252 2252
2253 2253 if self.radarControllerHeaderObj.code != None:
2254 2254
2255 2255 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
2256 2256
2257 2257 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
2258 2258
2259 2259 self.dataOut.code = self.radarControllerHeaderObj.code
2260 2260
2261 2261 self.dataOut.flagDecodeData = True
2262 2262
2263 2263 def getData(self):
2264 2264 """
2265 2265 Copies the read buffer to the "Spectra" class,
2266 2266 together with all its associated parameters (metadata). When there is no data in the
2267 2267 read buffer, a new data block is read using "readNextBlock"
2268 2268
2269 2269 Return:
2270 2270 0 : if there are no more files available
2271 2271 data_spc : if the buffer was copied successfully
2272 2272
2273 2273 Affected:
2274 2274 self.dataOut
2275 2275
2276 2276 self.flagTimeBlock
2277 2277 self.flagIsNewBlock
2278 2278 """
2279 2279
2280 2280 if self.flagNoMoreFiles:
2281 2281 self.dataOut.flagNoData = True
2282 2282 print 'Process finished'
2283 2283 return 0
2284 2284
2285 2285 self.flagTimeBlock = 0
2286 2286 self.flagIsNewBlock = 0
2287 2287
2288 2288 if self.__hasNotDataInBuffer():
2289 2289
2290 2290 if not( self.readNextBlock() ):
2291 2291 self.dataOut.flagNoData = True
2292 2292 return 0
2293 2293
2294 2294 #data is a 3-dimensional numpy array (profiles, heights and channels)
2295 2295
2296 2296 if self.data_dc == None:
2297 2297 self.dataOut.flagNoData = True
2298 2298 return 0
2299 2299
2300 2300 self.getBasicHeader()
2301 2301
2302 2302 self.getFirstHeader()
2303 2303
2304 2304 self.dataOut.data_spc = self.data_spc
2305 2305
2306 2306 self.dataOut.data_cspc = self.data_cspc
2307 2307
2308 2308 self.dataOut.data_dc = self.data_dc
2309 2309
2310 2310 self.dataOut.flagNoData = False
2311 2311
2312 2312 self.dataOut.realtime = self.online
2313 2313
2314 2314 return self.dataOut.data_spc
2315 2315
2316 2316
2317 2317 class SpectraWriter(JRODataWriter):
2318 2318
2319 2319 """
2320 2320 This class writes spectra data to processed files (.pdata). Data are always
2321 2321 written in blocks.
2322 2322 """
2323 2323
2324 2324 ext = ".pdata"
2325 2325
2326 2326 optchar = "P"
2327 2327
2328 2328 shape_spc_Buffer = None
2329 2329
2330 2330 shape_cspc_Buffer = None
2331 2331
2332 2332 shape_dc_Buffer = None
2333 2333
2334 2334 data_spc = None
2335 2335
2336 2336 data_cspc = None
2337 2337
2338 2338 data_dc = None
2339 2339
2340 2340 # dataOut = None
2341 2341
2342 2342 def __init__(self):
2343 2343 """
2344 2344 Initializer of the SpectraWriter class for writing spectra data.
2345 2345
2346 2346 Affected:
2347 2347 self.dataOut
2348 2348 self.basicHeaderObj
2349 2349 self.systemHeaderObj
2350 2350 self.radarControllerHeaderObj
2351 2351 self.processingHeaderObj
2352 2352
2353 2353 Return: None
2354 2354 """
2355 2355
2356 2356 self.isConfig = False
2357 2357
2358 2358 self.nTotalBlocks = 0
2359 2359
2360 2360 self.data_spc = None
2361 2361
2362 2362 self.data_cspc = None
2363 2363
2364 2364 self.data_dc = None
2365 2365
2366 2366 self.fp = None
2367 2367
2368 2368 self.flagIsNewFile = 1
2369 2369
2370 2370 self.nTotalBlocks = 0
2371 2371
2372 2372 self.flagIsNewBlock = 0
2373 2373
2374 2374 self.setFile = None
2375 2375
2376 2376 self.dtype = None
2377 2377
2378 2378 self.path = None
2379 2379
2380 2380 self.noMoreFiles = 0
2381 2381
2382 2382 self.filename = None
2383 2383
2384 2384 self.basicHeaderObj = BasicHeader(LOCALTIME)
2385 2385
2386 2386 self.systemHeaderObj = SystemHeader()
2387 2387
2388 2388 self.radarControllerHeaderObj = RadarControllerHeader()
2389 2389
2390 2390 self.processingHeaderObj = ProcessingHeader()
2391 2391
2392 2392
2393 2393 def hasAllDataInBuffer(self):
2394 2394 return 1
2395 2395
2396 2396
2397 2397 def setBlockDimension(self):
2398 2398 """
2399 2399 Gets the dimensional shapes of the data sub-blocks that make up a block
2400 2400
2401 2401 Affected:
2402 2402 self.shape_spc_Buffer
2403 2403 self.shape_cspc_Buffer
2404 2404 self.shape_dc_Buffer
2405 2405
2406 2406 Return: None
2407 2407 """
2408 2408 self.shape_spc_Buffer = (self.dataOut.nChannels,
2409 2409 self.processingHeaderObj.nHeights,
2410 2410 self.processingHeaderObj.profilesPerBlock)
2411 2411
2412 2412 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2413 2413 self.processingHeaderObj.nHeights,
2414 2414 self.processingHeaderObj.profilesPerBlock)
2415 2415
2416 2416 self.shape_dc_Buffer = (self.dataOut.nChannels,
2417 2417 self.processingHeaderObj.nHeights)
2418 2418
2419 2419
2420 2420 def writeBlock(self):
2421 2421 """
2422 2422 Writes the buffer to the designated file
2423 2423
2424 2424 Affected:
2425 2425 self.data_spc
2426 2426 self.data_cspc
2427 2427 self.data_dc
2428 2428 self.flagIsNewFile
2429 2429 self.flagIsNewBlock
2430 2430 self.nTotalBlocks
2431 2431 self.nWriteBlocks
2432 2432
2433 2433 Return: None
2434 2434 """
2435 2435
2436 2436 spc = numpy.transpose( self.data_spc, (0,2,1) )
2437 2437 if not( self.processingHeaderObj.shif_fft ):
2438 2438 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half a block
2439 2439 data = spc.reshape((-1))
2440 2440 data = data.astype(self.dtype[0])
2441 2441 data.tofile(self.fp)
2442 2442
2443 2443 if self.data_cspc != None:
2444 2444 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2445 2445 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2446 2446 if not( self.processingHeaderObj.shif_fft ):
2447 2447 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by half a block
2448 2448 data['real'] = cspc.real
2449 2449 data['imag'] = cspc.imag
2450 2450 data = data.reshape((-1))
2451 2451 data.tofile(self.fp)
2452 2452
2453 2453 if self.data_dc != None:
2454 2454 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2455 2455 dc = self.data_dc
2456 2456 data['real'] = dc.real
2457 2457 data['imag'] = dc.imag
2458 2458 data = data.reshape((-1))
2459 2459 data.tofile(self.fp)
2460 2460
2461 2461 self.data_spc.fill(0)
2462 2462
2463 2463 if self.data_dc != None:
2464 2464 self.data_dc.fill(0)
2465 2465
2466 2466 if self.data_cspc != None:
2467 2467 self.data_cspc.fill(0)
2468 2468
2469 2469 self.flagIsNewFile = 0
2470 2470 self.flagIsNewBlock = 1
2471 2471 self.nTotalBlocks += 1
2472 2472 self.nWriteBlocks += 1
2473 2473 self.blockIndex += 1
2474 2474
2475 2475
2476 2476 def putData(self):
2477 2477 """
2478 2478 Sets a block of data and then writes it to a file
2479 2479
2480 2480 Affected:
2481 2481 self.data_spc
2482 2482 self.data_cspc
2483 2483 self.data_dc
2484 2484
2485 2485 Return:
2486 2486 0 : if there is no data or no more files can be written
2487 2487 1 : if the data of one block was written to a file
2488 2488 """
2489 2489
2490 2490 if self.dataOut.flagNoData:
2491 2491 return 0
2492 2492
2493 2493 self.flagIsNewBlock = 0
2494 2494
2495 2495 if self.dataOut.flagTimeBlock:
2496 2496 self.data_spc.fill(0)
2497 2497 self.data_cspc.fill(0)
2498 2498 self.data_dc.fill(0)
2499 2499 self.setNextFile()
2500 2500
2501 2501 if self.flagIsNewFile == 0:
2502 2502 self.setBasicHeader()
2503 2503
2504 2504 self.data_spc = self.dataOut.data_spc.copy()
2505 2505 if self.dataOut.data_cspc != None:
2506 2506 self.data_cspc = self.dataOut.data_cspc.copy()
2507 2507 self.data_dc = self.dataOut.data_dc.copy()
2508 2508
2509 2509 # #self.processingHeaderObj.dataBlocksPerFile)
2510 2510 if self.hasAllDataInBuffer():
2511 2511 # self.setFirstHeader()
2512 2512 self.writeNextBlock()
2513 2513
2514 2514 return 1
2515 2515
2516 2516
2517 2517 def __getProcessFlags(self):
2518 2518
2519 2519 processFlags = 0
2520 2520
2521 2521 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2522 2522 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2523 2523 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2524 2524 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2525 2525 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2526 2526 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2527 2527
2528 2528 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2529 2529
2530 2530
2531 2531
2532 2532 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2533 2533 PROCFLAG.DATATYPE_SHORT,
2534 2534 PROCFLAG.DATATYPE_LONG,
2535 2535 PROCFLAG.DATATYPE_INT64,
2536 2536 PROCFLAG.DATATYPE_FLOAT,
2537 2537 PROCFLAG.DATATYPE_DOUBLE]
2538 2538
2539 2539
2540 2540 for index in range(len(dtypeList)):
2541 2541 if self.dataOut.dtype == dtypeList[index]:
2542 2542 dtypeValue = datatypeValueList[index]
2543 2543 break
2544 2544
2545 2545 processFlags += dtypeValue
2546 2546
2547 2547 if self.dataOut.flagDecodeData:
2548 2548 processFlags += PROCFLAG.DECODE_DATA
2549 2549
2550 2550 if self.dataOut.flagDeflipData:
2551 2551 processFlags += PROCFLAG.DEFLIP_DATA
2552 2552
2553 2553 if self.dataOut.code != None:
2554 2554 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2555 2555
2556 2556 if self.dataOut.nIncohInt > 1:
2557 2557 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2558 2558
2559 2559 if self.dataOut.data_dc != None:
2560 2560 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2561 2561
2562 2562 return processFlags
2563 2563
2564 2564
2565 2565 def __getBlockSize(self):
2566 2566 '''
2567 2567 This method determines the number of bytes in a Spectra-type data block
2568 2568 '''
2569 2569
2570 2570 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2571 2571 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2572 2572 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2573 2573 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2574 2574 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2575 2575 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2576 2576
2577 2577 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2578 2578 datatypeValueList = [1,2,4,8,4,8]
2579 2579 for index in range(len(dtypeList)):
2580 2580 if self.dataOut.dtype == dtypeList[index]:
2581 2581 datatypeValue = datatypeValueList[index]
2582 2582 break
2583 2583
2584 2584
2585 2585 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2586 2586
2587 2587 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2588 2588 blocksize = (pts2write_SelfSpectra*datatypeValue)
2589 2589
2590 2590 if self.dataOut.data_cspc != None:
2591 2591 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2592 2592 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2593 2593
2594 2594 if self.dataOut.data_dc != None:
2595 2595 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2596 2596 blocksize += (pts2write_DCchannels*datatypeValue*2)
2597 2597
2598 2598 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
2599 2599
2600 2600 return blocksize
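# Illustrative example (hypothetical values): 2 channels, 1 cross pair, 100 heights,
# 128 FFT points and 4-byte float samples give pts2write = 100 * 128 = 12800 and
# blocksize = (2*12800)*4 + (1*12800)*4*2 + (2*100)*4*2 = 206400 bytes
# (cross spectra and DC channels store both real and imaginary parts).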
2601 2601
2602 2602 def setFirstHeader(self):
2603 2603
2604 2604 """
2605 2605 Builds a copy of the First Header from the output data object
2606 2606
2607 2607 Affected:
2608 2608 self.systemHeaderObj
2609 2609 self.radarControllerHeaderObj
2610 2610 self.dtype
2611 2611
2612 2612 Return:
2613 2613 None
2614 2614 """
2615 2615
2616 2616 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2617 2617 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2618 2618 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2619 2619 old_code_size = self.dataOut.radarControllerHeaderObj.code_size
2620 2620 new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
2621 2621 self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size
2622 2622
2623 2623 self.setBasicHeader()
2624 2624
2625 2625 processingHeaderSize = 40 # bytes
2626 2626 self.processingHeaderObj.dtype = 1 # Spectra
2627 2627 self.processingHeaderObj.blockSize = self.__getBlockSize()
2628 2628 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2629 2629 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2630 2630 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2631 2631 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2632 2632 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval
2633 2633 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2634 2634 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2635 2635 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2636 2636
2637 2637 if self.processingHeaderObj.totalSpectra > 0:
2638 2638 channelList = []
2639 2639 for channel in range(self.dataOut.nChannels):
2640 2640 channelList.append(channel)
2641 2641 channelList.append(channel)
2642 2642
2643 2643 pairsList = []
2644 2644 if self.dataOut.nPairs > 0:
2645 2645 for pair in self.dataOut.pairsList:
2646 2646 pairsList.append(pair[0])
2647 2647 pairsList.append(pair[1])
2648 2648
2649 2649 spectraComb = channelList + pairsList
2650 2650 spectraComb = numpy.array(spectraComb,dtype="u1")
2651 2651 self.processingHeaderObj.spectraComb = spectraComb
2652 2652 sizeOfSpcComb = len(spectraComb)
2653 2653 processingHeaderSize += sizeOfSpcComb
2654 2654
2655 2655 # The processing header should not have information about code
2656 2656 # if self.dataOut.code != None:
2657 2657 # self.processingHeaderObj.code = self.dataOut.code
2658 2658 # self.processingHeaderObj.nCode = self.dataOut.nCode
2659 2659 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2660 2660 # nCodeSize = 4 # bytes
2661 2661 # nBaudSize = 4 # bytes
2662 2662 # codeSize = 4 # bytes
2663 2663 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2664 2664 # processingHeaderSize += sizeOfCode
2665 2665
2666 2666 if self.processingHeaderObj.nWindows != 0:
2667 2667 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2668 2668 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2669 2669 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2670 2670 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2671 2671 sizeOfFirstHeight = 4
2672 2672 sizeOfdeltaHeight = 4
2673 2673 sizeOfnHeights = 4
2674 2674 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2675 2675 processingHeaderSize += sizeOfWindows
2676 2676
2677 2677 self.processingHeaderObj.size = processingHeaderSize
2678 2678
2679 2679 class SpectraHeisWriter(Operation):
2680 2680 # set = None
2681 2681 setFile = None
2682 2682 idblock = None
2683 2683 doypath = None
2684 2684 subfolder = None
2685 2685
2686 2686 def __init__(self):
2687 2687 self.wrObj = FITS()
2688 2688 # self.dataOut = dataOut
2689 2689 self.nTotalBlocks=0
2690 2690 # self.set = None
2691 2691 self.setFile = None
2692 2692 self.idblock = 0
2693 2693 self.wrpath = None
2694 2694 self.doypath = None
2695 2695 self.subfolder = None
2696 2696 self.isConfig = False
2697 2697
2698 2698 def isNumber(self, str):
2699 2699 """
2700 2700 Checks whether the characters that make up a string can be converted to a number.
2701 2701
2702 2702 Exceptions:
2703 2703 If a given string cannot be converted to a number
2704 2704 Input:
2705 2705 str, the string that is analyzed to determine whether or not it can be converted to a number
2706 2706
2707 2707 Return:
2708 2708 True : if the string is numeric
2709 2709 False : if it is not a numeric string
2710 2710 """
2711 2711 try:
2712 2712 float( str )
2713 2713 return True
2714 2714 except:
2715 2715 return False
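# e.g. (illustrative calls) self.isNumber("005") -> True, self.isNumber("abc") -> False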
2716 2716
2717 2717 def setup(self, dataOut, wrpath):
2718 2718
2719 2719 if not(os.path.exists(wrpath)):
2720 2720 os.mkdir(wrpath)
2721 2721
2722 2722 self.wrpath = wrpath
2723 2723 # self.setFile = 0
2724 2724 self.dataOut = dataOut
2725 2725
2726 2726 def putData(self):
2727 2727 name= time.localtime( self.dataOut.utctime)
2728 2728 ext=".fits"
2729 2729
2730 2730 if self.doypath == None:
2731 2731 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2732 2732 self.doypath = os.path.join( self.wrpath, self.subfolder )
2733 2733 os.mkdir(self.doypath)
2734 2734
2735 2735 if self.setFile == None:
2736 2736 # self.set = self.dataOut.set
2737 2737 self.setFile = 0
2738 2738 # if self.set != self.dataOut.set:
2739 2739 ## self.set = self.dataOut.set
2740 2740 # self.setFile = 0
2741 2741
2742 2742 #make the filename
2743 2743 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2744 2744
2745 2745 filename = os.path.join(self.wrpath,self.subfolder, file)
2746 2746
2747 2747 idblock = numpy.array([self.idblock],dtype="int64")
2748 2748 header=self.wrObj.cFImage(idblock=idblock,
2749 2749 year=time.gmtime(self.dataOut.utctime).tm_year,
2750 2750 month=time.gmtime(self.dataOut.utctime).tm_mon,
2751 2751 day=time.gmtime(self.dataOut.utctime).tm_mday,
2752 2752 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2753 2753 minute=time.gmtime(self.dataOut.utctime).tm_min,
2754 2754 second=time.gmtime(self.dataOut.utctime).tm_sec)
2755 2755
2756 2756 c=3E8
2757 2757 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2758 2758 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2759 2759
2760 2760 colList = []
2761 2761
2762 2762 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2763 2763
2764 2764 colList.append(colFreq)
2765 2765
2766 2766 nchannel=self.dataOut.nChannels
2767 2767
2768 2768 for i in range(nchannel):
2769 2769 col = self.wrObj.writeData(name="PCh"+str(i+1),
2770 2770 format=str(self.dataOut.nFFTPoints)+'E',
2771 2771 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2772 2772
2773 2773 colList.append(col)
2774 2774
2775 2775 data=self.wrObj.Ctable(colList=colList)
2776 2776
2777 2777 self.wrObj.CFile(header,data)
2778 2778
2779 2779 self.wrObj.wFile(filename)
2780 2780
2781 2781 #update the setFile
2782 2782 self.setFile += 1
2783 2783 self.idblock += 1
2784 2784
2785 2785 return 1
2786 2786
2787 2787 def run(self, dataOut, **kwargs):
2788 2788
2789 2789 if not(self.isConfig):
2790 2790
2791 2791 self.setup(dataOut, **kwargs)
2792 2792 self.isConfig = True
2793 2793
2794 2794 self.putData()
2795 2795
2796 2796
2797 2797
2798 2798 class ParameterConf:
2799 2799 ELEMENTNAME = 'Parameter'
2800 2800 def __init__(self):
2801 2801 self.name = ''
2802 2802 self.value = ''
2803 2803
2804 2804 def readXml(self, parmElement):
2805 2805 self.name = parmElement.get('name')
2806 2806 self.value = parmElement.get('value')
2807 2807
2808 2808 def getElementName(self):
2809 2809 return self.ELEMENTNAME
2810 2810
2811 2811 class Metadata:
2812 2812
2813 2813 def __init__(self, filename):
2814 2814 self.parmConfObjList = []
2815 2815 self.readXml(filename)
2816 2816
2817 2817 def readXml(self, filename):
2818 2818 self.projectElement = None
2819 2819 self.procUnitConfObjDict = {}
2820 2820 self.projectElement = ElementTree().parse(filename)
2821 2821 self.project = self.projectElement.tag
2822 2822
2823 2823 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2824 2824
2825 2825 for parmElement in parmElementList:
2826 2826 parmConfObj = ParameterConf()
2827 2827 parmConfObj.readXml(parmElement)
2828 2828 self.parmConfObjList.append(parmConfObj)
2829 2829
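# Illustrative metadata file (hypothetical parameter names/values) as consumed by
# Metadata/ParameterConf above:
#
#   <Metadata>
#       <Parameter name="EXPNAME" value="MyExperiment"/>
#       <Parameter name="DATATYPE" value="SPECTRA"/>
#   </Metadata>
#
# Every <Parameter> element found in the XML tree is read through its "name" and
# "value" attributes and appended to parmConfObjList.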
2830 2830 class FitsWriter(Operation):
2831 2831
2832 2832 def __init__(self):
2833 2833 self.isConfig = False
2834 2834 self.dataBlocksPerFile = None
2835 2835 self.blockIndex = 0
2836 2836 self.flagIsNewFile = 1
2837 2837 self.fitsObj = None
2838 2838 self.optchar = 'P'
2839 2839 self.ext = '.fits'
2840 2840 self.setFile = 0
2841 2841
2842 2842 def setFitsHeader(self, dataOut, metadatafile):
2843 2843
2844 2844 header_data = pyfits.PrimaryHDU()
2845 2845
2846 2846 metadata4fits = Metadata(metadatafile)
2847 2847 for parameter in metadata4fits.parmConfObjList:
2848 2848 parm_name = parameter.name
2849 2849 parm_value = parameter.value
2850 2850
2851 2851 # if parm_value == 'fromdatadatetime':
2852 2852 # value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2853 2853 # elif parm_value == 'fromdataheights':
2854 2854 # value = dataOut.nHeights
2855 2855 # elif parm_value == 'fromdatachannel':
2856 2856 # value = dataOut.nChannels
2857 2857 # elif parm_value == 'fromdatasamples':
2858 2858 # value = dataOut.nFFTPoints
2859 2859 # else:
2860 2860 # value = parm_value
2861 2861
2862 2862 header_data.header[parm_name] = parm_value
2863 2863
2864 2864
2865 2865 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2866 2866 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
2867 2867 header_data.header['NCHANNELS'] = dataOut.nChannels
2868 2868 #header_data.header['HEIGHTS'] = dataOut.heightList
2869 2869 header_data.header['NHEIGHTS'] = dataOut.nHeights
2870 2870
2871 2871 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
2872 2872 header_data.header['NCOHINT'] = dataOut.nCohInt
2873 2873 header_data.header['NINCOHINT'] = dataOut.nIncohInt
2874 2874 header_data.header['TIMEZONE'] = dataOut.timeZone
2875 2875 header_data.header['NBLOCK'] = self.blockIndex
2876 2876
2877 2877 header_data.writeto(self.filename)
2878 2878
2879 2879 self.addExtension(dataOut.heightList,'HEIGHTLIST')
2880 2880
2881 2881
2882 2882 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2883 2883
2884 2884 self.path = path
2885 2885 self.dataOut = dataOut
2886 2886 self.metadatafile = metadatafile
2887 2887 self.dataBlocksPerFile = dataBlocksPerFile
2888 2888
2889 2889 def open(self):
2890 2890 self.fitsObj = pyfits.open(self.filename, mode='update')
2891 2891
2892 2892
2893 2893 def addExtension(self, data, tagname):
2894 2894 self.open()
2895 2895 extension = pyfits.ImageHDU(data=data, name=tagname)
2896 2896 #extension.header['TAG'] = tagname
2897 2897 self.fitsObj.append(extension)
2898 2898 self.write()
2899 2899
2900 2900 def addData(self, data):
2901 2901 self.open()
2902 2902 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
2903 2903 extension.header['UTCTIME'] = self.dataOut.utctime
2904 2904 self.fitsObj.append(extension)
2905 2905 self.blockIndex += 1
2906 2906 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2907 2907
2908 2908 self.write()
2909 2909
2910 2910 def write(self):
2911 2911
2912 2912 self.fitsObj.flush(verbose=True)
2913 2913 self.fitsObj.close()
2914 2914
2915 2915
2916 2916 def setNextFile(self):
2917 2917
2918 2918 ext = self.ext
2919 2919 path = self.path
2920 2920
2921 2921 timeTuple = time.localtime( self.dataOut.utctime)
2922 2922 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2923 2923
2924 2924 fullpath = os.path.join( path, subfolder )
2925 2925 if not( os.path.exists(fullpath) ):
2926 2926 os.mkdir(fullpath)
2927 2927 self.setFile = -1 #initialize the set counter
2928 2928 else:
2929 2929 filesList = os.listdir( fullpath )
2930 2930 if len( filesList ) > 0:
2931 2931 filesList = sorted( filesList, key=str.lower )
2932 2932 filen = filesList[-1]
2933 2933
2934 2934 if isNumber( filen[8:11] ):
2935 2935 self.setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
2936 2936 else:
2937 2937 self.setFile = -1
2938 2938 else:
2939 2939 self.setFile = -1 #initialize the set counter
2940 2940
2941 2941 setFile = self.setFile
2942 2942 setFile += 1
2943 2943
2944 2944 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2945 2945 timeTuple.tm_year,
2946 2946 timeTuple.tm_yday,
2947 2947 setFile,
2948 2948 ext )
2949 2949
2950 2950 filename = os.path.join( path, subfolder, file )
2951 2951
2952 2952 self.blockIndex = 0
2953 2953 self.filename = filename
2954 2954 self.setFile = setFile
2955 2955 self.flagIsNewFile = 1
2956 2956
2957 2957 print 'Writing the file: %s'%self.filename
2958 2958
2959 2959 self.setFitsHeader(self.dataOut, self.metadatafile)
2960 2960
2961 2961 return 1
2962 2962
2963 2963 def writeBlock(self):
2964 2964 self.addData(self.dataOut.data_spc)
2965 2965 self.flagIsNewFile = 0
2966 2966
2967 2967
2968 2968 def __setNewBlock(self):
2969 2969
2970 2970 if self.flagIsNewFile:
2971 2971 return 1
2972 2972
2973 2973 if self.blockIndex < self.dataBlocksPerFile:
2974 2974 return 1
2975 2975
2976 2976 if not( self.setNextFile() ):
2977 2977 return 0
2978 2978
2979 2979 return 1
2980 2980
2981 2981 def writeNextBlock(self):
2982 2982 if not( self.__setNewBlock() ):
2983 2983 return 0
2984 2984 self.writeBlock()
2985 2985 return 1
2986 2986
2987 2987 def putData(self):
2988 2988 if self.flagIsNewFile:
2989 2989 self.setNextFile()
2990 2990 self.writeNextBlock()
2991 2991
2992 2992 def run(self, dataOut, **kwargs):
2993 2993 if not(self.isConfig):
2994 2994 self.setup(dataOut, **kwargs)
2995 2995 self.isConfig = True
2996 2996 self.putData()
2997 2997
2998 2998
2999 2999 class FitsReader(ProcessingUnit):
3000 3000
3001 3001 # __TIMEZONE = time.timezone
3002 3002
3003 3003 expName = None
3004 3004 datetimestr = None
3005 3005 utc = None
3006 3006 nChannels = None
3007 3007 nSamples = None
3008 3008 dataBlocksPerFile = None
3009 3009 comments = None
3010 3010 lastUTTime = None
3011 3011 header_dict = None
3012 3012 data = None
3013 3013 data_header_dict = None
3014 3014
3015 3015 def __init__(self):
3016 3016 self.isConfig = False
3017 3017 self.ext = '.fits'
3018 3018 self.setFile = 0
3019 3019 self.flagNoMoreFiles = 0
3020 3020 self.flagIsNewFile = 1
3021 3021 self.flagTimeBlock = None
3022 3022 self.fileIndex = None
3023 3023 self.filename = None
3024 3024 self.fileSize = None
3025 3025 self.fitsObj = None
3026 3026 self.timeZone = None
3027 3027 self.nReadBlocks = 0
3028 3028 self.nTotalBlocks = 0
3029 3029 self.dataOut = self.createObjByDefault()
3030 3030 self.maxTimeStep = 10 # should be defined by the user via the setup() method
3031 3031 self.blockIndex = 1
3032 3032
3033 3033 def createObjByDefault(self):
3034 3034
3035 3035 dataObj = Fits()
3036 3036
3037 3037 return dataObj
3038 3038
3039 3039 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3040 3040 try:
3041 3041 fitsObj = pyfits.open(filename,'readonly')
3042 3042 except:
3043 3043 raise IOError, "The file %s can't be opened" %(filename)
3044 3044
3045 3045 header = fitsObj[0].header
3046 3046 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3047 3047 utc = time.mktime(struct_time) - time.timezone #TIMEZONE should be a parameter of the FITS header
3048 3048
3049 3049 ltc = utc
3050 3050 if useLocalTime:
3051 3051 ltc -= time.timezone
3052 3052 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3053 3053 thisTime = thisDatetime.time()
3054 3054
3055 3055 if not ((startTime <= thisTime) and (endTime > thisTime)):
3056 3056 return None
3057 3057
3058 3058 return thisDatetime
3059 3059
3060 3060 def __setNextFileOnline(self):
3061 3061 raise ValueError, "Not implemented"
3062 3062
3063 3063 def __setNextFileOffline(self):
3064 3064 idFile = self.fileIndex
3065 3065
3066 3066 while (True):
3067 3067 idFile += 1
3068 3068 if not(idFile < len(self.filenameList)):
3069 3069 self.flagNoMoreFiles = 1
3070 3070 print "No more Files"
3071 3071 return 0
3072 3072
3073 3073 filename = self.filenameList[idFile]
3074 3074
3075 3075 # if not(self.__verifyFile(filename)):
3076 3076 # continue
3077 3077
3078 3078 fileSize = os.path.getsize(filename)
3079 3079 fitsObj = pyfits.open(filename,'readonly')
3080 3080 break
3081 3081
3082 3082 self.flagIsNewFile = 1
3083 3083 self.fileIndex = idFile
3084 3084 self.filename = filename
3085 3085 self.fileSize = fileSize
3086 3086 self.fitsObj = fitsObj
3087 3087 self.blockIndex = 0
3088 3088 print "Setting the file: %s"%self.filename
3089 3089
3090 3090 return 1
3091 3091
3092 3092 def readHeader(self):
3093 3093 headerObj = self.fitsObj[0]
3094 3094
3095 3095 self.header_dict = headerObj.header
3096 3096 if 'EXPNAME' in headerObj.header.keys():
3097 3097 self.expName = headerObj.header['EXPNAME']
3098 3098
3099 3099 if 'DATATYPE' in headerObj.header.keys():
3100 3100 self.dataType = headerObj.header['DATATYPE']
3101 3101
3102 3102 self.datetimestr = headerObj.header['DATETIME']
3103 3103 channelList = headerObj.header['CHANNELLIST']
3104 3104 channelList = channelList.split('[')
3105 3105 channelList = channelList[1].split(']')
3106 3106 channelList = channelList[0].split(',')
3107 3107 channelList = [int(ch) for ch in channelList]
3108 3108 self.channelList = channelList
3109 3109 self.nChannels = headerObj.header['NCHANNELS']
3110 3110 self.nHeights = headerObj.header['NHEIGHTS']
3111 3111 self.ippSeconds = headerObj.header['IPPSECONDS']
3112 3112 self.nCohInt = headerObj.header['NCOHINT']
3113 3113 self.nIncohInt = headerObj.header['NINCOHINT']
3114 3114 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3115 3115 self.timeZone = headerObj.header['TIMEZONE']
3116 3116
3117 3117 self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
3118 3118
3119 3119 if 'COMMENT' in headerObj.header.keys():
3120 3120 self.comments = headerObj.header['COMMENT']
3121 3121
3122 3122 self.readHeightList()
3123 3123
3124 3124 def readHeightList(self):
3125 3125 self.blockIndex = self.blockIndex + 1
3126 3126 obj = self.fitsObj[self.blockIndex]
3127 3127 self.heightList = obj.data
3128 3128 self.blockIndex = self.blockIndex + 1
3129 3129
3130 3130 def readExtension(self):
3131 3131 obj = self.fitsObj[self.blockIndex]
3132 3132 self.heightList = obj.data
3133 3133 self.blockIndex = self.blockIndex + 1
3134 3134
3135 3135 def setNextFile(self):
3136 3136
3137 3137 if self.online:
3138 3138 newFile = self.__setNextFileOnline()
3139 3139 else:
3140 3140 newFile = self.__setNextFileOffline()
3141 3141
3142 3142 if not(newFile):
3143 3143 return 0
3144 3144
3145 3145 self.readHeader()
3146 3146
3147 3147 self.nReadBlocks = 0
3148 3148 # self.blockIndex = 1
3149 3149 return 1
3150 3150
3151 3151 def __searchFilesOffLine(self,
3152 3152 path,
3153 3153 startDate,
3154 3154 endDate,
3155 3155 startTime=datetime.time(0,0,0),
3156 3156 endTime=datetime.time(23,59,59),
3157 3157 set=None,
3158 3158 expLabel='',
3159 3159 ext='.fits',
3160 3160 walk=True):
3161 3161
3162 3162 pathList = []
3163 3163
3164 3164 if not walk:
3165 3165 pathList.append(path)
3166 3166
3167 3167 else:
3168 3168 dirList = []
3169 3169 for thisPath in os.listdir(path):
3170 3170 if not os.path.isdir(os.path.join(path,thisPath)):
3171 3171 continue
3172 3172 if not isDoyFolder(thisPath):
3173 3173 continue
3174 3174
3175 3175 dirList.append(thisPath)
3176 3176
3177 3177 if not(dirList):
3178 3178 return None, None
3179 3179
3180 3180 thisDate = startDate
3181 3181
3182 3182 while(thisDate <= endDate):
3183 3183 year = thisDate.timetuple().tm_year
3184 3184 doy = thisDate.timetuple().tm_yday
3185 3185
3186 3186 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3187 3187 if len(matchlist) == 0:
3188 3188 thisDate += datetime.timedelta(1)
3189 3189 continue
3190 3190 for match in matchlist:
3191 3191 pathList.append(os.path.join(path,match,expLabel))
3192 3192
3193 3193 thisDate += datetime.timedelta(1)
3194 3194
3195 3195 if pathList == []:
3196 3196 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3197 3197 return None, None
3198 3198
3199 3199 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3200 3200
3201 3201 filenameList = []
3202 3202 datetimeList = []
3203 3203
3204 3204 for i in range(len(pathList)):
3205 3205
3206 3206 thisPath = pathList[i]
3207 3207
3208 3208 fileList = glob.glob1(thisPath, "*%s" %ext)
3209 3209 fileList.sort()
3210 3210
3211 3211 for file in fileList:
3212 3212
3213 3213 filename = os.path.join(thisPath,file)
3214 3214 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
3215 3215
3216 3216 if not(thisDatetime):
3217 3217 continue
3218 3218
3219 3219 filenameList.append(filename)
3220 3220 datetimeList.append(thisDatetime)
3221 3221
3222 3222 if not(filenameList):
3223 3223 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3224 3224 return None, None
3225 3225
3226 3226 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3227 3227 print
3228 3228
3229 3229 for i in range(len(filenameList)):
3230 3230 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3231 3231
3232 3232 self.filenameList = filenameList
3233 3233 self.datetimeList = datetimeList
3234 3234
3235 3235 return pathList, filenameList
3236 3236
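    # Hedged example of the folder layout __searchFilesOffLine expects (paths and
    # dates below are made up): for startDate 2012-11-20 and endDate 2012-11-21 it scans
    #
    #   <path>/d2012325/<expLabel>/*.fits
    #   <path>/d2012326/<expLabel>/*.fits
    #
    # since the fnmatch pattern '?%4.4d%3.3d*' matches any one-character prefix
    # followed by the year and the day of year.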
3237 3237 def setup(self, path=None,
3238 3238 startDate=None,
3239 3239 endDate=None,
3240 3240 startTime=datetime.time(0,0,0),
3241 3241 endTime=datetime.time(23,59,59),
3242 3242 set=0,
3243 3243 expLabel = "",
3244 3244 ext = None,
3245 3245 online = False,
3246 3246 delay = 60,
3247 3247 walk = True):
3248 3248
3249 3249 if path == None:
3250 3250 raise ValueError, "The path is not valid"
3251 3251
3252 3252 if ext == None:
3253 3253 ext = self.ext
3254 3254
3255 3255 if not(online):
3256 3256 print "Searching files in offline mode ..."
3257 3257 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3258 3258 startTime=startTime, endTime=endTime,
3259 3259 set=set, expLabel=expLabel, ext=ext,
3260 3260 walk=walk)
3261 3261
3262 3262 if not(pathList):
3263 3263 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3264 3264 datetime.datetime.combine(startDate,startTime).ctime(),
3265 3265 datetime.datetime.combine(endDate,endTime).ctime())
3266 3266
3267 3267 sys.exit(-1)
3268 3268
3269 3269 self.fileIndex = -1
3270 3270 self.pathList = pathList
3271 3271 self.filenameList = filenameList
3272 3272
3273 3273 self.online = online
3274 3274 self.delay = delay
3275 3275 ext = ext.lower()
3276 3276 self.ext = ext
3277 3277
3278 3278 if not(self.setNextFile()):
3279 3279 if (startDate!=None) and (endDate!=None):
3280 3280 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3281 3281 elif startDate != None:
3282 3282 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3283 3283 else:
3284 3284 print "No files"
3285 3285
3286 3286 sys.exit(-1)
3287 3287
3288 3288
3289 3289
3290 3290 def readBlock(self):
3291 3291 dataObj = self.fitsObj[self.blockIndex]
3292 3292
3293 3293 self.data = dataObj.data
3294 3294 self.data_header_dict = dataObj.header
3295 3295 self.utc = self.data_header_dict['UTCTIME']
3296 3296
3297 3297 self.flagIsNewFile = 0
3298 3298 self.blockIndex += 1
3299 3299 self.nTotalBlocks += 1
3300 3300 self.nReadBlocks += 1
3301 3301
3302 3302 return 1
3303 3303
3304 3304 def __jumpToLastBlock(self):
3305 3305 raise ValueError, "Not implemented"
3306 3306
3307 3307 def __waitNewBlock(self):
3308 3308 """
3309 3309 Return 1 if a new data block was found, 0 otherwise.
3310 3310
3311 3311 If the reading mode is offline this method always returns 0.
3312 3312 """
3313 3313 if not self.online:
3314 3314 return 0
3315 3315
3316 3316 if (self.nReadBlocks >= self.dataBlocksPerFile):
3317 3317 return 0
3318 3318
3319 3319 currentPointer = self.fp.tell()
3320 3320
3321 3321 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3322 3322
3323 3323 for nTries in range( self.nTries ):
3324 3324
3325 3325 self.fp.close()
3326 3326 self.fp = open( self.filename, 'rb' )
3327 3327 self.fp.seek( currentPointer )
3328 3328
3329 3329 self.fileSize = os.path.getsize( self.filename )
3330 3330 currentSize = self.fileSize - currentPointer
3331 3331
3332 3332 if ( currentSize >= neededSize ):
3333 3333 self.__rdBasicHeader()
3334 3334 return 1
3335 3335
3336 3336 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3337 3337 time.sleep( self.delay )
3338 3338
3339 3339
3340 3340 return 0
3341 3341
3342 3342 def __setNewBlock(self):
3343 3343
3344 3344 if self.online:
3345 3345 self.__jumpToLastBlock()
3346 3346
3347 3347 if self.flagIsNewFile:
3348 3348 return 1
3349 3349
3350 3350 self.lastUTTime = self.utc
3351 3351
3352 3352 if self.online:
3353 3353 if self.__waitNewBlock():
3354 3354 return 1
3355 3355
3356 3356 if self.nReadBlocks < self.dataBlocksPerFile:
3357 3357 return 1
3358 3358
3359 3359 if not(self.setNextFile()):
3360 3360 return 0
3361 3361
3362 3362 deltaTime = self.utc - self.lastUTTime
3363 3363
3364 3364 self.flagTimeBlock = 0
3365 3365
3366 3366 if deltaTime > self.maxTimeStep:
3367 3367 self.flagTimeBlock = 1
3368 3368
3369 3369 return 1
3370 3370
3371 3371
3372 3372 def readNextBlock(self):
3373 3373 if not(self.__setNewBlock()):
3374 3374 return 0
3375 3375
3376 3376 if not(self.readBlock()):
3377 3377 return 0
3378 3378
3379 3379 return 1
3380 3380
3381 3381
3382 3382 def getData(self):
3383 3383
3384 3384 if self.flagNoMoreFiles:
3385 3385 self.dataOut.flagNoData = True
3386 3386 print 'Process finished'
3387 3387 return 0
3388 3388
3389 3389 self.flagTimeBlock = 0
3390 3390 self.flagIsNewBlock = 0
3391 3391
3392 3392 if not(self.readNextBlock()):
3393 3393 return 0
3394 3394
3395 3395 if self.data is None:
3396 3396 self.dataOut.flagNoData = True
3397 3397 return 0
3398 3398
3399 3399 self.dataOut.data = self.data
3400 3400 self.dataOut.data_header = self.data_header_dict
3401 3401 self.dataOut.utctime = self.utc
3402 3402
3403 3403 self.dataOut.header = self.header_dict
3404 3404 self.dataOut.expName = self.expName
3405 3405 self.dataOut.nChannels = self.nChannels
3406 3406 self.dataOut.timeZone = self.timeZone
3407 3407 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3408 3408 self.dataOut.comments = self.comments
3409 3409 self.dataOut.timeInterval = self.timeInterval
3410 3410 self.dataOut.channelList = self.channelList
3411 3411 self.dataOut.heightList = self.heightList
3412 3412 self.dataOut.flagNoData = False
3413 3413
3414 3414 return self.dataOut.data
3415 3415
3416 3416 def run(self, **kwargs):
3417 3417
3418 3418 if not(self.isConfig):
3419 3419 self.setup(**kwargs)
3420 3420 self.isConfig = True
3421 3421
3422 3422 self.getData()
3423 3423
3424 3424
3425 3425 class RadacHeader():
3426 3426 def __init__(self, fp):
3427 3427 header = 'Raw11/Data/RadacHeader'
3428 self.beamCode = fp.get(header+'/BeamCode')
3428 self.beamCodeByPulse = fp.get(header+'/BeamCode')
3429 self.beamCode = fp.get('Raw11/Data/Beamcodes')
3429 3430 self.code = fp.get(header+'/Code')
3430 3431 self.frameCount = fp.get(header+'/FrameCount')
3431 3432 self.modeGroup = fp.get(header+'/ModeGroup')
3432 3433 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
3433 3434 self.pulseCount = fp.get(header+'/PulseCount')
3434 3435 self.radacTime = fp.get(header+'/RadacTime')
3435 3436 self.timeCount = fp.get(header+'/TimeCount')
3436 3437 self.timeStatus = fp.get(header+'/TimeStatus')
3437 3438
3438 3439 self.nrecords = self.pulseCount.shape[0] #number of blocks
3439 3440 self.npulses = self.pulseCount.shape[1] #number of profiles
3440 3441 self.nsamples = self.nsamplesPulse[0,0] #number of heights
3442 self.nbeams = self.beamCode.shape[1] #number of beams
3441 3443
3442 3444
3443 3445 def getIndexRangeToPulse(self, idrecord=0):
3444 3446 indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
3445 3447 startPulseCountId = indexToZero[0][0]
3446 3448 endPulseCountId = startPulseCountId - 1
3447 3449 range1 = numpy.arange(startPulseCountId,self.npulses,1)
3448 3450 range2 = numpy.arange(0,startPulseCountId,1)
3449 3451 return range1, range2
3450 3452
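    # Worked example with made-up values: if pulseCount[idrecord,:] is
    # [3, 4, 0, 1, 2] (npulses = 5), the first zero is at index 2, so
    # range1 = [2, 3, 4] and range2 = [0, 1]; together they reorder the record so
    # that the pulse with count 0 comes first.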
3451 3453
3452 3454 class AMISRReader(ProcessingUnit):
3453 3455
3454 3456 path = None
3455 3457 startDate = None
3456 3458 endDate = None
3457 3459 startTime = None
3458 3460 endTime = None
3459 3461 walk = None
3460 3462 isConfig = False
3461 3463
3462 3464 def __init__(self):
3463 3465 self.set = None
3464 3466 self.subset = None
3465 3467 self.extension_file = '.h5'
3466 3468 self.dtc_str = 'dtc'
3467 3469 self.dtc_id = 0
3468 3470 self.status = True
3469 3471 self.isConfig = False
3470 3472 self.dirnameList = []
3471 3473 self.filenameList = []
3472 3474 self.fileIndex = None
3473 3475 self.flagNoMoreFiles = False
3474 3476 self.flagIsNewFile = 0
3475 3477 self.filename = ''
3476 3478 self.amisrFilePointer = None
3477 3479 self.radacHeaderObj = None
3478 3480 self.dataOut = self.__createObjByDefault()
3479 3481 self.datablock = None
3480 3482 self.rest_datablock = None
3481 3483 self.range = None
3482 3484 self.idrecord_count = 0
3483 3485 self.profileIndex = 0
3484 3486 self.idpulse_range1 = None
3485 3487 self.idpulse_range2 = None
3486 3488 self.beamCodeByFrame = None
3487 3489 self.radacTimeByFrame = None
3488 3490 #original attributes exactly as they are stored in the data file
3489 3491 self.beamCodesFromFile = None
3490 3492 self.radacTimeFromFile = None
3491 3493 self.rangeFromFile = None
3492 3494 self.dataByFrame = None
3493 3495 self.dataset = None
3494 3496
3497 self.beamCodeDict = {}
3498 self.beamRangeDict = {}
3499
3495 3500
3496 3501 def __createObjByDefault(self):
3497 3502
3498 3503 dataObj = AMISR()
3499 3504
3500 3505 return dataObj
3501 3506
3502 3507 def __setParameters(self,path,startDate,endDate,startTime,endTime,walk):
3503 3508 self.path = path
3504 3509 self.startDate = startDate
3505 3510 self.endDate = endDate
3506 3511 self.startTime = startTime
3507 3512 self.endTime = endTime
3508 3513 self.walk = walk
3509 3514
3510 3515 def __checkPath(self):
3511 3516 if os.path.exists(self.path):
3512 3517 self.status = 1
3513 3518 else:
3514 3519 self.status = 0
3515 3520 print 'Path: %s does not exist'%self.path
3516 3521
3517 3522 return
3518 3523
3519 3524 def __selDates(self, amisr_dirname_format):
3520 3525 year = int(amisr_dirname_format[0:4])
3521 3526 month = int(amisr_dirname_format[4:6])
3522 3527 dom = int(amisr_dirname_format[6:8])
3523 3528 thisDate = datetime.date(year,month,dom)
3524 3529
3525 3530 if (thisDate>=self.startDate and thisDate <= self.endDate):
3526 3531 return amisr_dirname_format
3527 3532
3528 3533 def __findDataForDates(self):
3529 3534
3530 3535 import re
3531 3536
3532 3537 if not(self.status):
3533 3538 return None
3534 3539
3535 3540 pat = '\d+.\d+'
3536 3541 dirnameList = [re.search(pat,x).string for x in os.listdir(self.path)]
3537 3542 dirnameList = [self.__selDates(x) for x in dirnameList]
3538 3543 dirnameList = filter(lambda x:x!=None,dirnameList)
3539 3544 if len(dirnameList)>0:
3540 3545 self.status = 1
3541 3546 self.dirnameList = dirnameList
3542 3547 self.dirnameList.sort()
3543 3548 else:
3544 3549 self.status = 0
3545 3550 return None
3546 3551
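    # Illustrative note (the folder name below is hypothetical): __selDates assumes
    # AMISR directory names that start with a YYYYMMDD date, e.g. '20121120.001'
    # gives year=2012, month=11, dom=20, and only folders whose date falls inside
    # [startDate, endDate] survive the filter in __findDataForDates.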
3547 3552 def __getTimeFromData(self):
3548 3553 pass
3549 3554
3550 3555 def __filterByGlob1(self, dirName):
3551 3556 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
3552 3557 filterDict = {}
3553 3558 filterDict.setdefault(dirName)
3554 3559 filterDict[dirName] = filter_files
3555 3560 return filterDict
3556 3561
3557 3562 def __getFilenameList(self, fileListInKeys, dirList):
3558 3563 for value in fileListInKeys:
3559 3564 dirName = value.keys()[0]
3560 3565 for file in value[dirName]:
3561 3566 filename = os.path.join(dirName, file)
3562 3567 self.filenameList.append(filename)
3563 3568
3564 3569
3565 3570 def __selectDataForTimes(self):
3566 3571 #the time filter is not implemented yet
3567 3572 if not(self.status):
3568 3573 return None
3569 3574
3570 3575 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
3571 3576
3572 3577 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
3573 3578
3574 3579 self.__getFilenameList(fileListInKeys, dirList)
3575 3580
3576 3581 if len(self.filenameList)>0:
3577 3582 self.status = 1
3578 3583 self.filenameList.sort()
3579 3584 else:
3580 3585 self.status = 0
3581 3586 return None
3582 3587
3583 3588
3584 3589 def __searchFilesOffline(self,
3585 3590 path,
3586 3591 startDate,
3587 3592 endDate,
3588 3593 startTime=datetime.time(0,0,0),
3589 3594 endTime=datetime.time(23,59,59),
3590 3595 walk=True):
3591 3596
3592 3597 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
3593 3598
3594 3599 self.__checkPath()
3595 3600
3596 3601 self.__findDataForDates()
3597 3602
3598 3603 self.__selectDataForTimes()
3599 3604
3600 3605 for i in range(len(self.filenameList)):
3601 3606 print "%s" %(self.filenameList[i])
3602 3607
3603 3608 return
3604 3609
3605 3610 def __setNextFileOffline(self):
3606 3611 idFile = self.fileIndex
3607 3612
3608 3613 while (True):
3609 3614 idFile += 1
3610 3615 if not(idFile < len(self.filenameList)):
3611 3616 self.flagNoMoreFiles = 1
3612 3617 print "No more Files"
3613 3618 return 0
3614 3619
3615 3620 filename = self.filenameList[idFile]
3616 3621
3617 3622 amisrFilePointer = h5py.File(filename,'r')
3618 3623
3619 3624 break
3620 3625
3621 3626 self.flagIsNewFile = 1
3622 3627 self.fileIndex = idFile
3623 3628 self.filename = filename
3624 3629
3625 3630 self.amisrFilePointer = amisrFilePointer
3626 3631
3627 3632 print "Setting the file: %s"%self.filename
3628 3633
3629 3634 return 1
3630 3635
3631 3636 def __readHeader(self):
3632 3637 self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
3633 3638 self.flagIsNewFile = 1
3639
3640 def __getBeamCode(self):
3641 self.beamCodeDict = {}
3642 self.beamRangeDict = {}
3643
3644 for i in range(len(self.radacHeaderObj.beamCode[0,:])):
3645 self.beamCodeDict.setdefault(i)
3646 self.beamRangeDict.setdefault(i)
3647 self.beamCodeDict[i] = self.radacHeaderObj.beamCode[0,i]
3648
3634 3649
3650 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
3651
3652 for i in range(len(self.beamCodeDict.values())):
3653 xx = numpy.where(just4record0==self.beamCodeDict.values()[i])
3654 self.beamRangeDict[i] = xx[0]
3635 3655
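    # Hedged sketch of the structures built above (the beam codes are made-up values):
    # if record 0 of 'Raw11/Data/Beamcodes' is [64157, 64964] and the per-pulse
    # BeamCode array alternates those codes, then
    #   self.beamCodeDict  -> {0: 64157, 1: 64964}
    #   self.beamRangeDict -> {0: pulse indices using code 64157,
    #                          1: pulse indices using code 64964}
    # which is what the beam identification and selection relies on.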
3636 3656
3637 3657 def __setNextFile(self):
3638 3658
3639 3659 newFile = self.__setNextFileOffline()
3640 3660
3641 3661 if not(newFile):
3642 3662 return 0
3643 3663
3644 3664 self.__readHeader()
3645
3665 self.__getBeamCode()
3646 3666 self.readDataBlock()
3647 3667
3648 3668
3649 3669 def setup(self,path=None,
3650 3670 startDate=None,
3651 3671 endDate=None,
3652 3672 startTime=datetime.time(0,0,0),
3653 3673 endTime=datetime.time(23,59,59),
3654 3674 walk=True):
3655 3675
3656 3676 #Offline file search
3657 3677 self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)
3658 3678
3659 3679 if not(self.filenameList):
3660 3680 print "There is no files into the folder: %s"%(path)
3661 3681
3662 3682 sys.exit(-1)
3663 3683
3664 3684 self.fileIndex = -1
3665 3685
3666 3686 self.__setNextFile()
3667 3687
3668 3688 def readRanges(self):
3669 3689 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
3670 3690 #self.rangeFromFile = dataset.value
3671 3691 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
3672 3692 return self.rangeFromFile
3673 3693
3674 3694
3675 3695 def readRadacTime(self,idrecord, range1, range2):
3676 3696 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
3677 3697
3678 3698 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
3679 3699 #radacTimeByFrame = dataset[idrecord - 1,range1]
3680 3700 #radacTimeByFrame = dataset[idrecord,range2]
3681 3701
3682 3702 return radacTimeByFrame
3683 3703
3684 3704 def readBeamCode(self, idrecord, range1, range2):
3685 3705 dataset = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode')
3686 3706 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
3687 3707 self.beamCodesFromFile = dataset.value
3688 3708
3689 3709 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
3690 3710 #beamcodeByFrame[range2] = dataset[idrecord, range2]
3691 3711 beamcodeByFrame[range1] = dataset[idrecord, range1]
3692 3712 beamcodeByFrame[range2] = dataset[idrecord, range2]
3693 3713
3694 3714 return beamcodeByFrame
3695 3715
3696 3716
3697 3717 def __setDataByFrame(self):
3698 3718 ndata = 2 #2 because the samples are complex (real and imaginary parts)
3699 3719 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
3700 3720 return dataByFrame
3701 3721
3702 3722 def __readDataSet(self):
3703 3723 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
3704 3724 return dataset
3705 3725
3706 3726 def __setDataBlock(self,):
3707 3727 real = self.dataByFrame[:,:,0] #index 0 is assumed to be the real part
3708 3728 imag = self.dataByFrame[:,:,1] #index 1 is assumed to be the imaginary part
3709 3729 datablock = real + imag*1j #build the complex data block
3710 3730 return datablock
3711 3731
3712 3732 def readSamples_version1(self,idrecord):
3713 3733 #these first three lines should only be executed once
3714 3734 if self.flagIsNewFile:
3715 3735 self.idpulse_range1, self.idpulse_range2 = self.radacHeaderObj.getIndexRangeToPulse(0)
3716 3736 self.dataByFrame = self.__setDataByFrame()
3717 3737 self.beamCodeByFrame = self.readBeamCode(idrecord, self.idpulse_range1, self.idpulse_range2)
3718 3738 self.radacTimeByFrame = self.readRadacTime(idrecord, self.idpulse_range1, self.idpulse_range2)
3719 3739 #reading dataset
3720 3740 self.dataset = self.__readDataSet()
3721 3741
3722 3742 if idrecord == 0:
3723 3743
3724 3744 if len(numpy.where(self.dataByFrame!=0.0)[0]) or len(numpy.where(self.dataByFrame!=0.0)[1]) or len(numpy.where(self.dataByFrame!=0.0)[2]):
3725 3745 #a condition for discontinuous data still needs to be added
3726 3746 #by default the data from the previous record is joined
3727 3747 self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
3728 3748 #timepulse
3729 3749 self.radacTimeByFrame[self.idpulse_range2] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range2]
3730 3750 else:
3731 3751 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]
3732 3752
3733 3753 self.radacTimeByFrame[self.idpulse_range1] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range1]
3734 3754
3735 3755 datablock = self.__setDataBlock()
3736 3756
3737 3757 return datablock
3738 3758
3739 3759 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord - 1,self.idpulse_range1, :, :]
3740 3760 self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
3741 3761 datablock = self.__setDataBlock()
3742 3762 self.flagIsNewFile = 0
3743 3763
3744 3764 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]
3745 3765
3746 3766
3747 3767 return datablock
3748 3768
3749 3769
3750 3770 def readSamples(self,idrecord):
3751 3771 if self.flagIsNewFile:
3752 3772 self.dataByFrame = self.__setDataByFrame()
3753 3773 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
3754 3774
3755 3775 #reading ranges
3756 3776 self.readRanges()
3757 3777 #reading dataset
3758 3778 self.dataset = self.__readDataSet()
3759 3779
3760 3780 self.flagIsNewFile = 0
3761 3781 self.radacTimeByFrame = self.radacHeaderObj.radacTime.value[idrecord, :]
3762 3782 self.dataByFrame = self.dataset[idrecord, :, :, :]
3763 3783 datablock = self.__setDataBlock()
3764 3784 return datablock
3765 3785
3766 3786
3767 3787 def readDataBlock(self):
3768 3788
3769 3789 #self.datablock = self.readSamples(self.idrecord_count)
3770 3790 self.datablock = self.readSamples(self.idrecord_count)
3771 3791 #print 'record:', self.idrecord_count
3772 3792
3773 3793 self.idrecord_count += 1
3774 3794 self.profileIndex = 0
3775 3795
3776 3796 if self.idrecord_count >= self.radacHeaderObj.nrecords:
3777 3797 self.idrecord_count = 0
3778 3798 self.flagIsNewFile = 1
3779 3799
3780 3800 def readNextBlock(self):
3781 3801
3782 3802 self.readDataBlock()
3783 3803
3784 3804 if self.flagIsNewFile:
3785 3805 self.__setNextFile()
3786 3806 pass
3787 3807
3788 3808 def __hasNotDataInBuffer(self):
3789 3809 #self.radacHeaderObj.npulses should be a separate variable to account for the number of pulses to take in the first and last records
3790 3810 if self.profileIndex >= self.radacHeaderObj.npulses:
3791 3811 return 1
3792 3812 return 0
3793 3813
3814 def printUTC(self):
3815 print self.dataOut.utctime
3816 print ''
3817
3794 3818 def setObjProperties(self):
3795 3819 self.dataOut.heightList = self.rangeFromFile/1000.0 #km
3796 3820 self.dataOut.nProfiles = self.radacHeaderObj.npulses
3797 3821 self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
3798 3822 self.dataOut.nBaud = None
3799 3823 self.dataOut.nCode = None
3800 3824 self.dataOut.code = None
3825
3826 self.dataOut.beamCodeDict = self.beamCodeDict
3827 self.dataOut.beamRangeDict = self.beamRangeDict
3801 3828
3802 3829 def getData(self):
3803 3830
3804 3831 if self.flagNoMoreFiles:
3805 3832 self.dataOut.flagNoData = True
3806 3833 print 'Process finished'
3807 3834 return 0
3808 3835
3809 3836 if self.__hasNotDataInBuffer():
3810 3837 self.readNextBlock()
3811 3838 # if not( self.readNextBlock() ):
3812 3839 # return 0
3813 3840 # self.getFirstHeader()
3814 3841
3815 3842 if self.datablock is None: #set this condition when there is no more data to read
3816 3843 self.dataOut.flagNoData = True
3817 3844 return 0
3818 3845
3819 3846 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
3820 3847
3821 3848 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
3822 3849
3823 3850 self.dataOut.flagNoData = False
3824 3851
3825 3852 self.profileIndex += 1
3826 3853
3827 3854 return self.dataOut.data
3828 3855
3829 3856
3830 3857 def run(self, **kwargs):
3831 3858 if not(self.isConfig):
3832 3859 self.setup(**kwargs)
3833 3860 self.setObjProperties()
3834 3861 self.isConfig = True
3835 3862
3836 3863 self.getData()
@@ -1,2070 +1,2132
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10 import math
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 try:
16 16 import cfunctions
17 17 except:
18 18 pass
19 19
20 20 class ProcessingUnit:
21 21
22 22 """
23 23 This is the base class for data processing.
24 24
25 25 It provides the "call" method to invoke operations. Operations can be:
26 26 - Internal methods (callMethod)
27 27 - Objects of the Operation class (callObject). Before being called, these objects
28 28 have to be added with the "add" method.
29 29
30 30 """
31 31 # input data object (Voltage, Spectra or Correlation)
32 32 dataIn = None
33 33
34 34 # output data object (Voltage, Spectra or Correlation)
35 35 dataOut = None
36 36
37 37
38 38 objectDict = None
39 39
40 40 def __init__(self):
41 41
42 42 self.objectDict = {}
43 43
44 44 def init(self):
45 45
46 46 raise ValueError, "Not implemented"
47 47
48 48 def addOperation(self, object, objId):
49 49
50 50 """
51 51 Adds the object "object" to the object list "self.objectList" and returns the
52 52 identifier associated with this object.
53 53
54 54 Input:
55 55
56 56 object : object of the "Operation" class
57 57
58 58 Return:
59 59
60 60 objId : identifier of the object, needed to execute the operation
61 61 """
62 62
63 63 self.objectDict[objId] = object
64 64
65 65 return objId
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
70 70 Direct operation on the data (dataOut.data). The attribute values of the
71 71 dataOut object need to be updated.
72 72
73 73 Input:
74 74
75 75 **kwargs : dictionary of arguments of the function to execute
76 76 """
77 77
78 78 raise ValueError, "Not implemented"
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
83 83 Executes the method named "name" of this class with the arguments **kwargs.
84 84
85 85 Input:
86 86 name : name of the method to execute
87 87
88 88 **kwargs : dictionary with the names and values of the arguments of the function to execute.
89 89
90 90 """
91 91 if name != 'run':
92 92
93 93 if name == 'init' and self.dataIn.isEmpty():
94 94 self.dataOut.flagNoData = True
95 95 return False
96 96
97 97 if name != 'init' and self.dataOut.isEmpty():
98 98 return False
99 99
100 100 methodToCall = getattr(self, name)
101 101
102 102 methodToCall(**kwargs)
103 103
104 104 if name != 'run':
105 105 return True
106 106
107 107 if self.dataOut.isEmpty():
108 108 return False
109 109
110 110 return True
111 111
112 112 def callObject(self, objId, **kwargs):
113 113
114 114 """
115 115 Executes the operation associated with the object identifier "objId"
116 116
117 117 Input:
118 118
119 119 objId : identifier of the object to execute
120 120
121 121 **kwargs : dictionary with the names and values of the arguments of the function to execute.
122 122
123 123 Return:
124 124
125 125 None
126 126 """
127 127
128 128 if self.dataOut.isEmpty():
129 129 return False
130 130
131 131 object = self.objectDict[objId]
132 132
133 133 object.run(self.dataOut, **kwargs)
134 134
135 135 return True
136 136
137 137 def call(self, operationConf, **kwargs):
138 138
139 139 """
140 140 Returns True if the operation "operationConf.name" is executed with the
141 141 arguments "**kwargs"; False if the operation was not executed.
142 142 The operation can be of two types:
143 143
144 144 1. A method of this class itself:
145 145
146 146 operation.type = "self"
147 147
148 148 2. The "run" method of an object of the Operation class or of a class derived from it:
149 149 operation.type = "other".
150 150
151 151 This Operation object must have been added beforehand with the method
152 152 "addOperation" and identified with its operation.id.
153 153
154 154
155 155
156 156
157 157 Input:
158 158
159 159 Operation : operation object with the attributes: name, type and id.
160 160
161 161 """
162 162
163 163 if operationConf.type == 'self':
164 164 sts = self.callMethod(operationConf.name, **kwargs)
165 165
166 166 if operationConf.type == 'other':
167 167 sts = self.callObject(operationConf.id, **kwargs)
168 168
169 169 return sts
170 170
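    # Minimal usage sketch (the configuration object below is hypothetical; only the
    # attributes name, type and id are required by call()):
    #
    #   class OpConf: pass
    #   opConf = OpConf()
    #   opConf.name, opConf.type, opConf.id = 'selectChannels', 'self', None
    #   procUnit.call(opConf, channelList=[0, 1])      # internal method
    #
    #   objId = procUnit.addOperation(CohInt(), 1)
    #   opConf.name, opConf.type, opConf.id = 'CohInt', 'other', objId
    #   procUnit.call(opConf, n=8)                     # previously added Operation object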
171 171 def setInput(self, dataIn):
172 172
173 173 self.dataIn = dataIn
174 174
175 175 def getOutput(self):
176 176
177 177 return self.dataOut
178 178
179 179 class Operation():
180 180
181 181 """
182 182 Base class for defining the additional operations that can be added to the ProcessingUnit class
183 183 when they need to accumulate previous information about the data to be processed. Preferably use an
184 184 accumulation buffer inside this class.
185 185
186 186 Example: coherent integration, which needs the information of the previous n profiles (buffer)
187 187
188 188 """
189 189
190 190 __buffer = None
191 191 __isConfig = False
192 192
193 193 def __init__(self):
194 194
195 195 pass
196 196
197 197 def run(self, dataIn, **kwargs):
198 198
199 199 """
200 200 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
201 201
202 202 Input:
203 203
204 204 dataIn : object of the JROData class
205 205
206 206 Return:
207 207
208 208 None
209 209
210 210 Affected:
211 211 __buffer : data reception buffer.
212 212
213 213 """
214 214
215 215 raise ValueError, "Not implemented"
216 216
217 217 class VoltageProc(ProcessingUnit):
218 218
219 219
220 220 def __init__(self):
221 221
222 222 self.objectDict = {}
223 223 self.dataOut = Voltage()
224 224 self.flip = 1
225 225
226 226 def __updateObjFromAmisrInput(self):
227 227
228 228 self.dataOut.timeZone = self.dataIn.timeZone
229 229 self.dataOut.dstFlag = self.dataIn.dstFlag
230 230 self.dataOut.errorCount = self.dataIn.errorCount
231 231 self.dataOut.useLocalTime = self.dataIn.useLocalTime
232 232
233 233 self.dataOut.flagNoData = self.dataIn.flagNoData
234 234 self.dataOut.data = self.dataIn.data
235 235 self.dataOut.utctime = self.dataIn.utctime
236 236 self.dataOut.channelList = self.dataIn.channelList
237 237 self.dataOut.timeInterval = self.dataIn.timeInterval
238 238 self.dataOut.heightList = self.dataIn.heightList
239 239 self.dataOut.nProfiles = self.dataIn.nProfiles
240 240
241 241 self.dataOut.nCohInt = self.dataIn.nCohInt
242 242 self.dataOut.ippSeconds = self.dataIn.ippSeconds
243 243 self.dataOut.frequency = self.dataIn.frequency
244 244
245 245 pass
246 246
247 247 def init(self):
248 248
249 249
250 250 if self.dataIn.type == 'AMISR':
251 251 self.__updateObjFromAmisrInput()
252 252
253 253 if self.dataIn.type == 'Voltage':
254 254 self.dataOut.copy(self.dataIn)
255 255 # There is no need to copy the dataIn attributes on every init()
256 256 # the copy should be done once per new data block
257 257
258 258 def selectChannels(self, channelList):
259 259
260 260 channelIndexList = []
261 261
262 262 for channel in channelList:
263 263 index = self.dataOut.channelList.index(channel)
264 264 channelIndexList.append(index)
265 265
266 266 self.selectChannelsByIndex(channelIndexList)
267 267
268 268 def selectChannelsByIndex(self, channelIndexList):
269 269 """
270 270 Selects a block of data by channel according to channelIndexList
271 271
272 272 Input:
273 273 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
274 274
275 275 Affected:
276 276 self.dataOut.data
277 277 self.dataOut.channelIndexList
278 278 self.dataOut.nChannels
279 279 self.dataOut.m_ProcessingHeader.totalSpectra
280 280 self.dataOut.systemHeaderObj.numChannels
281 281 self.dataOut.m_ProcessingHeader.blockSize
282 282
283 283 Return:
284 284 None
285 285 """
286 286
287 287 for channelIndex in channelIndexList:
288 288 if channelIndex not in self.dataOut.channelIndexList:
289 289 print channelIndexList
290 290 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
291 291
292 292 nChannels = len(channelIndexList)
293 293
294 294 data = self.dataOut.data[channelIndexList,:]
295 295
296 296 self.dataOut.data = data
297 297 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
298 298 # self.dataOut.nChannels = nChannels
299 299
300 300 return 1
301 301
302 302 def selectHeights(self, minHei=None, maxHei=None):
303 303 """
304 304 Selects a block of data for a group of height values within the range
305 305 minHei <= height <= maxHei
306 306
307 307 Input:
308 308 minHei : minimum height value to consider
309 309 maxHei : maximum height value to consider
310 310
311 311 Affected:
312 312 Several values are changed indirectly through the selectHeightsByIndex method
313 313
314 314 Return:
315 315 1 if the method executed successfully, otherwise 0
316 316 """
317 317
318 318 if minHei == None:
319 319 minHei = self.dataOut.heightList[0]
320 320
321 321 if maxHei == None:
322 322 maxHei = self.dataOut.heightList[-1]
323 323
324 324 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
325 325 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
326 326
327 327
328 328 if (maxHei > self.dataOut.heightList[-1]):
329 329 maxHei = self.dataOut.heightList[-1]
330 330 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
331 331
332 332 minIndex = 0
333 333 maxIndex = 0
334 334 heights = self.dataOut.heightList
335 335
336 336 inda = numpy.where(heights >= minHei)
337 337 indb = numpy.where(heights <= maxHei)
338 338
339 339 try:
340 340 minIndex = inda[0][0]
341 341 except:
342 342 minIndex = 0
343 343
344 344 try:
345 345 maxIndex = indb[0][-1]
346 346 except:
347 347 maxIndex = len(heights)
348 348
349 349 self.selectHeightsByIndex(minIndex, maxIndex)
350 350
351 351 return 1
352 352
353 353
354 354 def selectHeightsByIndex(self, minIndex, maxIndex):
355 355 """
356 356 Selects a block of data for a group of height indexes within the range
357 357 minIndex <= index <= maxIndex
358 358
359 359 Input:
360 360 minIndex : minimum height index to consider
361 361 maxIndex : maximum height index to consider
362 362
363 363 Affected:
364 364 self.dataOut.data
365 365 self.dataOut.heightList
366 366
367 367 Return:
368 368 1 if the method executed successfully, otherwise 0
369 369 """
370 370
371 371 if (minIndex < 0) or (minIndex > maxIndex):
372 372 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
373 373
374 374 if (maxIndex >= self.dataOut.nHeights):
375 375 maxIndex = self.dataOut.nHeights-1
376 376 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
377 377
378 378 nHeights = maxIndex - minIndex + 1
379 379
380 380 #voltage
381 381 data = self.dataOut.data[:,minIndex:maxIndex+1]
382 382
383 383 firstHeight = self.dataOut.heightList[minIndex]
384 384
385 385 self.dataOut.data = data
386 386 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
387 387
388 388 return 1
389 389
390 390
391 391 def filterByHeights(self, window):
392 392 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
393 393
394 394 if window == None:
395 395 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
396 396
397 397 newdelta = deltaHeight * window
398 398 r = self.dataOut.data.shape[1] % window
399 399 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
400 400 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
401 401 buffer = numpy.sum(buffer,2)
402 402 self.dataOut.data = buffer
403 403 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*(self.dataOut.nHeights-r)/window,newdelta)
404 404 self.dataOut.windowOfFilter = window
405 405
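    # Illustrative example with made-up numbers: with data of shape (nChannels, 1000),
    # deltaHeight = 0.15 km and window = 4, r = 1000 % 4 = 0, the block is reshaped
    # to (nChannels, 250, 4) and summed over the last axis, so the output keeps
    # 250 heights with a new spacing newdelta = 0.6 km.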
406 406 def deFlip(self):
407 407 self.dataOut.data *= self.flip
408 408 self.flip *= -1.
409 409
410 410 def setRadarFrequency(self, frequency=None):
411 411 if frequency != None:
412 412 self.dataOut.frequency = frequency
413 413
414 414 return 1
415 415
416 416 class CohInt(Operation):
417 417
418 418 __isConfig = False
419 419
420 420 __profIndex = 0
421 421 __withOverapping = False
422 422
423 423 __byTime = False
424 424 __initime = None
425 425 __lastdatatime = None
426 426 __integrationtime = None
427 427
428 428 __buffer = None
429 429
430 430 __dataReady = False
431 431
432 432 n = None
433 433
434 434
435 435 def __init__(self):
436 436
437 437 self.__isConfig = False
438 438
439 439 def setup(self, n=None, timeInterval=None, overlapping=False):
440 440 """
441 441 Set the parameters of the integration class.
442 442
443 443 Inputs:
444 444
445 445 n : Number of coherent integrations
446 446 timeInterval : Integration time in minutes. If the parameter "n" is given, this one is ignored
447 447 overlapping :
448 448
449 449 """
450 450
451 451 self.__initime = None
452 452 self.__lastdatatime = 0
453 453 self.__buffer = None
454 454 self.__dataReady = False
455 455
456 456
457 457 if n == None and timeInterval == None:
458 458 raise ValueError, "n or timeInterval should be specified ..."
459 459
460 460 if n != None:
461 461 self.n = n
462 462 self.__byTime = False
463 463 else:
464 464 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
465 465 self.n = 9999
466 466 self.__byTime = True
467 467
468 468 if overlapping:
469 469 self.__withOverapping = True
470 470 self.__buffer = None
471 471 else:
472 472 self.__withOverapping = False
473 473 self.__buffer = 0
474 474
475 475 self.__profIndex = 0
476 476
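    # Hedged usage sketch (parameter values are illustrative only):
    #
    #   cohInt = CohInt()
    #   cohInt.setup(n=8)                                # integrate every 8 profiles
    #   # or, time based (timeInterval is taken in minutes, see the *60 above):
    #   cohInt.setup(timeInterval=1, overlapping=False)
    #
    # In a processing chain setup() is usually not called directly: run(dataOut, n=8)
    # configures the object on its first call and then integrates profile by profile.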
477 477 def putData(self, data):
478 478
479 479 """
480 480 Add a profile to the __buffer and increase __profIndex by one
481 481
482 482 """
483 483
484 484 if not self.__withOverapping:
485 485 self.__buffer += data.copy()
486 486 self.__profIndex += 1
487 487 return
488 488
489 489 #Overlapping data
490 490 nChannels, nHeis = data.shape
491 491 data = numpy.reshape(data, (1, nChannels, nHeis))
492 492
493 493 #If the buffer is empty then it takes the data value
494 494 if self.__buffer == None:
495 495 self.__buffer = data
496 496 self.__profIndex += 1
497 497 return
498 498
499 499 #If the buffer length is lower than n then stack the data value
500 500 if self.__profIndex < self.n:
501 501 self.__buffer = numpy.vstack((self.__buffer, data))
502 502 self.__profIndex += 1
503 503 return
504 504
505 505 #If the buffer length is equal to n then replacing the last buffer value with the data value
506 506 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
507 507 self.__buffer[self.n-1] = data
508 508 self.__profIndex = self.n
509 509 return
510 510
511 511
512 512 def pushData(self):
513 513 """
514 514 Return the sum of the last profiles and the number of profiles used in the sum.
515 515
516 516 Affected:
517 517
518 518 self.__profileIndex
519 519
520 520 """
521 521
522 522 if not self.__withOverapping:
523 523 data = self.__buffer
524 524 n = self.__profIndex
525 525
526 526 self.__buffer = 0
527 527 self.__profIndex = 0
528 528
529 529 return data, n
530 530
531 531 #Integration with Overlapping
532 532 data = numpy.sum(self.__buffer, axis=0)
533 533 n = self.__profIndex
534 534
535 535 return data, n
536 536
537 537 def byProfiles(self, data):
538 538
539 539 self.__dataReady = False
540 540 avgdata = None
541 541 n = None
542 542
543 543 self.putData(data)
544 544
545 545 if self.__profIndex == self.n:
546 546
547 547 avgdata, n = self.pushData()
548 548 self.__dataReady = True
549 549
550 550 return avgdata
551 551
552 552 def byTime(self, data, datatime):
553 553
554 554 self.__dataReady = False
555 555 avgdata = None
556 556 n = None
557 557
558 558 self.putData(data)
559 559
560 560 if (datatime - self.__initime) >= self.__integrationtime:
561 561 avgdata, n = self.pushData()
562 562 self.n = n
563 563 self.__dataReady = True
564 564
565 565 return avgdata
566 566
567 567 def integrate(self, data, datatime=None):
568 568
569 569 if self.__initime == None:
570 570 self.__initime = datatime
571 571
572 572 if self.__byTime:
573 573 avgdata = self.byTime(data, datatime)
574 574 else:
575 575 avgdata = self.byProfiles(data)
576 576
577 577
578 578 self.__lastdatatime = datatime
579 579
580 580 if avgdata == None:
581 581 return None, None
582 582
583 583 avgdatatime = self.__initime
584 584
585 585 deltatime = datatime -self.__lastdatatime
586 586
587 587 if not self.__withOverapping:
588 588 self.__initime = datatime
589 589 else:
590 590 self.__initime += deltatime
591 591
592 592 return avgdata, avgdatatime
593 593
594 594 def run(self, dataOut, **kwargs):
595 595
596 596 if not self.__isConfig:
597 597 self.setup(**kwargs)
598 598 self.__isConfig = True
599 599
600 600 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
601 601
602 602 # dataOut.timeInterval *= n
603 603 dataOut.flagNoData = True
604 604
605 605 if self.__dataReady:
606 606 dataOut.data = avgdata
607 607 dataOut.nCohInt *= self.n
608 608 dataOut.utctime = avgdatatime
609 609 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
610 610 dataOut.flagNoData = False
611 611
612 612
613 613 class Decoder(Operation):
614 614
615 615 __isConfig = False
616 616 __profIndex = 0
617 617
618 618 code = None
619 619
620 620 nCode = None
621 621 nBaud = None
622 622
623 623 def __init__(self):
624 624
625 625 self.__isConfig = False
626 626
627 627 def setup(self, code, shape):
628 628
629 629 self.__profIndex = 0
630 630
631 631 self.code = code
632 632
633 633 self.nCode = len(code)
634 634 self.nBaud = len(code[0])
635 635
636 636 self.__nChannels, self.__nHeis = shape
637 637
638 638 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
639 639
640 640 __codeBuffer[:,0:self.nBaud] = self.code
641 641
642 642 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
643 643
644 644 self.ndatadec = self.__nHeis - self.nBaud + 1
645 645
646 646 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
647 647
648 648 def convolutionInFreq(self, data):
649 649
650 650 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
651 651
652 652 fft_data = numpy.fft.fft(data, axis=1)
653 653
654 654 conv = fft_data*fft_code
655 655
656 656 data = numpy.fft.ifft(conv,axis=1)
657 657
658 658 datadec = data[:,:-self.nBaud+1]
659 659
660 660 return datadec
661 661
662 662 def convolutionInFreqOpt(self, data):
663 663
664 664 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
665 665
666 666 data = cfunctions.decoder(fft_code, data)
667 667
668 668 datadec = data[:,:-self.nBaud+1]
669 669
670 670 return datadec
671 671
672 672 def convolutionInTime(self, data):
673 673
674 674 code = self.code[self.__profIndex]
675 675
676 676 for i in range(self.__nChannels):
677 677 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='valid')
678 678
679 679 return self.datadecTime
680 680
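    # Hedged example with made-up values: convolutionInTime decodes each channel by
    # cross-correlating it with the current code, e.g. for a 3-baud code
    #
    #   code = numpy.array([1, -1, 1])
    #   data = numpy.array([[0, 1, -1, 1, 0]])           # 1 channel, 5 heights
    #   numpy.correlate(data[0], code, mode='valid')     # -> array([-2,  3, -2])
    #
    # giving ndatadec = nHeis - nBaud + 1 = 3 decoded heights, the same length that
    # convolutionInFreq keeps after trimming the last nBaud-1 samples.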
681 681 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0):
682 682
683 683 if code == None:
684 684 code = dataOut.code
685 685 else:
686 686 code = numpy.array(code).reshape(nCode,nBaud)
687 687 dataOut.code = code
688 688 dataOut.nCode = nCode
689 689 dataOut.nBaud = nBaud
690 690 dataOut.radarControllerHeaderObj.code = code
691 691 dataOut.radarControllerHeaderObj.nCode = nCode
692 692 dataOut.radarControllerHeaderObj.nBaud = nBaud
693 693
694 694
695 695 if not self.__isConfig:
696 696
697 697 self.setup(code, dataOut.data.shape)
698 698 self.__isConfig = True
699 699
700 700 if mode == 0:
701 701 datadec = self.convolutionInTime(dataOut.data)
702 702
703 703 if mode == 1:
704 704 datadec = self.convolutionInFreq(dataOut.data)
705 705
706 706 if mode == 2:
707 707 datadec = self.convolutionInFreqOpt(dataOut.data)
708 708
709 709 dataOut.data = datadec
710 710
711 711 dataOut.heightList = dataOut.heightList[0:self.ndatadec]
712 712
713 713 dataOut.flagDecodeData = True #the data is now decoded
714 714
715 715 if self.__profIndex == self.nCode-1:
716 716 self.__profIndex = 0
717 717 return 1
718 718
719 719 self.__profIndex += 1
720 720
721 721 return 1
722 722 # dataOut.flagDeflipData = True #the data is now deflipped
723 723
724 724
725 725
726 726 class SpectraProc(ProcessingUnit):
727 727
728 728 def __init__(self):
729 729
730 730 self.objectDict = {}
731 731 self.buffer = None
732 732 self.firstdatatime = None
733 733 self.profIndex = 0
734 734 self.dataOut = Spectra()
735 735
736 736 def __updateObjFromInput(self):
737 737
738 738 self.dataOut.timeZone = self.dataIn.timeZone
739 739 self.dataOut.dstFlag = self.dataIn.dstFlag
740 740 self.dataOut.errorCount = self.dataIn.errorCount
741 741 self.dataOut.useLocalTime = self.dataIn.useLocalTime
742 742
743 743 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
744 744 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
745 745 self.dataOut.channelList = self.dataIn.channelList
746 746 self.dataOut.heightList = self.dataIn.heightList
747 747 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
748 748 # self.dataOut.nHeights = self.dataIn.nHeights
749 749 # self.dataOut.nChannels = self.dataIn.nChannels
750 750 self.dataOut.nBaud = self.dataIn.nBaud
751 751 self.dataOut.nCode = self.dataIn.nCode
752 752 self.dataOut.code = self.dataIn.code
753 753 self.dataOut.nProfiles = self.dataOut.nFFTPoints
754 754 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
755 755 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
756 756 self.dataOut.utctime = self.firstdatatime
757 757 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
758 758 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data is already deflipped
759 759 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
760 760 self.dataOut.nCohInt = self.dataIn.nCohInt
761 761 self.dataOut.nIncohInt = 1
762 762 self.dataOut.ippSeconds = self.dataIn.ippSeconds
763 763 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
764 764
765 765 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
766 766 self.dataOut.frequency = self.dataIn.frequency
767 767 self.dataOut.realtime = self.dataIn.realtime
768 768
769 769 def __getFft(self):
770 770 """
771 771 Converts Voltage values into Spectra
772 772
773 773 Affected:
774 774 self.dataOut.data_spc
775 775 self.dataOut.data_cspc
776 776 self.dataOut.data_dc
777 777 self.dataOut.heightList
778 778 self.profIndex
779 779 self.buffer
780 780 self.dataOut.flagNoData
781 781 """
782 782 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
783 783 fft_volt = fft_volt.astype(numpy.dtype('complex'))
784 784 dc = fft_volt[:,0,:]
785 785
786 786 #self-spectra computation
787 787 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
788 788 spc = fft_volt * numpy.conjugate(fft_volt)
789 789 spc = spc.real
790 790
791 791 blocksize = 0
792 792 blocksize += dc.size
793 793 blocksize += spc.size
794 794
795 795 cspc = None
796 796 pairIndex = 0
797 797 if self.dataOut.pairsList != None:
798 798 #cross-spectra computation
799 799 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
800 800 for pair in self.dataOut.pairsList:
801 801 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
802 802 pairIndex += 1
803 803 blocksize += cspc.size
804 804
805 805 self.dataOut.data_spc = spc
806 806 self.dataOut.data_cspc = cspc
807 807 self.dataOut.data_dc = dc
808 808 self.dataOut.blockSize = blocksize
809 809 self.dataOut.flagShiftFFT = False
810 810
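    # Minimal numpy sketch of the same computation (simplified, illustrative shapes):
    #
    #   fft_volt = numpy.fft.fftshift(numpy.fft.fft(buffer, n=nFFTPoints, axis=1), axes=(1,))
    #   spc  = (fft_volt * numpy.conjugate(fft_volt)).real       # self-spectra
    #   cspc = fft_volt[0] * numpy.conjugate(fft_volt[1])        # cross-spectrum, pair (0,1)
    #
    # with buffer of shape (nChannels, nProfiles, nHeights); note that above the DC
    # term is taken from the unshifted FFT before applying fftshift.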
811 811 def init(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None):
812 812
813 813 self.dataOut.flagNoData = True
814 814
815 815 if self.dataIn.type == "Spectra":
816 816 self.dataOut.copy(self.dataIn)
817 817 return
818 818
819 819 if self.dataIn.type == "Voltage":
820 820
821 821 if nFFTPoints == None:
822 822 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
823 823
824 824 if pairsList == None:
825 825 nPairs = 0
826 826 else:
827 827 nPairs = len(pairsList)
828 828
829 829 if ippFactor == None:
830 830 ippFactor = 1
831 831 self.dataOut.ippFactor = ippFactor
832 832
833 833 self.dataOut.nFFTPoints = nFFTPoints
834 834 self.dataOut.pairsList = pairsList
835 835 self.dataOut.nPairs = nPairs
836 836
837 837 if self.buffer == None:
838 838 self.buffer = numpy.zeros((self.dataIn.nChannels,
839 839 nProfiles,
840 840 self.dataIn.nHeights),
841 841 dtype='complex')
842 842
843 843
844 844 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
845 845 self.profIndex += 1
846 846
847 847 if self.firstdatatime == None:
848 848 self.firstdatatime = self.dataIn.utctime
849 849
850 850 if self.profIndex == nProfiles:
851 851 self.__updateObjFromInput()
852 852 self.__getFft()
853 853
854 854 self.dataOut.flagNoData = False
855 855
856 856 self.buffer = None
857 857 self.firstdatatime = None
858 858 self.profIndex = 0
859 859
860 860 return
861 861
862 862 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
863 863
864 864 def selectChannels(self, channelList):
865 865
866 866 channelIndexList = []
867 867
868 868 for channel in channelList:
869 869 index = self.dataOut.channelList.index(channel)
870 870 channelIndexList.append(index)
871 871
872 872 self.selectChannelsByIndex(channelIndexList)
873 873
874 874 def selectChannelsByIndex(self, channelIndexList):
875 875 """
876 876 Selecciona un bloque de datos en base a canales segun el channelIndexList
877 877
878 878 Input:
879 879 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
880 880
881 881 Affected:
882 882 self.dataOut.data_spc
883 883 self.dataOut.channelIndexList
884 884 self.dataOut.nChannels
885 885
886 886 Return:
887 887 None
888 888 """
889 889
890 890 for channelIndex in channelIndexList:
891 891 if channelIndex not in self.dataOut.channelIndexList:
892 892 print channelIndexList
893 893 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
894 894
895 895 nChannels = len(channelIndexList)
896 896
897 897 data_spc = self.dataOut.data_spc[channelIndexList,:]
898 898
899 899 self.dataOut.data_spc = data_spc
900 900 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
901 901 # self.dataOut.nChannels = nChannels
902 902
903 903 return 1
904 904
905 905 def selectHeights(self, minHei, maxHei):
906 906 """
907 907 Selects a block of data for a group of height values within the range
908 908 minHei <= height <= maxHei
909 909
910 910 Input:
911 911 minHei : minimum height value to consider
912 912 maxHei : maximum height value to consider
913 913
914 914 Affected:
915 915 Several values are changed indirectly through the selectHeightsByIndex method
916 916
917 917 Return:
918 918 1 if the method executed successfully, otherwise 0
919 919 """
920 920 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
921 921 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
922 922
923 923 if (maxHei > self.dataOut.heightList[-1]):
924 924 maxHei = self.dataOut.heightList[-1]
925 925 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
926 926
927 927 minIndex = 0
928 928 maxIndex = 0
929 929 heights = self.dataOut.heightList
930 930
931 931 inda = numpy.where(heights >= minHei)
932 932 indb = numpy.where(heights <= maxHei)
933 933
934 934 try:
935 935 minIndex = inda[0][0]
936 936 except:
937 937 minIndex = 0
938 938
939 939 try:
940 940 maxIndex = indb[0][-1]
941 941 except:
942 942 maxIndex = len(heights)
943 943
944 944 self.selectHeightsByIndex(minIndex, maxIndex)
945 945
946 946 return 1
947 947
948 948 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
949 949 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
950 950
951 951 if hei_ref != None:
952 952 newheis = numpy.where(self.dataOut.heightList>hei_ref)
953 953
954 954 minIndex = min(newheis[0])
955 955 maxIndex = max(newheis[0])
956 956 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
957 957 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
958 958
959 959 # determine indices
960 960 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
961 961 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
962 962 beacon_dB = numpy.sort(avg_dB)[-nheis:]
963 963 beacon_heiIndexList = []
964 964 for val in avg_dB.tolist():
965 965 if val >= beacon_dB[0]:
966 966 beacon_heiIndexList.append(avg_dB.tolist().index(val))
967 967
968 968 #data_spc = data_spc[:,:,beacon_heiIndexList]
969 969 data_cspc = None
970 970 if self.dataOut.data_cspc != None:
971 971 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
972 972 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
973 973
974 974 data_dc = None
975 975 if self.dataOut.data_dc != None:
976 976 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
977 977 #data_dc = data_dc[:,beacon_heiIndexList]
978 978
979 979 self.dataOut.data_spc = data_spc
980 980 self.dataOut.data_cspc = data_cspc
981 981 self.dataOut.data_dc = data_dc
982 982 self.dataOut.heightList = heightList
983 983 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
984 984
985 985 return 1
986 986
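# Usage sketch (hypothetical names): pick the heights above the first Tau of
# the radar controller header and keep the indexes of the strongest echoes:
#
#   >>> procUnit.getBeaconSignal(tauindex=0, channelindex=0)
#   >>> procUnit.dataOut.beacon_heiIndexList    # indexes of the beacon heights
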
987 987
988 988 def selectHeightsByIndex(self, minIndex, maxIndex):
989 989 """
990 990 Selects a block of data by height index, within the range
991 991 minIndex <= index <= maxIndex
992 992
993 993 Input:
994 994 minIndex : minimum height index to keep
995 995 maxIndex : maximum height index to keep
996 996
997 997 Affected:
998 998 self.dataOut.data_spc
999 999 self.dataOut.data_cspc
1000 1000 self.dataOut.data_dc
1001 1001 self.dataOut.heightList
1002 1002
1003 1003 Return:
1004 1004 1 if the method ran successfully, otherwise it returns 0
1005 1005 """
1006 1006
1007 1007 if (minIndex < 0) or (minIndex > maxIndex):
1008 1008 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1009 1009
1010 1010 if (maxIndex >= self.dataOut.nHeights):
1011 1011 maxIndex = self.dataOut.nHeights-1
1012 1012 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1013 1013
1014 1014 nHeights = maxIndex - minIndex + 1
1015 1015
1016 1016 #Spectra
1017 1017 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
1018 1018
1019 1019 data_cspc = None
1020 1020 if self.dataOut.data_cspc != None:
1021 1021 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
1022 1022
1023 1023 data_dc = None
1024 1024 if self.dataOut.data_dc != None:
1025 1025 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
1026 1026
1027 1027 self.dataOut.data_spc = data_spc
1028 1028 self.dataOut.data_cspc = data_cspc
1029 1029 self.dataOut.data_dc = data_dc
1030 1030
1031 1031 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
1032 1032
1033 1033 return 1
1034 1034
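# Index-based variant of selectHeights, usage sketch (hypothetical names):
#
#   >>> procUnit.selectHeightsByIndex(minIndex=10, maxIndex=40)
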
1035 1035 def removeDC(self, mode = 2):
1036 1036 jspectra = self.dataOut.data_spc
1037 1037 jcspectra = self.dataOut.data_cspc
1038 1038
1039 1039
1040 1040 num_chan = jspectra.shape[0]
1041 1041 num_hei = jspectra.shape[2]
1042 1042
1043 1043 if jcspectra != None:
1044 1044 jcspectraExist = True
1045 1045 num_pairs = jcspectra.shape[0]
1046 1046 else: jcspectraExist = False
1047 1047
1048 1048 freq_dc = jspectra.shape[1]/2
1049 1049 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
1050 1050
1051 1051 if ind_vel[0]<0:
1052 1052 ind_vel[range(0,1)] = ind_vel[range(0,1)] + jspectra.shape[1] #wrap negative indexes around the number of profiles
1053 1053
1054 1054 if mode == 1:
1055 1055 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
1056 1056
1057 1057 if jcspectraExist:
1058 1058 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
1059 1059
1060 1060 if mode == 2:
1061 1061
1062 1062 vel = numpy.array([-2,-1,1,2])
1063 1063 xx = numpy.zeros([4,4])
1064 1064
1065 1065 for fil in range(4):
1066 1066 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1067 1067
1068 1068 xx_inv = numpy.linalg.inv(xx)
1069 1069 xx_aux = xx_inv[0,:]
1070 1070
1071 1071 for ich in range(num_chan):
1072 1072 yy = jspectra[ich,ind_vel,:]
1073 1073 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1074 1074
1075 1075 junkid = jspectra[ich,freq_dc,:]<=0
1076 1076 cjunkid = sum(junkid)
1077 1077
1078 1078 if cjunkid.any():
1079 1079 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1080 1080
1081 1081 if jcspectraExist:
1082 1082 for ip in range(num_pairs):
1083 1083 yy = jcspectra[ip,ind_vel,:]
1084 1084 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
1085 1085
1086 1086
1087 1087 self.dataOut.data_spc = jspectra
1088 1088 self.dataOut.data_cspc = jcspectra
1089 1089
1090 1090 return 1
1091 1091
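# Sketch of what mode=2 does for one channel and one height (plain numpy, with
# "spectrum" standing in for jspectra[ich,:,ih]): the DC bin is replaced by the
# value at velocity 0 of a cubic fitted through the bins at -2,-1,+1,+2.
#
#   >>> vel = numpy.array([-2, -1, 1, 2])
#   >>> xx = numpy.array([v**numpy.arange(4) for v in vel], dtype=float)
#   >>> w = numpy.linalg.inv(xx)[0,:]                   # interpolation weights
#   >>> spectrum[freq_dc] = numpy.dot(w, spectrum[ind_vel])
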
1092 1092 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
1093 1093
1094 1094 jspectra = self.dataOut.data_spc
1095 1095 jcspectra = self.dataOut.data_cspc
1096 1096 jnoise = self.dataOut.getNoise()
1097 1097 num_incoh = self.dataOut.nIncohInt
1098 1098
1099 1099 num_channel = jspectra.shape[0]
1100 1100 num_prof = jspectra.shape[1]
1101 1101 num_hei = jspectra.shape[2]
1102 1102
1103 1103 #hei_interf
1104 1104 if hei_interf == None:
1105 1105 count_hei = num_hei/2 #integer division is fine here
1106 1106 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
1107 1107 hei_interf = numpy.asarray(hei_interf)[0]
1108 1108 #nhei_interf
1109 1109 if (nhei_interf == None):
1110 1110 nhei_interf = 5
1111 1111 if (nhei_interf < 1):
1112 1112 nhei_interf = 1
1113 1113 if (nhei_interf > len(hei_interf)): #clamp to the available interference heights
1114 1114 nhei_interf = len(hei_interf)
1115 1115 if (offhei_interf == None):
1116 1116 offhei_interf = 0
1117 1117
1118 1118 ind_hei = range(num_hei)
1119 1119 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1120 1120 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1121 1121 mask_prof = numpy.asarray(range(num_prof))
1122 1122 num_mask_prof = mask_prof.size
1123 1123 comp_mask_prof = [0, num_prof/2]
1124 1124
1125 1125
1126 1126 #noise_exist: checks whether jnoise has been defined and holds the noise value of every channel
1127 1127 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1128 1128 jnoise = numpy.nan*numpy.ones(num_channel) #keep it indexable so jnoise[0] below does not fail
1129 1129 noise_exist = jnoise[0] < numpy.Inf
1130 1130
1131 1131 #Interference removal subroutine
1132 1132 for ich in range(num_channel):
1133 1133 #Sort the spectra by power (lowest to highest)
1134 1134 power = jspectra[ich,mask_prof,:]
1135 1135 power = power[:,hei_interf]
1136 1136 power = power.sum(axis = 0)
1137 1137 psort = power.ravel().argsort()
1138 1138
1139 1139 #Estimate the average interference in the power spectra using the lowest-power heights
1140 1140 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1141 1141
1142 1142 if noise_exist:
1143 1143 # tmp_noise = jnoise[ich] / num_prof
1144 1144 tmp_noise = jnoise[ich]
1145 1145 junkspc_interf = junkspc_interf - tmp_noise
1146 1146 #junkspc_interf[:,comp_mask_prof] = 0
1147 1147
1148 1148 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
1149 1149 jspc_interf = jspc_interf.transpose()
1150 1150 #Compute the average interference spectrum
1151 1151 noiseid = numpy.where(jspc_interf <= tmp_noise/ math.sqrt(num_incoh))
1152 1152 noiseid = noiseid[0]
1153 1153 cnoiseid = noiseid.size
1154 1154 interfid = numpy.where(jspc_interf > tmp_noise/ math.sqrt(num_incoh))
1155 1155 interfid = interfid[0]
1156 1156 cinterfid = interfid.size
1157 1157
1158 1158 if (cnoiseid > 0): jspc_interf[noiseid] = 0
1159 1159
1160 1160 #Expand the set of profiles to be cleaned
1161 1161 if (cinterfid > 0):
1162 1162 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
1163 1163 new_interfid = numpy.asarray(new_interfid)
1164 1164 #keep each profile index only once (sorted)
1165 1165 new_interfid = numpy.unique(new_interfid)
1166 1166 new_cinterfid = new_interfid.size
1167 1167 else: new_cinterfid = 0
1168 1168
1169 1169 for ip in range(new_cinterfid):
1170 1170 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
1171 1171 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
1172 1172
1173 1173
1174 1174 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #TODO: revisit these indexes
1175 1175
1176 1176 #Remove the interference at the profile with the strongest interference
1177 1177 ListAux = jspc_interf[mask_prof].tolist()
1178 1178 maxid = ListAux.index(max(ListAux))
1179 1179
1180 1180
1181 1181 if cinterfid > 0:
1182 1182 for ip in range(cinterfid*(interf == 2) - 1):
1183 1183 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/math.sqrt(num_incoh))).nonzero()
1184 1184 cind = len(ind)
1185 1185
1186 1186 if (cind > 0):
1187 1187 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/math.sqrt(num_incoh))
1188 1188
1189 1189 ind = numpy.array([-2,-1,1,2])
1190 1190 xx = numpy.zeros([4,4])
1191 1191
1192 1192 for id1 in range(4):
1193 1193 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1194 1194
1195 1195 xx_inv = numpy.linalg.inv(xx)
1196 1196 xx = xx_inv[:,0]
1197 1197 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1198 1198 yy = jspectra[ich,mask_prof[ind],:]
1199 1199 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1200 1200
1201 1201
1202 1202 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/math.sqrt(num_incoh))).nonzero()
1203 1203 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/math.sqrt(num_incoh))
1204 1204
1205 1205 #Interference removal in the cross-spectra
1206 1206 if jcspectra == None: return jspectra, jcspectra
1207 1207 num_pairs = jcspectra.size/(num_prof*num_hei)
1208 1208 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1209 1209
1210 1210 for ip in range(num_pairs):
1211 1211
1212 1212 #-------------------------------------------
1213 1213
1214 1214 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
1215 1215 cspower = cspower[:,hei_interf]
1216 1216 cspower = cspower.sum(axis = 0)
1217 1217
1218 1218 cspsort = cspower.ravel().argsort()
1219 1219 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1220 1220 junkcspc_interf = junkcspc_interf.transpose()
1221 1221 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
1222 1222
1223 1223 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1224 1224
1225 1225 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1226 1226 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1227 1227 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
1228 1228
1229 1229 for iprof in range(num_prof):
1230 1230 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
1231 1231 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
1232 1232
1233 1233 #Remove the interference
1234 1234 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
1235 1235
1236 1236 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1237 1237 maxid = ListAux.index(max(ListAux))
1238 1238
1239 1239 ind = numpy.array([-2,-1,1,2])
1240 1240 xx = numpy.zeros([4,4])
1241 1241
1242 1242 for id1 in range(4):
1243 1243 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1244 1244
1245 1245 xx_inv = numpy.linalg.inv(xx)
1246 1246 xx = xx_inv[:,0]
1247 1247
1248 1248 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1249 1249 yy = jcspectra[ip,mask_prof[ind],:]
1250 1250 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1251 1251
1252 1252 #Save the results
1253 1253 self.dataOut.data_spc = jspectra
1254 1254 self.dataOut.data_cspc = jcspectra
1255 1255
1256 1256 return 1
1257 1257
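# Usage sketch (hypothetical names; normally this is configured through the
# controller): clean the spectra using the default upper half of the height
# range as the interference reference, averaging the 5 lowest-power heights:
#
#   >>> procUnit.removeInterference(interf=2, nhei_interf=5, offhei_interf=0)
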
1258 1258 def setRadarFrequency(self, frequency=None):
1259 1259 if frequency != None:
1260 1260 self.dataOut.frequency = frequency
1261 1261
1262 1262 return 1
1263 1263
1264 1264 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
1265 1265 #height range validation
1266 1266 if minHei == None:
1267 1267 minHei = self.dataOut.heightList[0]
1268 1268
1269 1269 if maxHei == None:
1270 1270 maxHei = self.dataOut.heightList[-1]
1271 1271
1272 1272 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
1273 1273 print 'minHei: %.2f is out of the heights range'%(minHei)
1274 1274 print 'minHei is being set to %.2f'%(self.dataOut.heightList[0])
1275 1275 minHei = self.dataOut.heightList[0]
1276 1276
1277 1277 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
1278 1278 print 'maxHei: %.2f is out of the heights range'%(maxHei)
1279 1279 print 'maxHei is being set to %.2f'%(self.dataOut.heightList[-1])
1280 1280 maxHei = self.dataOut.heightList[-1]
1281 1281
1282 1282 # velocity range validation
1283 1283 velrange = self.dataOut.getVelRange(1)
1284 1284
1285 1285 if minVel == None:
1286 1286 minVel = velrange[0]
1287 1287
1288 1288 if maxVel == None:
1289 1289 maxVel = velrange[-1]
1290 1290
1291 1291 if (minVel < velrange[0]) or (minVel > maxVel):
1292 1292 print 'minVel: %.2f is out of the velocity range'%(minVel)
1293 1293 print 'minVel is being set to %.2f'%(velrange[0])
1294 1294 minVel = velrange[0]
1295 1295
1296 1296 if (maxVel > velrange[-1]) or (maxVel < minVel):
1297 1297 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
1298 1298 print 'maxVel is being set to %.2f'%(velrange[-1])
1299 1299 maxVel = velrange[-1]
1300 1300
1301 1301 # height index selection
1302 1302 minIndex = 0
1303 1303 maxIndex = 0
1304 1304 heights = self.dataOut.heightList
1305 1305
1306 1306 inda = numpy.where(heights >= minHei)
1307 1307 indb = numpy.where(heights <= maxHei)
1308 1308
1309 1309 try:
1310 1310 minIndex = inda[0][0]
1311 1311 except:
1312 1312 minIndex = 0
1313 1313
1314 1314 try:
1315 1315 maxIndex = indb[0][-1]
1316 1316 except:
1317 1317 maxIndex = len(heights)
1318 1318
1319 1319 if (minIndex < 0) or (minIndex > maxIndex):
1320 1320 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1321 1321
1322 1322 if (maxIndex >= self.dataOut.nHeights):
1323 1323 maxIndex = self.dataOut.nHeights-1
1324 1324
1325 1325 # velocity index selection
1326 1326 indminvel = numpy.where(velrange >= minVel)
1327 1327 indmaxvel = numpy.where(velrange <= maxVel)
1328 1328 try:
1329 1329 minIndexVel = indminvel[0][0]
1330 1330 except:
1331 1331 minIndexVel = 0
1332 1332
1333 1333 try:
1334 1334 maxIndexVel = indmaxvel[0][-1]
1335 1335 except:
1336 1336 maxIndexVel = len(velrange)
1337 1337
1338 1338 #spectrum selection
1339 1339 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
1340 1340 #noise estimation
1341 1341 noise = numpy.zeros(self.dataOut.nChannels)
1342 1342
1343 1343 for channel in range(self.dataOut.nChannels):
1344 1344 daux = data_spc[channel,:,:]
1345 1345 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
1346 1346
1347 1347 self.dataOut.noise = noise.copy()
1348 1348
1349 1349 return 1
1350 1350
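# Usage sketch (hypothetical height values): estimate the noise per channel
# using only the heights between 200.0 and 400.0 and the full velocity range:
#
#   >>> procUnit.getNoise(minHei=200.0, maxHei=400.0)
#   >>> procUnit.dataOut.noise    # one Hildebrand-Sekhon estimate per channel
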
1351 1351
1352 1352 class IncohInt(Operation):
1353 1353
1354 1354
1355 1355 __profIndex = 0
1356 1356 __withOverapping = False
1357 1357
1358 1358 __byTime = False
1359 1359 __initime = None
1360 1360 __lastdatatime = None
1361 1361 __integrationtime = None
1362 1362
1363 1363 __buffer_spc = None
1364 1364 __buffer_cspc = None
1365 1365 __buffer_dc = None
1366 1366
1367 1367 __dataReady = False
1368 1368
1369 1369 __timeInterval = None
1370 1370
1371 1371 n = None
1372 1372
1373 1373
1374 1374
1375 1375 def __init__(self):
1376 1376
1377 1377 self.__isConfig = False
1378 1378
1379 1379 def setup(self, n=None, timeInterval=None, overlapping=False):
1380 1380 """
1381 1381 Set the parameters of the integration class.
1382 1382
1383 1383 Inputs:
1384 1384
1385 1385 n : Number of incoherent integrations
1386 1386 timeInterval : Integration time. Ignored if the parameter "n" is given
1387 1387 overlapping :
1388 1388
1389 1389 """
1390 1390
1391 1391 self.__initime = None
1392 1392 self.__lastdatatime = 0
1393 1393 self.__buffer_spc = None
1394 1394 self.__buffer_cspc = None
1395 1395 self.__buffer_dc = None
1396 1396 self.__dataReady = False
1397 1397
1398 1398
1399 1399 if n == None and timeInterval == None:
1400 1400 raise ValueError, "n or timeInterval should be specified ..."
1401 1401
1402 1402 if n != None:
1403 1403 self.n = n
1404 1404 self.__byTime = False
1405 1405 else:
1406 1406 self.__integrationtime = timeInterval #if (type(timeInterval)!=integer) -> change this line
1407 1407 self.n = 9999
1408 1408 self.__byTime = True
1409 1409
1410 1410 if overlapping:
1411 1411 self.__withOverapping = True
1412 1412 else:
1413 1413 self.__withOverapping = False
1414 1414 self.__buffer_spc = 0
1415 1415 self.__buffer_cspc = 0
1416 1416 self.__buffer_dc = 0
1417 1417
1418 1418 self.__profIndex = 0
1419 1419
1420 1420 def putData(self, data_spc, data_cspc, data_dc):
1421 1421
1422 1422 """
1423 1423 Add a spectra block to __buffer_spc and increase __profIndex by one
1424 1424
1425 1425 """
1426 1426
1427 1427 if not self.__withOverapping:
1428 1428 self.__buffer_spc += data_spc
1429 1429
1430 1430 if data_cspc == None:
1431 1431 self.__buffer_cspc = None
1432 1432 else:
1433 1433 self.__buffer_cspc += data_cspc
1434 1434
1435 1435 if data_dc == None:
1436 1436 self.__buffer_dc = None
1437 1437 else:
1438 1438 self.__buffer_dc += data_dc
1439 1439
1440 1440 self.__profIndex += 1
1441 1441 return
1442 1442
1443 1443 #Overlapping data
1444 1444 nChannels, nFFTPoints, nHeis = data_spc.shape
1445 1445 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
1446 1446 if data_cspc != None:
1447 1447 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
1448 1448 if data_dc != None:
1449 1449 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
1450 1450
1451 1451 #If the buffer is empty then it takes the data value
1452 1452 if self.__buffer_spc == None:
1453 1453 self.__buffer_spc = data_spc
1454 1454
1455 1455 if data_cspc == None:
1456 1456 self.__buffer_cspc = None
1457 1457 else:
1458 1458 self.__buffer_cspc = data_cspc #first block: the buffer is still empty, so assign instead of accumulating
1459 1459
1460 1460 if data_dc == None:
1461 1461 self.__buffer_dc = None
1462 1462 else:
1463 1463 self.__buffer_dc = data_dc #first block: the buffer is still empty, so assign instead of accumulating
1464 1464
1465 1465 self.__profIndex += 1
1466 1466 return
1467 1467
1468 1468 #If the buffer length is lower than n then keep stacking the data
1469 1469 if self.__profIndex < self.n:
1470 1470 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
1471 1471
1472 1472 if data_cspc != None:
1473 1473 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
1474 1474
1475 1475 if data_dc != None:
1476 1476 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
1477 1477
1478 1478 self.__profIndex += 1
1479 1479 return
1480 1480
1481 1481 #If the buffer length is equal to n then replacing the last buffer value with the data value
1482 1482 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
1483 1483 self.__buffer_spc[self.n-1] = data_spc
1484 1484
1485 1485 if data_cspc != None:
1486 1486 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
1487 1487 self.__buffer_cspc[self.n-1] = data_cspc
1488 1488
1489 1489 if data_dc != None:
1490 1490 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
1491 1491 self.__buffer_dc[self.n-1] = data_dc
1492 1492
1493 1493 self.__profIndex = self.n
1494 1494 return
1495 1495
1496 1496
1497 1497 def pushData(self):
1498 1498 """
1499 1499 Return the sum of the last profiles and the profiles used in the sum.
1500 1500
1501 1501 Affected:
1502 1502
1503 1503 self.__profIndex
1504 1504
1505 1505 """
1506 1506 data_spc = None
1507 1507 data_cspc = None
1508 1508 data_dc = None
1509 1509
1510 1510 if not self.__withOverapping:
1511 1511 data_spc = self.__buffer_spc
1512 1512 data_cspc = self.__buffer_cspc
1513 1513 data_dc = self.__buffer_dc
1514 1514
1515 1515 n = self.__profIndex
1516 1516
1517 1517 self.__buffer_spc = 0
1518 1518 self.__buffer_cspc = 0
1519 1519 self.__buffer_dc = 0
1520 1520 self.__profIndex = 0
1521 1521
1522 1522 return data_spc, data_cspc, data_dc, n
1523 1523
1524 1524 #Integration with Overlapping
1525 1525 data_spc = numpy.sum(self.__buffer_spc, axis=0)
1526 1526
1527 1527 if self.__buffer_cspc != None:
1528 1528 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
1529 1529
1530 1530 if self.__buffer_dc != None:
1531 1531 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1532 1532
1533 1533 n = self.__profIndex
1534 1534
1535 1535 return data_spc, data_cspc, data_dc, n
1536 1536
1537 1537 def byProfiles(self, *args):
1538 1538
1539 1539 self.__dataReady = False
1540 1540 avgdata_spc = None
1541 1541 avgdata_cspc = None
1542 1542 avgdata_dc = None
1543 1543 n = None
1544 1544
1545 1545 self.putData(*args)
1546 1546
1547 1547 if self.__profIndex == self.n:
1548 1548
1549 1549 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1550 1550 self.__dataReady = True
1551 1551
1552 1552 return avgdata_spc, avgdata_cspc, avgdata_dc
1553 1553
1554 1554 def byTime(self, datatime, *args):
1555 1555
1556 1556 self.__dataReady = False
1557 1557 avgdata_spc = None
1558 1558 avgdata_cspc = None
1559 1559 avgdata_dc = None
1560 1560 n = None
1561 1561
1562 1562 self.putData(*args)
1563 1563
1564 1564 if (datatime - self.__initime) >= self.__integrationtime:
1565 1565 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1566 1566 self.n = n
1567 1567 self.__dataReady = True
1568 1568
1569 1569 return avgdata_spc, avgdata_cspc, avgdata_dc
1570 1570
1571 1571 def integrate(self, datatime, *args):
1572 1572
1573 1573 if self.__initime == None:
1574 1574 self.__initime = datatime
1575 1575
1576 1576 if self.__byTime:
1577 1577 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1578 1578 else:
1579 1579 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1580 1580
1581 1581 self.__lastdatatime = datatime
1582 1582
1583 1583 if avgdata_spc == None:
1584 1584 return None, None, None, None
1585 1585
1586 1586 avgdatatime = self.__initime
1587 1587 try:
1588 1588 self.__timeInterval = (self.__lastdatatime - self.__initime)/(self.n - 1)
1589 1589 except:
1590 1590 self.__timeInterval = self.__lastdatatime - self.__initime
1591 1591
1592 1592 deltatime = datatime -self.__lastdatatime
1593 1593
1594 1594 if not self.__withOverapping:
1595 1595 self.__initime = datatime
1596 1596 else:
1597 1597 self.__initime += deltatime
1598 1598
1599 1599 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1600 1600
1601 1601 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1602 1602
1603 1603 if n==1:
1604 1604 dataOut.flagNoData = False
1605 1605 return
1606 1606
1607 1607 if not self.__isConfig:
1608 1608 self.setup(n, timeInterval, overlapping)
1609 1609 self.__isConfig = True
1610 1610
1611 1611 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1612 1612 dataOut.data_spc,
1613 1613 dataOut.data_cspc,
1614 1614 dataOut.data_dc)
1615 1615
1616 1616 # dataOut.timeInterval *= n
1617 1617 dataOut.flagNoData = True
1618 1618
1619 1619 if self.__dataReady:
1620 1620
1621 1621 dataOut.data_spc = avgdata_spc
1622 1622 dataOut.data_cspc = avgdata_cspc
1623 1623 dataOut.data_dc = avgdata_dc
1624 1624
1625 1625 dataOut.nIncohInt *= self.n
1626 1626 dataOut.utctime = avgdatatime
1627 1627 #dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1628 1628 dataOut.timeInterval = self.__timeInterval*self.n
1629 1629 dataOut.flagNoData = False
1630 1630
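# Usage sketch (assuming "dataOut" is a populated Spectra object and this
# operation is called once per incoming block, e.g. from the controller):
#
#   >>> op = IncohInt()
#   >>> op.run(dataOut, n=10)    # dataOut.flagNoData stays True until 10
#   ...                          # spectra have been accumulated
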
1631 1631 class ProfileConcat(Operation):
1632 1632
1633 1633 __isConfig = False
1634 1634 buffer = None
1635 1635
1636 1636 def __init__(self):
1637 1637
1638 1638 self.profileIndex = 0
1639 1639
1640 1640 def reset(self):
1641 1641 self.buffer = numpy.zeros_like(self.buffer)
1642 1642 self.start_index = 0
1643 1643 self.times = 1
1644 1644
1645 1645 def setup(self, data, m, n=1):
1646 1646 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
1647 1647 self.profiles = data.shape[1]
1648 1648 self.start_index = 0
1649 1649 self.times = 1
1650 1650
1651 1651 def concat(self, data):
1652 1652
1653 1653 self.buffer[:,self.start_index:self.profiles*self.times] = data.copy()
1654 1654 self.start_index = self.start_index + self.profiles
1655 1655
1656 1656 def run(self, dataOut, m):
1657 1657
1658 1658 dataOut.flagNoData = True
1659 1659
1660 1660 if not self.__isConfig:
1661 1661 self.setup(dataOut.data, m, 1)
1662 1662 self.__isConfig = True
1663 1663
1664 1664 self.concat(dataOut.data)
1665 1665 self.times += 1
1666 1666 if self.times > m:
1667 1667 dataOut.data = self.buffer
1668 1668 self.reset()
1669 1669 dataOut.flagNoData = False
1670 1670 # more header and dataOut attributes should be updated here, e.g. the heights
1671 1671 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1672 1672 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m #extend the height range by the concatenation factor
1673 1673 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
1674 1674
1675 1675
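# Usage sketch (hypothetical): concatenate m=5 consecutive profiles into one
# profile that is 5 times longer in height:
#
#   >>> op = ProfileConcat()
#   >>> op.run(dataOut, m=5)    # flagNoData becomes False every 5th call
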
1676 1676
1677 1677 class ProfileSelector(Operation):
1678 1678
1679 1679 profileIndex = None
1680 1680 # Total number of profiles
1681 1681 nProfiles = None
1682 1682
1683 1683 def __init__(self):
1684 1684
1685 1685 self.profileIndex = 0
1686 1686
1687 1687 def incIndex(self):
1688 1688 self.profileIndex += 1
1689 1689
1690 1690 if self.profileIndex >= self.nProfiles:
1691 1691 self.profileIndex = 0
1692 1692
1693 1693 def isProfileInRange(self, minIndex, maxIndex):
1694 1694
1695 1695 if self.profileIndex < minIndex:
1696 1696 return False
1697 1697
1698 1698 if self.profileIndex > maxIndex:
1699 1699 return False
1700 1700
1701 1701 return True
1702 1702
1703 1703 def isProfileInList(self, profileList):
1704 1704
1705 1705 if self.profileIndex not in profileList:
1706 1706 return False
1707 1707
1708 1708 return True
1709 1709
1710 def run(self, dataOut, profileList=None, profileRangeList=None):
1710 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None):
1711 1711
1712 1712 dataOut.flagNoData = True
1713 1713 self.nProfiles = dataOut.nProfiles
1714 1714
1715 1715 if profileList != None:
1716 1716 if self.isProfileInList(profileList):
1717 1717 dataOut.flagNoData = False
1718 1718
1719 1719 self.incIndex()
1720 1720 return 1
1721 1721
1722 1722
1723 1723 elif profileRangeList != None:
1724 1724 minIndex = profileRangeList[0]
1725 1725 maxIndex = profileRangeList[1]
1726 1726 if self.isProfileInRange(minIndex, maxIndex):
1727 1727 dataOut.flagNoData = False
1728 1728
1729 1729 self.incIndex()
1730 1730 return 1
1731 elif beam != None:
1732 if self.isProfileInList(dataOut.beamRangeDict[beam]):
1733 dataOut.flagNoData = False
1734
1735 self.incIndex()
1736 return 1
1731 1737
1732 1738 else:
1733 1739 raise ValueError, "ProfileSelector needs profileList, profileRangeList or beam"
1734 1740
1735 1741 return 0
1736 1742
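# Usage sketch (hypothetical): keep only profiles 0..63 of every frame, or only
# the profiles belonging to beam 0 (the latter requires dataOut.beamRangeDict):
#
#   >>> op = ProfileSelector()
#   >>> op.run(dataOut, profileRangeList=[0, 63])
#
#   or, selecting by beam:
#
#   >>> op.run(dataOut, beam=0)
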
1737 1743 class SpectraHeisProc(ProcessingUnit):
1738 1744 def __init__(self):
1739 1745 self.objectDict = {}
1740 1746 # self.buffer = None
1741 1747 # self.firstdatatime = None
1742 1748 # self.profIndex = 0
1743 1749 self.dataOut = SpectraHeis()
1744 1750
1745 1751 def __updateObjFromInput(self):
1746 1752 self.dataOut.timeZone = self.dataIn.timeZone
1747 1753 self.dataOut.dstFlag = self.dataIn.dstFlag
1748 1754 self.dataOut.errorCount = self.dataIn.errorCount
1749 1755 self.dataOut.useLocalTime = self.dataIn.useLocalTime
1750 1756
1751 1757 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
1752 1758 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
1753 1759 self.dataOut.channelList = self.dataIn.channelList
1754 1760 self.dataOut.heightList = self.dataIn.heightList
1755 1761 # self.dataOut.dtype = self.dataIn.dtype
1756 1762 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
1757 1763 # self.dataOut.nHeights = self.dataIn.nHeights
1758 1764 # self.dataOut.nChannels = self.dataIn.nChannels
1759 1765 self.dataOut.nBaud = self.dataIn.nBaud
1760 1766 self.dataOut.nCode = self.dataIn.nCode
1761 1767 self.dataOut.code = self.dataIn.code
1762 1768 # self.dataOut.nProfiles = 1
1763 1769 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
1764 1770 self.dataOut.nFFTPoints = self.dataIn.nHeights
1765 1771 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
1766 1772 # self.dataOut.flagNoData = self.dataIn.flagNoData
1767 1773 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
1768 1774 self.dataOut.utctime = self.dataIn.utctime
1769 1775 # self.dataOut.utctime = self.firstdatatime
1770 1776 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
1771 1777 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data is already deflipped
1772 1778 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
1773 1779 self.dataOut.nCohInt = self.dataIn.nCohInt
1774 1780 self.dataOut.nIncohInt = 1
1775 1781 self.dataOut.ippSeconds= self.dataIn.ippSeconds
1776 1782 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
1777 1783
1778 1784 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
1779 1785 # self.dataOut.set=self.dataIn.set
1780 1786 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
1781 1787
1782 1788
1783 1789 def __updateObjFromFits(self):
1784 1790 self.dataOut.utctime = self.dataIn.utctime
1785 1791 self.dataOut.channelIndexList = self.dataIn.channelIndexList
1786 1792
1787 1793 self.dataOut.channelList = self.dataIn.channelList
1788 1794 self.dataOut.heightList = self.dataIn.heightList
1789 1795 self.dataOut.data_spc = self.dataIn.data
1790 1796 self.dataOut.timeInterval = self.dataIn.timeInterval
1791 1797 self.dataOut.timeZone = self.dataIn.timeZone
1792 1798 self.dataOut.useLocalTime = True
1793 1799 # self.dataOut.
1794 1800 # self.dataOut.
1795 1801
1796 1802 def __getFft(self):
1797 1803
1798 1804 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
1799 1805 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
1800 1806 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
1801 1807 self.dataOut.data_spc = spc
1802 1808
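# What __getFft computes per channel, written as a standalone sketch ("volts"
# and "nFFTPoints" stand in for self.dataIn.data and self.dataOut.nFFTPoints):
#
#   >>> fft_volt = numpy.fft.fftshift(numpy.fft.fft(volts, axis=1), axes=(1,))
#   >>> spc = numpy.abs(fft_volt)**2 / nFFTPoints   # same as fft*conj(fft)/nFFTPoints
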
1803 1809 def init(self):
1804 1810
1805 1811 self.dataOut.flagNoData = True
1806 1812
1807 1813 if self.dataIn.type == "Fits":
1808 1814 self.__updateObjFromFits()
1809 1815 self.dataOut.flagNoData = False
1810 1816 return
1811 1817
1812 1818 if self.dataIn.type == "SpectraHeis":
1813 1819 self.dataOut.copy(self.dataIn)
1814 1820 return
1815 1821
1816 1822 if self.dataIn.type == "Voltage":
1817 1823 self.__updateObjFromInput()
1818 1824 self.__getFft()
1819 1825 self.dataOut.flagNoData = False
1820 1826
1821 1827 return
1822 1828
1823 1829 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
1824 1830
1825 1831
1826 1832 def selectChannels(self, channelList):
1827 1833
1828 1834 channelIndexList = []
1829 1835
1830 1836 for channel in channelList:
1831 1837 index = self.dataOut.channelList.index(channel)
1832 1838 channelIndexList.append(index)
1833 1839
1834 1840 self.selectChannelsByIndex(channelIndexList)
1835 1841
1836 1842 def selectChannelsByIndex(self, channelIndexList):
1837 1843 """
1838 1844 Selects a block of data by channel, according to channelIndexList
1839 1845
1840 1846 Input:
1841 1847 channelIndexList : plain list of the channel indexes to keep, e.g. [2,3,7]
1842 1848
1843 1849 Affected:
1844 1850 self.dataOut.data
1845 1851 self.dataOut.channelIndexList
1846 1852 self.dataOut.nChannels
1847 1853 self.dataOut.m_ProcessingHeader.totalSpectra
1848 1854 self.dataOut.systemHeaderObj.numChannels
1849 1855 self.dataOut.m_ProcessingHeader.blockSize
1850 1856
1851 1857 Return:
1852 1858 None
1853 1859 """
1854 1860
1855 1861 for channelIndex in channelIndexList:
1856 1862 if channelIndex not in self.dataOut.channelIndexList:
1857 1863 print channelIndexList
1858 1864 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
1859 1865
1860 1866 nChannels = len(channelIndexList)
1861 1867
1862 1868 data_spc = self.dataOut.data_spc[channelIndexList,:]
1863 1869
1864 1870 self.dataOut.data_spc = data_spc
1865 1871 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
1866 1872
1867 1873 return 1
1868 1874
1869 1875 class IncohInt4SpectraHeis(Operation):
1870 1876
1871 1877 __isConfig = False
1872 1878
1873 1879 __profIndex = 0
1874 1880 __withOverapping = False
1875 1881
1876 1882 __byTime = False
1877 1883 __initime = None
1878 1884 __lastdatatime = None
1879 1885 __integrationtime = None
1880 1886
1881 1887 __buffer = None
1882 1888
1883 1889 __dataReady = False
1884 1890
1885 1891 n = None
1886 1892
1887 1893
1888 1894 def __init__(self):
1889 1895
1890 1896 self.__isConfig = False
1891 1897
1892 1898 def setup(self, n=None, timeInterval=None, overlapping=False):
1893 1899 """
1894 1900 Set the parameters of the integration class.
1895 1901
1896 1902 Inputs:
1897 1903
1898 1904 n : Number of incoherent integrations
1899 1905 timeInterval : Integration time. Ignored if the parameter "n" is given
1900 1906 overlapping :
1901 1907
1902 1908 """
1903 1909
1904 1910 self.__initime = None
1905 1911 self.__lastdatatime = 0
1906 1912 self.__buffer = None
1907 1913 self.__dataReady = False
1908 1914
1909 1915
1910 1916 if n == None and timeInterval == None:
1911 1917 raise ValueError, "n or timeInterval should be specified ..."
1912 1918
1913 1919 if n != None:
1914 1920 self.n = n
1915 1921 self.__byTime = False
1916 1922 else:
1917 1923 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
1918 1924 self.n = 9999
1919 1925 self.__byTime = True
1920 1926
1921 1927 if overlapping:
1922 1928 self.__withOverapping = True
1923 1929 self.__buffer = None
1924 1930 else:
1925 1931 self.__withOverapping = False
1926 1932 self.__buffer = 0
1927 1933
1928 1934 self.__profIndex = 0
1929 1935
1930 1936 def putData(self, data):
1931 1937
1932 1938 """
1933 1939 Add a spectra block to __buffer and increase __profIndex by one
1934 1940
1935 1941 """
1936 1942
1937 1943 if not self.__withOverapping:
1938 1944 self.__buffer += data.copy()
1939 1945 self.__profIndex += 1
1940 1946 return
1941 1947
1942 1948 #Overlapping data
1943 1949 nChannels, nHeis = data.shape
1944 1950 data = numpy.reshape(data, (1, nChannels, nHeis))
1945 1951
1946 1952 #If the buffer is empty then it takes the data value
1947 1953 if self.__buffer == None:
1948 1954 self.__buffer = data
1949 1955 self.__profIndex += 1
1950 1956 return
1951 1957
1952 1958 #If the buffer length is lower than n then keep stacking the data
1953 1959 if self.__profIndex < self.n:
1954 1960 self.__buffer = numpy.vstack((self.__buffer, data))
1955 1961 self.__profIndex += 1
1956 1962 return
1957 1963
1958 1964 #If the buffer length is equal to n then replacing the last buffer value with the data value
1959 1965 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
1960 1966 self.__buffer[self.n-1] = data
1961 1967 self.__profIndex = self.n
1962 1968 return
1963 1969
1964 1970
1965 1971 def pushData(self):
1966 1972 """
1967 1973 Return the sum of the last profiles and the profiles used in the sum.
1968 1974
1969 1975 Affected:
1970 1976
1971 1977 self.__profIndex
1972 1978
1973 1979 """
1974 1980
1975 1981 if not self.__withOverapping:
1976 1982 data = self.__buffer
1977 1983 n = self.__profIndex
1978 1984
1979 1985 self.__buffer = 0
1980 1986 self.__profIndex = 0
1981 1987
1982 1988 return data, n
1983 1989
1984 1990 #Integration with Overlapping
1985 1991 data = numpy.sum(self.__buffer, axis=0)
1986 1992 n = self.__profIndex
1987 1993
1988 1994 return data, n
1989 1995
1990 1996 def byProfiles(self, data):
1991 1997
1992 1998 self.__dataReady = False
1993 1999 avgdata = None
1994 2000 n = None
1995 2001
1996 2002 self.putData(data)
1997 2003
1998 2004 if self.__profIndex == self.n:
1999 2005
2000 2006 avgdata, n = self.pushData()
2001 2007 self.__dataReady = True
2002 2008
2003 2009 return avgdata
2004 2010
2005 2011 def byTime(self, data, datatime):
2006 2012
2007 2013 self.__dataReady = False
2008 2014 avgdata = None
2009 2015 n = None
2010 2016
2011 2017 self.putData(data)
2012 2018
2013 2019 if (datatime - self.__initime) >= self.__integrationtime:
2014 2020 avgdata, n = self.pushData()
2015 2021 self.n = n
2016 2022 self.__dataReady = True
2017 2023
2018 2024 return avgdata
2019 2025
2020 2026 def integrate(self, data, datatime=None):
2021 2027
2022 2028 if self.__initime == None:
2023 2029 self.__initime = datatime
2024 2030
2025 2031 if self.__byTime:
2026 2032 avgdata = self.byTime(data, datatime)
2027 2033 else:
2028 2034 avgdata = self.byProfiles(data)
2029 2035
2030 2036
2031 2037 self.__lastdatatime = datatime
2032 2038
2033 2039 if avgdata == None:
2034 2040 return None, None
2035 2041
2036 2042 avgdatatime = self.__initime
2037 2043
2038 2044 deltatime = datatime -self.__lastdatatime
2039 2045
2040 2046 if not self.__withOverapping:
2041 2047 self.__initime = datatime
2042 2048 else:
2043 2049 self.__initime += deltatime
2044 2050
2045 2051 return avgdata, avgdatatime
2046 2052
2047 2053 def run(self, dataOut, **kwargs):
2048 2054
2049 2055 if not self.__isConfig:
2050 2056 self.setup(**kwargs)
2051 2057 self.__isConfig = True
2052 2058
2053 2059 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
2054 2060
2055 2061 # dataOut.timeInterval *= n
2056 2062 dataOut.flagNoData = True
2057 2063
2058 2064 if self.__dataReady:
2059 2065 dataOut.data_spc = avgdata
2060 2066 dataOut.nIncohInt *= self.n
2061 2067 # dataOut.nCohInt *= self.n
2062 2068 dataOut.utctime = avgdatatime
2063 2069 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
2064 2070 # dataOut.timeInterval = self.__timeInterval*self.n
2065 2071 dataOut.flagNoData = False
2066 2072
2067 2073
2068 2074
2069 2075
2070 No newline at end of file
2076 class AMISRProc(ProcessingUnit):
2077 def __init__(self):
2078 self.objectDict = {}
2079 self.dataOut = AMISR()
2080
2081 def init(self):
2082 if self.dataIn.type == 'AMISR':
2083 self.dataOut.copy(self.dataIn)
2084
2085 class BeamSelector(Operation):
2086 profileIndex = None
2088 # Total number of profiles
2088 nProfiles = None
2089
2090 def __init__(self):
2091
2092 self.profileIndex = 0
2093
2094 def incIndex(self):
2095 self.profileIndex += 1
2096
2097 if self.profileIndex >= self.nProfiles:
2098 self.profileIndex = 0
2099
2100 def isProfileInRange(self, minIndex, maxIndex):
2101
2102 if self.profileIndex < minIndex:
2103 return False
2104
2105 if self.profileIndex > maxIndex:
2106 return False
2107
2108 return True
2109
2110 def isProfileInList(self, profileList):
2111
2112 if self.profileIndex not in profileList:
2113 return False
2114
2115 return True
2116
2117 def run(self, dataOut, beam=None):
2118
2119 dataOut.flagNoData = True
2120 self.nProfiles = dataOut.nProfiles
2121
2122 if beam != None:
2123 if self.isProfileInList(dataOut.beamRangeDict[beam]):
2124 dataOut.flagNoData = False
2125
2126 self.incIndex()
2127 return 1
2128
2129 else:
2130 raise ValueError, "BeamSelector needs beam value"
2131
2132 return 0
No newline at end of file
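# Usage sketch for the new beam selection (assumed beamRangeDict layout):
# dataOut.beamRangeDict is expected to map each beam id to the list of profile
# indexes that belong to that beam, e.g. {0: range(0, 32), 1: range(32, 64)}.
#
#   >>> op = BeamSelector()
#   >>> op.run(dataOut, beam=0)    # flagNoData is False only while the current
#   ...                            # profile index belongs to beam 0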