Bug Fixed: looking folders inside of path...
Daniel Valdez -
r479:c8a51503fa1e
@@ -1,702 +1,704
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11 import time
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14
15 15 def hildebrand_sekhon(data, navg):
16 16
17 17 data = data.copy()
18 18
19 19 sortdata = numpy.sort(data,axis=None)
20 20 lenOfData = len(sortdata)
21 21 nums_min = lenOfData/10
22 22
23 23 if (lenOfData/10) > 2:
24 24 nums_min = lenOfData/10
25 25 else:
26 26 nums_min = 2
27 27
28 28 sump = 0.
29 29
30 30 sumq = 0.
31 31
32 32 j = 0
33 33
34 34 cont = 1
35 35
36 36 while((cont==1)and(j<lenOfData)):
37 37
38 38 sump += sortdata[j]
39 39
40 40 sumq += sortdata[j]**2
41 41
42 42 j += 1
43 43
44 44 if j > nums_min:
45 45 rtest = float(j)/(j-1) + 1.0/navg
46 46 if ((sumq*j) > (rtest*sump**2)):
47 47 j = j - 1
48 48 sump = sump - sortdata[j]
49 49 sumq = sumq - sortdata[j]**2
50 50 cont = 0
51 51
52 52 lnoise = sump /j
53 53 stdv = numpy.sqrt((sumq - lnoise**2)/(j - 1))
54 54 return lnoise
55 55
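# --- Editor's usage sketch (illustrative only, not part of the original file) ---
# hildebrand_sekhon() implements the Hildebrand & Sekhon (1974) objective noise
# estimator: samples are sorted in ascending order and accumulated while the
# ratio of their second moment to their squared mean is still consistent with
# noise that has been incoherently averaged navg times.
#
# import numpy
# navg = 10
# # noise-like spectral samples already averaged over navg incoherent integrations
# daux = numpy.random.exponential(1.0, size=(navg, 1000)).mean(axis=0)
# daux[:5] += 50.0   # inject a few strong "signal" bins
# level = hildebrand_sekhon(daux, navg)
# print(level)       # expected to land near the noise mean (~1.0), far below the signal bins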
56 56 class JROData:
57 57
58 58 # m_BasicHeader = BasicHeader()
59 59 # m_ProcessingHeader = ProcessingHeader()
60 60
61 61 systemHeaderObj = SystemHeader()
62 62
63 63 radarControllerHeaderObj = RadarControllerHeader()
64 64
65 65 # data = None
66 66
67 67 type = None
68 68
69 69 dtype = None
70 70
71 71 # nChannels = None
72 72
73 73 # nHeights = None
74 74
75 75 nProfiles = None
76 76
77 77 heightList = None
78 78
79 79 channelList = None
80 80
81 81 flagNoData = True
82 82
83 83 flagTimeBlock = False
84 84
85 85 useLocalTime = False
86 86
87 87 utctime = None
88 88
89 89 timeZone = None
90 90
91 91 dstFlag = None
92 92
93 93 errorCount = None
94 94
95 95 blocksize = None
96 96
97 97 nCode = None
98 98
99 99 nBaud = None
100 100
101 101 code = None
102 102
103 103 flagDecodeData = False #assume the data has not been decoded
104 104
105 105 flagDeflipData = False #assume the data has not been deflipped
106 106
107 107 flagShiftFFT = False
108 108
109 109 ippSeconds = None
110 110
111 111 timeInterval = None
112 112
113 113 nCohInt = None
114 114
115 115 noise = None
116 116
117 117 windowOfFilter = 1
118 118
119 119 #Speed of light
120 120 C = 3e8
121 121
122 122 frequency = 49.92e6
123 123
124 124 realtime = False
125 125
126 126 beacon_heiIndexList = None
127 127
128 128 last_block = None
129 129
130 130 blocknow = None
131 131
132 132 def __init__(self):
133 133
134 134 raise ValueError, "This class has not been implemented"
135 135
136 136 def copy(self, inputObj=None):
137 137
138 138 if inputObj == None:
139 139 return copy.deepcopy(self)
140 140
141 141 for key in inputObj.__dict__.keys():
142 142 self.__dict__[key] = inputObj.__dict__[key]
143 143
144 144 def deepcopy(self):
145 145
146 146 return copy.deepcopy(self)
147 147
148 148 def isEmpty(self):
149 149
150 150 return self.flagNoData
151 151
152 152 def getNoise(self):
153 153
154 154 raise ValueError, "Not implemented"
155 155
156 156 def getNChannels(self):
157 157
158 158 return len(self.channelList)
159 159
160 160 def getChannelIndexList(self):
161 161
162 162 return range(self.nChannels)
163 163
164 164 def getNHeights(self):
165 165
166 166 return len(self.heightList)
167 167
168 168 def getHeiRange(self, extrapoints=0):
169 169
170 170 heis = self.heightList
171 171 # deltah = self.heightList[1] - self.heightList[0]
172 172 #
173 173 # heis.append(self.heightList[-1])
174 174
175 175 return heis
176 176
177 177 def getltctime(self):
178 178
179 179 if self.useLocalTime:
180 180 return self.utctime - self.timeZone*60
181 181
182 182 return self.utctime
183 183
184 184 def getDatatime(self):
185 185
186 186 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
187 187 return datatime
188 188
189 189 def getTimeRange(self):
190 190
191 191 datatime = []
192 192
193 193 datatime.append(self.ltctime)
194 194 datatime.append(self.ltctime + self.timeInterval)
195 195
196 196 datatime = numpy.array(datatime)
197 197
198 198 return datatime
199 199
200 200 def getFmax(self):
201 201
202 202 PRF = 1./(self.ippSeconds * self.nCohInt)
203 203
204 204 fmax = PRF/2.
205 205
206 206 return fmax
207 207
208 208 def getVmax(self):
209 209
210 210 _lambda = self.C/self.frequency
211 211
212 212 vmax = self.getFmax() * _lambda
213 213
214 214 return vmax
215 215
216 216 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
217 217 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
218 218 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
219 219 noise = property(getNoise, "I'm the 'noise' property.")
220 220 datatime = property(getDatatime, "I'm the 'datatime' property")
221 221 ltctime = property(getltctime, "I'm the 'ltctime' property")
222 222
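# --- Editor's worked example (the ippSeconds and nCohInt values are assumed) ---
# With the class defaults C = 3e8 m/s and frequency = 49.92e6 Hz, and assuming
# ippSeconds = 0.001 s and nCohInt = 2:
#   PRF     = 1./(ippSeconds*nCohInt) = 1/0.002 = 500 Hz
#   fmax    = PRF/2. = 250 Hz                      (getFmax)
#   _lambda = C/frequency = 3e8/49.92e6 ~ 6.01 m
#   vmax    = fmax*_lambda ~ 1502 m/s              (getVmax, as implemented above)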
223 223 class Voltage(JROData):
224 224
225 225 #data is a 2-dimensional numpy array (channels, heights)
226 226 data = None
227 227
228 228 def __init__(self):
229 229 '''
230 230 Constructor
231 231 '''
232 232
233 233 self.radarControllerHeaderObj = RadarControllerHeader()
234 234
235 235 self.systemHeaderObj = SystemHeader()
236 236
237 237 self.type = "Voltage"
238 238
239 239 self.data = None
240 240
241 241 self.dtype = None
242 242
243 243 # self.nChannels = 0
244 244
245 245 # self.nHeights = 0
246 246
247 247 self.nProfiles = None
248 248
249 249 self.heightList = None
250 250
251 251 self.channelList = None
252 252
253 253 # self.channelIndexList = None
254 254
255 255 self.flagNoData = True
256 256
257 257 self.flagTimeBlock = False
258 258
259 259 self.utctime = None
260 260
261 261 self.timeZone = None
262 262
263 263 self.dstFlag = None
264 264
265 265 self.errorCount = None
266 266
267 267 self.nCohInt = None
268 268
269 269 self.blocksize = None
270 270
271 271 self.flagDecodeData = False #assume the data has not been decoded
272 272
273 273 self.flagDeflipData = False #assume the data has not been deflipped
274 274
275 275 self.flagShiftFFT = False
276 276
277 277
278 278 def getNoisebyHildebrand(self):
279 279 """
280 280 Determines the noise level using the Hildebrand-Sekhon method
281 281
282 282 Return:
283 283 noiselevel
284 284 """
285 285
286 286 for channel in range(self.nChannels):
287 287 daux = self.data_spc[channel,:,:]
288 288 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
289 289
290 290 return self.noise
291 291
292 292 def getNoise(self, type = 1):
293 293
294 294 self.noise = numpy.zeros(self.nChannels)
295 295
296 296 if type == 1:
297 297 noise = self.getNoisebyHildebrand()
298 298
299 299 return 10*numpy.log10(noise)
300 300
301 301 class Spectra(JROData):
302 302
303 303 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
304 304 data_spc = None
305 305
306 306 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
307 307 data_cspc = None
308 308
309 309 #data_dc is a 2-dimensional numpy array (channels, heights)
310 310 data_dc = None
311 311
312 312 nFFTPoints = None
313 313
314 314 nPairs = None
315 315
316 316 pairsList = None
317 317
318 318 nIncohInt = None
319 319
320 320 wavelength = None #needed to compute the velocity range from the frequency
321 321
322 322 nCohInt = None #required to determine the value of timeInterval
323 323
324 324 ippFactor = None
325 325
326 326 def __init__(self):
327 327 '''
328 328 Constructor
329 329 '''
330 330
331 331 self.radarControllerHeaderObj = RadarControllerHeader()
332 332
333 333 self.systemHeaderObj = SystemHeader()
334 334
335 335 self.type = "Spectra"
336 336
337 337 # self.data = None
338 338
339 339 self.dtype = None
340 340
341 341 # self.nChannels = 0
342 342
343 343 # self.nHeights = 0
344 344
345 345 self.nProfiles = None
346 346
347 347 self.heightList = None
348 348
349 349 self.channelList = None
350 350
351 351 # self.channelIndexList = None
352 352
353 353 self.flagNoData = True
354 354
355 355 self.flagTimeBlock = False
356 356
357 357 self.utctime = None
358 358
359 359 self.nCohInt = None
360 360
361 361 self.nIncohInt = None
362 362
363 363 self.blocksize = None
364 364
365 365 self.nFFTPoints = None
366 366
367 367 self.wavelength = None
368 368
369 369 self.flagDecodeData = False #assume the data has not been decoded
370 370
371 371 self.flagDeflipData = False #assume the data has not been deflipped
372 372
373 373 self.flagShiftFFT = False
374 374
375 375 self.ippFactor = 1
376 376
377 377 self.noise = None
378 378
379 379 self.beacon_heiIndexList = []
380 380
381 381
382 382 def getNoisebyHildebrand(self):
383 383 """
384 384 Determines the noise level using the Hildebrand-Sekhon method
385 385
386 386 Return:
387 387 noiselevel
388 388 """
389 389 noise = numpy.zeros(self.nChannels)
390 390 for channel in range(self.nChannels):
391 391 daux = self.data_spc[channel,:,:]
392 392 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
393 393
394 394 return noise
395 395
396 396 def getNoise(self):
397 397 if self.noise != None:
398 398 return self.noise
399 399 else:
400 400 noise = self.getNoisebyHildebrand()
401 401 return noise
402 402
403 403
404 404 def getFreqRange(self, extrapoints=0):
405 405
406 406 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
407 407 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
408 408
409 409 return freqrange
410 410
411 411 def getVelRange(self, extrapoints=0):
412 412
413 413 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
414 414 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
415 415
416 416 return velrange
417 417
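# --- Editor's sketch of how the axes above are built (numbers are assumed) ---
# getFreqRange()/getVelRange() return axes centred on zero Doppler. For a
# hypothetical object with nFFTPoints = 8, ippFactor = 1 and getVmax() = 100 m/s:
#   deltav   = 100/8 = 12.5 m/s
#   velrange = 12.5*(numpy.arange(8) - 4.) - 6.25
#            = [-56.25, -43.75, -31.25, -18.75, -6.25, 6.25, 18.75, 31.25] m/s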
418 418 def getNPairs(self):
419 419
420 420 return len(self.pairsList)
421 421
422 422 def getPairsIndexList(self):
423 423
424 424 return range(self.nPairs)
425 425
426 426 def getNormFactor(self):
427 427 pwcode = 1
428 428 if self.flagDecodeData:
429 429 pwcode = numpy.sum(self.code[0]**2)
430 430 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
431 431 normFactor = self.nProfiles*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
432 432
433 433 return normFactor
434 434
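# --- Editor's note on getNormFactor (the final division is an assumption about
# typical use, not something defined in this file) ---
# normFactor accumulates every gain applied during processing, e.g. with
# nProfiles=128, nIncohInt=10, nCohInt=2, pwcode=1, windowOfFilter=1:
#   normFactor = 128*10*2*1*1 = 2560
# so calibrated power would typically be obtained as data_spc / normFactor.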
435 435 def getFlagCspc(self):
436 436
437 437 if self.data_cspc == None:
438 438 return True
439 439
440 440 return False
441 441
442 442 def getFlagDc(self):
443 443
444 444 if self.data_dc == None:
445 445 return True
446 446
447 447 return False
448 448
449 449 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
450 450 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
451 451 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
452 452 flag_cspc = property(getFlagCspc)
453 453 flag_dc = property(getFlagDc)
454 454
455 455 class SpectraHeis(JROData):
456 456
457 457 data_spc = None
458 458
459 459 data_cspc = None
460 460
461 461 data_dc = None
462 462
463 463 nFFTPoints = None
464 464
465 465 nPairs = None
466 466
467 467 pairsList = None
468 468
469 469 nIncohInt = None
470 470
471 471 def __init__(self):
472 472
473 473 self.radarControllerHeaderObj = RadarControllerHeader()
474 474
475 475 self.systemHeaderObj = SystemHeader()
476 476
477 477 self.type = "SpectraHeis"
478 478
479 479 self.dtype = None
480 480
481 481 # self.nChannels = 0
482 482
483 483 # self.nHeights = 0
484 484
485 485 self.nProfiles = None
486 486
487 487 self.heightList = None
488 488
489 489 self.channelList = None
490 490
491 491 # self.channelIndexList = None
492 492
493 493 self.flagNoData = True
494 494
495 495 self.flagTimeBlock = False
496 496
497 497 self.nPairs = 0
498 498
499 499 self.utctime = None
500 500
501 501 self.blocksize = None
502 502
503 503 class Fits:
504 504
505 505 heightList = None
506 506
507 507 channelList = None
508 508
509 509 flagNoData = True
510 510
511 511 flagTimeBlock = False
512 512
513 513 useLocalTime = False
514 514
515 515 utctime = None
516 516
517 517 timeZone = None
518 518
519 519 ippSeconds = None
520 520
521 521 timeInterval = None
522 522
523 523 nCohInt = None
524 524
525 525 nIncohInt = None
526 526
527 527 noise = None
528 528
529 529 windowOfFilter = 1
530 530
531 531 #Speed of light
532 532 C = 3e8
533 533
534 534 frequency = 49.92e6
535 535
536 536 realtime = False
537 537
538 538
539 539 def __init__(self):
540 540
541 541 self.type = "Fits"
542 542
543 543 self.nProfiles = None
544 544
545 545 self.heightList = None
546 546
547 547 self.channelList = None
548 548
549 549 # self.channelIndexList = None
550 550
551 551 self.flagNoData = True
552 552
553 553 self.utctime = None
554 554
555 555 self.nCohInt = None
556 556
557 557 self.nIncohInt = None
558 558
559 559 self.useLocalTime = True
560 560
561 561 # self.utctime = None
562 562 # self.timeZone = None
563 563 # self.ltctime = None
564 564 # self.timeInterval = None
565 565 # self.header = None
566 566 # self.data_header = None
567 567 # self.data = None
568 568 # self.datatime = None
569 569 # self.flagNoData = False
570 570 # self.expName = ''
571 571 # self.nChannels = None
572 572 # self.nSamples = None
573 573 # self.dataBlocksPerFile = None
574 574 # self.comments = ''
575 575 #
576 576
577 577
578 578 def getltctime(self):
579 579
580 580 if self.useLocalTime:
581 581 return self.utctime - self.timeZone*60
582 582
583 583 return self.utctime
584 584
585 585 def getDatatime(self):
586 586
587 587 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
588 588 return datatime
589 589
590 590 def getTimeRange(self):
591 591
592 592 datatime = []
593 593
594 594 datatime.append(self.ltctime)
595 595 datatime.append(self.ltctime + self.timeInterval)
596 596
597 597 datatime = numpy.array(datatime)
598 598
599 599 return datatime
600 600
601 601 def getHeiRange(self):
602 602
603 603 heis = self.heightList
604 604
605 605 return heis
606 606
607 607 def isEmpty(self):
608 608
609 609 return self.flagNoData
610 610
611 611 def getNHeights(self):
612 612
613 613 return len(self.heightList)
614 614
615 615 def getNChannels(self):
616 616
617 617 return len(self.channelList)
618 618
619 619 def getChannelIndexList(self):
620 620
621 621 return range(self.nChannels)
622 622
623 623 def getNoise(self, type = 1):
624 624
625 625 self.noise = numpy.zeros(self.nChannels)
626 626
627 627 if type == 1:
628 628 noise = self.getNoisebyHildebrand()
629 629
630 630 if type == 2:
631 631 noise = self.getNoisebySort()
632 632
633 633 if type == 3:
634 634 noise = self.getNoisebyWindow()
635 635
636 636 return noise
637 637
638 638 datatime = property(getDatatime, "I'm the 'datatime' property")
639 639 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
640 640 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
641 641 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
642 642 noise = property(getNoise, "I'm the 'noise' property.")
643 643 datatime = property(getDatatime, "I'm the 'datatime' property")
644 644 ltctime = property(getltctime, "I'm the 'ltctime' property")
645 645
646 646 ltctime = property(getltctime, "I'm the 'ltctime' property")
647 647
648 648 class AMISR:
649 649 def __init__(self):
650 650 self.flagNoData = True
651 651 self.data = None
652 652 self.utctime = None
653 653 self.type = "AMISR"
654 654
655 655 #properties for compatibility with Voltage objects
656 self.timeZone = 0#timezone like jroheader, difference in minutes between UTC and localtime
656 self.timeZone = 300#timezone like jroheader, difference in minutes between UTC and localtime
657 657 self.dstFlag = 0#self.dataIn.dstFlag
658 658 self.errorCount = 0#self.dataIn.errorCount
659 659 self.useLocalTime = True#self.dataIn.useLocalTime
660 660
661 661 self.radarControllerHeaderObj = None#self.dataIn.radarControllerHeaderObj.copy()
662 662 self.systemHeaderObj = None#self.dataIn.systemHeaderObj.copy()
663 663 self.channelList = [0]#self.dataIn.channelList this only applies to the AMISR case
664 664 self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
665 665
666 666 self.flagTimeBlock = None#self.dataIn.flagTimeBlock
667 667 #self.utctime = #self.firstdatatime
668 668 self.flagDecodeData = None#self.dataIn.flagDecodeData #assume the data is decoded
669 669 self.flagDeflipData = None#self.dataIn.flagDeflipData #assume the data has been deflipped
670 670
671 671 self.nCohInt = 1#self.dataIn.nCohInt
672 672 self.nIncohInt = 1
673 673 self.ippSeconds = None#self.dataIn.ippSeconds, according to the filename/Setup/Tufile
674 674 self.windowOfFilter = None#self.dataIn.windowOfFilter
675 675
676 676 self.timeInterval = None#self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
677 677 self.frequency = None#self.dataIn.frequency
678 678 self.realtime = 0#self.dataIn.realtime
679 679
680 680 #updated while reading the data
681 681 self.heightList = None#self.dataIn.heightList
682 self.nProfiles = None#self.dataOut.nFFTPoints
682 self.nProfiles = None#Number of samples or nFFTPoints
683 self.nRecords = None
684 self.nBeams = None
683 685 self.nBaud = None#self.dataIn.nBaud
684 686 self.nCode = None#self.dataIn.nCode
685 687 self.code = None#self.dataIn.code
686 688
687 689 #bookkeeping for the Beams
688 690 self.beamCodeDict = None
689 691 self.beamRangeDict = None
690 692
691 693 def copy(self, inputObj=None):
692 694
693 695 if inputObj == None:
694 696 return copy.deepcopy(self)
695 697
696 698 for key in inputObj.__dict__.keys():
697 699 self.__dict__[key] = inputObj.__dict__[key]
698 700
699 701
700 702 def isEmpty(self):
701 703
702 704 return self.flagNoData No newline at end of file
@@ -1,3926 +1,3933
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 import h5py
14 14 import re
15 15 from xml.etree.ElementTree import Element, SubElement, ElementTree
16 16 try:
17 17 import pyfits
18 18 except:
19 19 print "pyfits module has not been imported, it should be installed to save files in fits format"
20 20
21 21 from jrodata import *
22 22 from jroheaderIO import *
23 23 from jroprocessing import *
24 24
25 25 LOCALTIME = True #-18000
26 26
27 27 def isNumber(str):
28 28 """
29 29 Checks whether the characters that make up a string can be converted to a number.
30 30
31 31 Exceptions:
32 32 If a given string cannot be converted to a number
33 33 Input:
34 34 str, the string to analyze to determine whether it is convertible to a number or not
35 35
36 36 Return:
37 37 True : if the string is numeric
38 38 False : if it is not a numeric string
39 39 """
40 40 try:
41 41 float( str )
42 42 return True
43 43 except:
44 44 return False
45 45
46 46 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
47 47 """
48 48 This function determines whether a data file falls inside the specified date range.
49 49
50 50 Inputs:
51 51 filename : full name of the data file in Jicamarca format (.r)
52 52
53 53 startUTSeconds : start date of the selected range, given in
54 54 seconds counted from 01/01/1970.
55 55 endUTSeconds : end date of the selected range, given in
56 56 seconds counted from 01/01/1970.
57 57
58 58 Return:
59 59 Boolean : returns True if the data file contains data within the specified
60 60 date range, otherwise returns False.
61 61
62 62 Exceptions:
63 63 If the file does not exist or cannot be opened
64 64 If the header cannot be read.
65 65
66 66 """
67 67 basicHeaderObj = BasicHeader(LOCALTIME)
68 68
69 69 try:
70 70 fp = open(filename,'rb')
71 71 except:
72 72 raise IOError, "The file %s can't be opened" %(filename)
73 73
74 74 sts = basicHeaderObj.read(fp)
75 75 fp.close()
76 76
77 77 if not(sts):
78 78 print "Skipping the file %s because it has not a valid header" %(filename)
79 79 return 0
80 80
81 81 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 82 return 0
83 83
84 84 return 1
85 85
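# --- Editor's usage sketch (the file path and dates are hypothetical) ---
# The UT bounds are plain POSIX timestamps, e.g. for 2012-11-20 (doy 325):
# import calendar, datetime
# start = calendar.timegm(datetime.datetime(2012, 11, 20).timetuple())
# end = calendar.timegm(datetime.datetime(2012, 11, 21).timetuple())
# if isThisFileinRange("/data/d2012325/D2012325001.r", start, end):
#     print("the file starts inside the requested day")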
86 86 def isFileinThisTime(filename, startTime, endTime):
87 87 """
88 88 Returns the datetime of the first block if the data file falls inside the specified time range.
89 89
90 90 Inputs:
91 91 filename : full name of the data file in Jicamarca format (.r)
92 92
93 93 startTime : start of the selected range as a datetime.time
94 94
95 95 endTime : end of the selected range as a datetime.time
96 96
97 97 Return:
98 98 datetime : the datetime of the first block if the data file contains data within the
99 99 specified time range, otherwise None.
100 100
101 101 Exceptions:
102 102 If the file does not exist or cannot be opened
103 103 If the header cannot be read.
104 104
105 105 """
106 106
107 107
108 108 try:
109 109 fp = open(filename,'rb')
110 110 except:
111 111 raise IOError, "The file %s can't be opened" %(filename)
112 112
113 113 basicHeaderObj = BasicHeader(LOCALTIME)
114 114 sts = basicHeaderObj.read(fp)
115 115 fp.close()
116 116
117 117 thisDatetime = basicHeaderObj.datatime
118 118 thisTime = basicHeaderObj.datatime.time()
119 119
120 120 if not(sts):
121 121 print "Skipping the file %s because it has not a valid header" %(filename)
122 122 return None
123 123
124 124 if not ((startTime <= thisTime) and (endTime > thisTime)):
125 125 return None
126 126
127 127 return thisDatetime
128 128
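# --- Editor's usage sketch (the path is hypothetical) ---
# import datetime
# dt = isFileinThisTime("/data/d2012325/D2012325001.r",
#                       datetime.time(8, 0, 0), datetime.time(18, 0, 0))
# # dt is the header datetime when the first block falls between 08:00 and 18:00,
# # otherwise None.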
129 129 def getFileFromSet(path,ext,set):
130 130 validFilelist = []
131 131 fileList = os.listdir(path)
132 132
133 133 # 0 1234 567 89A BCDE
134 134 # H YYYY DDD SSS .ext
135 135
136 136 for file in fileList:
137 137 try:
138 138 year = int(file[1:5])
139 139 doy = int(file[5:8])
140 140
141 141
142 142 except:
143 143 continue
144 144
145 145 if (os.path.splitext(file)[-1].lower() != ext.lower()):
146 146 continue
147 147
148 148 validFilelist.append(file)
149 149
150 150 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
151 151
152 152 if len(myfile)!= 0:
153 153 return myfile[0]
154 154 else:
155 155 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
156 156 print 'the filename %s does not exist'%filename
157 157 print '...going to the last file: '
158 158
159 159 if validFilelist:
160 160 validFilelist = sorted( validFilelist, key=str.lower )
161 161 return validFilelist[-1]
162 162
163 163 return None
164 164
165 165
166 166 def getlastFileFromPath(path, ext):
167 167 """
168 168 Filters the fileList, keeping only the names that match the "PYYYYDDDSSS.ext" format,
169 169 and returns the last file of the list that remains after filtering.
170 170
171 171 Input:
172 172 fileList : list containing every file (without path) that makes up a given folder
173 173 ext : extension of the files contained in a folder
174 174
175 175 Return:
176 176 The last file of a given folder, without its path.
177 177 """
178 178 validFilelist = []
179 179 fileList = os.listdir(path)
180 180
181 181 # 0 1234 567 89A BCDE
182 182 # H YYYY DDD SSS .ext
183 183
184 184 for file in fileList:
185 185 try:
186 186 year = int(file[1:5])
187 187 doy = int(file[5:8])
188 188
189 189
190 190 except:
191 191 continue
192 192
193 193 if (os.path.splitext(file)[-1].lower() != ext.lower()):
194 194 continue
195 195
196 196 validFilelist.append(file)
197 197
198 198 if validFilelist:
199 199 validFilelist = sorted( validFilelist, key=str.lower )
200 200 return validFilelist[-1]
201 201
202 202 return None
203 203
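# --- Editor's note on the filename convention used above ---
# getFileFromSet() and getlastFileFromPath() both assume Jicamarca-style names,
# e.g. "D2012325003.r":
#   int("D2012325003.r"[1:5])  -> 2012 (year)
#   int("D2012325003.r"[5:8])  -> 325  (day of year)
#   int("D2012325003.r"[8:11]) -> 3    (set)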
204 204 def checkForRealPath(path, foldercounter, year, doy, set, ext):
205 205 """
206 206 Since Linux is case sensitive, checkForRealPath finds the correct name of a path
207 207 by trying several upper/lower-case combinations of the name to determine
208 208 the exact path of a given file.
209 209
210 210 Example :
211 211 the correct name of the file is .../.../D2009307/P2009307367.ext
212 212
213 213 The function then tries the following combinations
214 214 .../.../y2009307367.ext
215 215 .../.../Y2009307367.ext
216 216 .../.../x2009307/y2009307367.ext
217 217 .../.../x2009307/Y2009307367.ext
218 218 .../.../X2009307/y2009307367.ext
219 219 .../.../X2009307/Y2009307367.ext
220 220 the last combination of letters being, in this case, identical to the searched file
221 221
222 222 Return:
223 223 If the right combination is found, it returns the full path and the file name;
224 224 otherwise it returns None as the path and the last upper-case combination
225 225 as the filename
226 226 """
227 227 fullfilename = None
228 228 find_flag = False
229 229 filename = None
230 230
231 231 prefixDirList = [None,'d','D']
232 232 if ext.lower() == ".r": #voltage
233 233 prefixFileList = ['d','D']
234 234 elif ext.lower() == ".pdata": #spectra
235 235 prefixFileList = ['p','P']
236 236 else:
237 237 return None, filename
238 238
239 239 #sweep over the possible combinations
240 240 for prefixDir in prefixDirList:
241 241 thispath = path
242 242 if prefixDir != None:
243 243 #build the directory name xYYYYDDD (x=d or x=D)
244 244 if foldercounter == 0:
245 245 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
246 246 else:
247 247 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
248 248 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
249 249 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
250 250 fullfilename = os.path.join( thispath, filename ) #build the full path
251 251
252 252 if os.path.exists( fullfilename ): #verify that it exists
253 253 find_flag = True
254 254 break
255 255 if find_flag:
256 256 break
257 257
258 258 if not(find_flag):
259 259 return None, filename
260 260
261 261 return fullfilename, filename
262 262
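# --- Editor's usage sketch (the base path is hypothetical) ---
# fullname, name = checkForRealPath("/data", foldercounter=0,
#                                   year=2009, doy=307, set=367, ext=".r")
# # tries /data/d2009307367.r, /data/d2009307/d2009307367.r,
# # /data/D2009307/D2009307367.r, ... and returns the first combination that
# # exists, or (None, last tried filename) if none of them do.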
263 263 def isDoyFolder(folder):
264 264 try:
265 265 year = int(folder[1:5])
266 266 except:
267 267 return 0
268 268
269 269 try:
270 270 doy = int(folder[5:8])
271 271 except:
272 272 return 0
273 273
274 274 return 1
275 275
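# --- Editor's note on the folder convention ---
# isDoyFolder() accepts names whose characters [1:5] parse as a year and [5:8]
# as a day of year, e.g. "d2012325", "D2012325" or "D2012325_02" (the optional
# "_NN" suffix is the folder counter handled elsewhere in this module).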
276 276 class JRODataIO:
277 277
278 278 c = 3E8
279 279
280 280 isConfig = False
281 281
282 282 basicHeaderObj = BasicHeader(LOCALTIME)
283 283
284 284 systemHeaderObj = SystemHeader()
285 285
286 286 radarControllerHeaderObj = RadarControllerHeader()
287 287
288 288 processingHeaderObj = ProcessingHeader()
289 289
290 290 online = 0
291 291
292 292 dtype = None
293 293
294 294 pathList = []
295 295
296 296 filenameList = []
297 297
298 298 filename = None
299 299
300 300 ext = None
301 301
302 302 flagIsNewFile = 1
303 303
304 304 flagTimeBlock = 0
305 305
306 306 flagIsNewBlock = 0
307 307
308 308 fp = None
309 309
310 310 firstHeaderSize = 0
311 311
312 312 basicHeaderSize = 24
313 313
314 314 versionFile = 1103
315 315
316 316 fileSize = None
317 317
318 318 ippSeconds = None
319 319
320 320 fileSizeByHeader = None
321 321
322 322 fileIndex = None
323 323
324 324 profileIndex = None
325 325
326 326 blockIndex = None
327 327
328 328 nTotalBlocks = None
329 329
330 330 maxTimeStep = 30
331 331
332 332 lastUTTime = None
333 333
334 334 datablock = None
335 335
336 336 dataOut = None
337 337
338 338 blocksize = None
339 339
340 340 def __init__(self):
341 341
342 342 raise ValueError, "Not implemented"
343 343
344 344 def run(self):
345 345
346 346 raise ValueError, "Not implemented"
347 347
348 348 def getOutput(self):
349 349
350 350 return self.dataOut
351 351
352 352 class JRODataReader(JRODataIO, ProcessingUnit):
353 353
354 354 nReadBlocks = 0
355 355
356 356 delay = 10 #number of seconds to wait for a new file
357 357
358 358 nTries = 3 #number of tries
359 359
360 360 nFiles = 3 #number of files to search
361 361
362 362 path = None
363 363
364 364 foldercounter = 0
365 365
366 366 flagNoMoreFiles = 0
367 367
368 368 datetimeList = []
369 369
370 370 __isFirstTimeOnline = 1
371 371
372 372 __printInfo = True
373 373
374 374 profileIndex = None
375 375
376 376 def __init__(self):
377 377
378 378 """
379 379
380 380 """
381 381
382 382 raise ValueError, "This method has not been implemented"
383 383
384 384
385 385 def createObjByDefault(self):
386 386 """
387 387
388 388 """
389 389 raise ValueError, "This method has not been implemented"
390 390
391 391 def getBlockDimension(self):
392 392
393 393 raise ValueError, "No implemented"
394 394
395 395 def __searchFilesOffLine(self,
396 396 path,
397 397 startDate,
398 398 endDate,
399 399 startTime=datetime.time(0,0,0),
400 400 endTime=datetime.time(23,59,59),
401 401 set=None,
402 402 expLabel='',
403 403 ext='.r',
404 404 walk=True):
405 405
406 406 pathList = []
407 407
408 408 if not walk:
409 409 #pathList.append(path)
410 410 multi_path = path.split(',')
411 411 for single_path in multi_path:
412 412 pathList.append(single_path)
413 413
414 414 else:
415 415 #dirList = []
416 416 multi_path = path.split(',')
417 417 for single_path in multi_path:
418 418 dirList = []
419 419 for thisPath in os.listdir(single_path):
420 420 if not os.path.isdir(os.path.join(single_path,thisPath)):
421 421 continue
422 422 if not isDoyFolder(thisPath):
423 423 continue
424 424
425 425 dirList.append(thisPath)
426 426
427 427 if not(dirList):
428 428 return None, None
429 429
430 430 thisDate = startDate
431 431
432 432 while(thisDate <= endDate):
433 433 year = thisDate.timetuple().tm_year
434 434 doy = thisDate.timetuple().tm_yday
435 435
436 436 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
437 437 if len(matchlist) == 0:
438 438 thisDate += datetime.timedelta(1)
439 439 continue
440 440 for match in matchlist:
441 441 pathList.append(os.path.join(single_path,match,expLabel))
442 442
443 443 thisDate += datetime.timedelta(1)
444 444
445 445 if pathList == []:
446 446 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
447 447 return None, None
448 448
449 449 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
450 450
451 451 filenameList = []
452 452 datetimeList = []
453 453 pathDict = {}
454 454 filenameList_to_sort = []
455 455
456 456 for i in range(len(pathList)):
457 457
458 458 thisPath = pathList[i]
459 459
460 460 fileList = glob.glob1(thisPath, "*%s" %ext)
461 461 fileList.sort()
462 462 pathDict.setdefault(fileList[0])
463 463 pathDict[fileList[0]] = i
464 464 filenameList_to_sort.append(fileList[0])
465 465
466 466 filenameList_to_sort.sort()
467 467
468 468 for file in filenameList_to_sort:
469 469 thisPath = pathList[pathDict[file]]
470 470
471 471 fileList = glob.glob1(thisPath, "*%s" %ext)
472 472 fileList.sort()
473 473
474 474 for file in fileList:
475 475
476 476 filename = os.path.join(thisPath,file)
477 477 thisDatetime = isFileinThisTime(filename, startTime, endTime)
478 478
479 479 if not(thisDatetime):
480 480 continue
481 481
482 482 filenameList.append(filename)
483 483 datetimeList.append(thisDatetime)
484 484
485 485 if not(filenameList):
486 486 print "Any file was found for the time range %s - %s" %(startTime, endTime)
487 487 return None, None
488 488
489 489 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
490 490 print
491 491
492 492 for i in range(len(filenameList)):
493 493 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
494 494
495 495 self.filenameList = filenameList
496 496 self.datetimeList = datetimeList
497 497
498 498 return pathList, filenameList
499 499
500 500 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
501 501
502 502 """
503 503 Searches for the last file of the last folder (whether or not it is given by startDateTime) and
504 504 returns the file found together with other information.
505 505
506 506 Input:
507 507 path : folder containing the files with data
508 508
509 509 expLabel : name of the sub-experiment (subfolder)
510 510
511 511 ext : extension of the files
512 512
513 513 walk : if enabled, the search is performed inside the doy subdirectories (doypath)
514 514
515 515 Return:
516 516 directory : the directory where the found file is located
517 517 filename : the last file of a given folder
518 518 year : the year
519 519 doy : the day of year
520 520 set : the set number of the file
521 521
522 522
523 523 """
524 524 dirList = []
525 525
526 526 if not walk:
527 527 fullpath = path
528 528 foldercounter = 0
529 529 else:
530 530 #keep only the directories
531 531 for thisPath in os.listdir(path):
532 532 if not os.path.isdir(os.path.join(path,thisPath)):
533 533 continue
534 534 if not isDoyFolder(thisPath):
535 535 continue
536 536
537 537 dirList.append(thisPath)
538 538
539 539 if not(dirList):
540 540 return None, None, None, None, None, None
541 541
542 542 dirList = sorted( dirList, key=str.lower )
543 543
544 544 doypath = dirList[-1]
545 545 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
546 546 fullpath = os.path.join(path, doypath, expLabel)
547 547
548 548
549 549 print "%s folder was found: " %(fullpath )
550 550
551 551 if set == None:
552 552 filename = getlastFileFromPath(fullpath, ext)
553 553 else:
554 554 filename = getFileFromSet(fullpath, ext, set)
555 555
556 556 if not(filename):
557 557 return None, None, None, None, None, None
558 558
559 559 print "%s file was found" %(filename)
560 560
561 561 if not(self.__verifyFile(os.path.join(fullpath, filename))):
562 562 return None, None, None, None, None, None
563 563
564 564 year = int( filename[1:5] )
565 565 doy = int( filename[5:8] )
566 566 set = int( filename[8:11] )
567 567
568 568 return fullpath, foldercounter, filename, year, doy, set
569 569
570 570 def __setNextFileOffline(self):
571 571
572 572 idFile = self.fileIndex
573 573
574 574 while (True):
575 575 idFile += 1
576 576 if not(idFile < len(self.filenameList)):
577 577 self.flagNoMoreFiles = 1
578 578 print "No more Files"
579 579 return 0
580 580
581 581 filename = self.filenameList[idFile]
582 582
583 583 if not(self.__verifyFile(filename)):
584 584 continue
585 585
586 586 fileSize = os.path.getsize(filename)
587 587 fp = open(filename,'rb')
588 588 break
589 589
590 590 self.flagIsNewFile = 1
591 591 self.fileIndex = idFile
592 592 self.filename = filename
593 593 self.fileSize = fileSize
594 594 self.fp = fp
595 595
596 596 print "Setting the file: %s"%self.filename
597 597
598 598 return 1
599 599
600 600 def __setNextFileOnline(self):
601 601 """
602 602 Searches, within a specific folder, for the next file that has enough data to be read; if
603 603 no valid file is found it waits for a given time and then looks in the next n possible
604 604 files.
605 605
606 606 Affected:
607 607 self.flagIsNewFile
608 608 self.filename
609 609 self.fileSize
610 610 self.fp
611 611 self.set
612 612 self.flagNoMoreFiles
613 613
614 614 Return:
615 615 0 : if, after searching, the next valid file could not be found
616 616 1 : if the file was opened successfully and is ready to be read
617 617
618 618 Exceptions:
619 619 If a given file cannot be opened
620 620 """
621 621 nFiles = 0
622 622 fileOk_flag = False
623 623 firstTime_flag = True
624 624
625 625 self.set += 1
626 626
627 627 if self.set > 999:
628 628 self.set = 0
629 629 self.foldercounter += 1
630 630
631 631 #look for the first available file
632 632 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
633 633 if fullfilename:
634 634 if self.__verifyFile(fullfilename, False):
635 635 fileOk_flag = True
636 636
637 637 #if no file is found, wait and search again
638 638 if not(fileOk_flag):
639 639 for nFiles in range(self.nFiles+1): #look in the next self.nFiles+1 possible files
640 640
641 641 if firstTime_flag: #on the first pass, run the loop self.nTries times
642 642 tries = self.nTries
643 643 else:
644 644 tries = 1 #after the first pass, try only once
645 645
646 646 for nTries in range( tries ):
647 647 if firstTime_flag:
648 648 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
649 649 time.sleep( self.delay )
650 650 else:
651 651 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
652 652
653 653 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
654 654 if fullfilename:
655 655 if self.__verifyFile(fullfilename):
656 656 fileOk_flag = True
657 657 break
658 658
659 659 if fileOk_flag:
660 660 break
661 661
662 662 firstTime_flag = False
663 663
664 664 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
665 665 self.set += 1
666 666
667 667 if nFiles == (self.nFiles-1): #if the requested file is not found, change folder and search in the next one
668 668 self.set = 0
669 669 self.doy += 1
670 670 self.foldercounter = 0
671 671
672 672 if fileOk_flag:
673 673 self.fileSize = os.path.getsize( fullfilename )
674 674 self.filename = fullfilename
675 675 self.flagIsNewFile = 1
676 676 if self.fp != None: self.fp.close()
677 677 self.fp = open(fullfilename, 'rb')
678 678 self.flagNoMoreFiles = 0
679 679 print 'Setting the file: %s' % fullfilename
680 680 else:
681 681 self.fileSize = 0
682 682 self.filename = None
683 683 self.flagIsNewFile = 0
684 684 self.fp = None
685 685 self.flagNoMoreFiles = 1
686 686 print 'No more Files'
687 687
688 688 return fileOk_flag
689 689
690 690
691 691 def setNextFile(self):
692 692 if self.fp != None:
693 693 self.fp.close()
694 694
695 695 if self.online:
696 696 newFile = self.__setNextFileOnline()
697 697 else:
698 698 newFile = self.__setNextFileOffline()
699 699
700 700 if not(newFile):
701 701 return 0
702 702
703 703 self.__readFirstHeader()
704 704 self.nReadBlocks = 0
705 705 return 1
706 706
707 707 def __waitNewBlock(self):
708 708 """
709 709 Returns 1 if a new data block was found, 0 otherwise.
710 710
711 711 If the reading mode is offline it always returns 0.
712 712 """
713 713 if not self.online:
714 714 return 0
715 715
716 716 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
717 717 return 0
718 718
719 719 currentPointer = self.fp.tell()
720 720
721 721 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
722 722
723 723 for nTries in range( self.nTries ):
724 724
725 725 self.fp.close()
726 726 self.fp = open( self.filename, 'rb' )
727 727 self.fp.seek( currentPointer )
728 728
729 729 self.fileSize = os.path.getsize( self.filename )
730 730 currentSize = self.fileSize - currentPointer
731 731
732 732 if ( currentSize >= neededSize ):
733 733 self.__rdBasicHeader()
734 734 return 1
735 735
736 736 if self.fileSize == self.fileSizeByHeader:
737 737 # self.flagEoF = True
738 738 return 0
739 739
740 740 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
741 741 time.sleep( self.delay )
742 742
743 743
744 744 return 0
745 745
746 746 def waitDataBlock(self,pointer_location):
747 747
748 748 currentPointer = pointer_location
749 749
750 750 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
751 751
752 752 for nTries in range( self.nTries ):
753 753 self.fp.close()
754 754 self.fp = open( self.filename, 'rb' )
755 755 self.fp.seek( currentPointer )
756 756
757 757 self.fileSize = os.path.getsize( self.filename )
758 758 currentSize = self.fileSize - currentPointer
759 759
760 760 if ( currentSize >= neededSize ):
761 761 return 1
762 762
763 763 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
764 764 time.sleep( self.delay )
765 765
766 766 return 0
767 767
768 768
769 769 def __jumpToLastBlock(self):
770 770
771 771 if not(self.__isFirstTimeOnline):
772 772 return
773 773
774 774 csize = self.fileSize - self.fp.tell()
775 775 blocksize = self.processingHeaderObj.blockSize
776 776
777 777 #skip the first data block
778 778 if csize > self.processingHeaderObj.blockSize:
779 779 self.fp.seek(self.fp.tell() + blocksize)
780 780 else:
781 781 return
782 782
783 783 csize = self.fileSize - self.fp.tell()
784 784 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
785 785 while True:
786 786
787 787 if self.fp.tell()<self.fileSize:
788 788 self.fp.seek(self.fp.tell() + neededsize)
789 789 else:
790 790 self.fp.seek(self.fp.tell() - neededsize)
791 791 break
792 792
793 793 # csize = self.fileSize - self.fp.tell()
794 794 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
795 795 # factor = int(csize/neededsize)
796 796 # if factor > 0:
797 797 # self.fp.seek(self.fp.tell() + factor*neededsize)
798 798
799 799 self.flagIsNewFile = 0
800 800 self.__isFirstTimeOnline = 0
801 801
802 802
803 803 def __setNewBlock(self):
804 804
805 805 if self.fp == None:
806 806 return 0
807 807
808 808 if self.online:
809 809 self.__jumpToLastBlock()
810 810
811 811 if self.flagIsNewFile:
812 812 return 1
813 813
814 814 self.lastUTTime = self.basicHeaderObj.utc
815 815 currentSize = self.fileSize - self.fp.tell()
816 816 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
817 817
818 818 if (currentSize >= neededSize):
819 819 self.__rdBasicHeader()
820 820 return 1
821 821
822 822 if self.__waitNewBlock():
823 823 return 1
824 824
825 825 if not(self.setNextFile()):
826 826 return 0
827 827
828 828 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
829 829
830 830 self.flagTimeBlock = 0
831 831
832 832 if deltaTime > self.maxTimeStep:
833 833 self.flagTimeBlock = 1
834 834
835 835 return 1
836 836
837 837
838 838 def readNextBlock(self):
839 839 if not(self.__setNewBlock()):
840 840 return 0
841 841
842 842 if not(self.readBlock()):
843 843 return 0
844 844
845 845 return 1
846 846
847 847 def __rdProcessingHeader(self, fp=None):
848 848 if fp == None:
849 849 fp = self.fp
850 850
851 851 self.processingHeaderObj.read(fp)
852 852
853 853 def __rdRadarControllerHeader(self, fp=None):
854 854 if fp == None:
855 855 fp = self.fp
856 856
857 857 self.radarControllerHeaderObj.read(fp)
858 858
859 859 def __rdSystemHeader(self, fp=None):
860 860 if fp == None:
861 861 fp = self.fp
862 862
863 863 self.systemHeaderObj.read(fp)
864 864
865 865 def __rdBasicHeader(self, fp=None):
866 866 if fp == None:
867 867 fp = self.fp
868 868
869 869 self.basicHeaderObj.read(fp)
870 870
871 871
872 872 def __readFirstHeader(self):
873 873 self.__rdBasicHeader()
874 874 self.__rdSystemHeader()
875 875 self.__rdRadarControllerHeader()
876 876 self.__rdProcessingHeader()
877 877
878 878 self.firstHeaderSize = self.basicHeaderObj.size
879 879
880 880 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
881 881 if datatype == 0:
882 882 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
883 883 elif datatype == 1:
884 884 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
885 885 elif datatype == 2:
886 886 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
887 887 elif datatype == 3:
888 888 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
889 889 elif datatype == 4:
890 890 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
891 891 elif datatype == 5:
892 892 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
893 893 else:
894 894 raise ValueError, 'Data type was not defined'
895 895
896 896 self.dtype = datatype_str
897 897 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
898 898 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
899 899 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
900 900 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
901 901 self.getBlockDimension()
902 902
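# --- Editor's notes on the header decoding above ---
# The DATATYPE bits of processFlags select the complex sample width:
# 0 -> int8, 1 -> int16, 2 -> int32, 3 -> int64, 4 -> float32, 5 -> float64,
# each stored as a ('real', 'imag') pair. The IPP kept in the radar controller
# header is expressed in kilometres of range, so
#   ippSeconds = 2 * 1000 * ipp / c
# converts it to a round-trip time in seconds, e.g. ipp = 300 km gives
# 2*1000*300/3e8 = 0.002 s, i.e. a 500 Hz pulse repetition frequency.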
903 903
904 904 def __verifyFile(self, filename, msgFlag=True):
905 905 msg = None
906 906 try:
907 907 fp = open(filename, 'rb')
908 908 currentPosition = fp.tell()
909 909 except:
910 910 if msgFlag:
911 911 print "The file %s can't be opened" % (filename)
912 912 return False
913 913
914 914 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
915 915
916 916 if neededSize == 0:
917 917 basicHeaderObj = BasicHeader(LOCALTIME)
918 918 systemHeaderObj = SystemHeader()
919 919 radarControllerHeaderObj = RadarControllerHeader()
920 920 processingHeaderObj = ProcessingHeader()
921 921
922 922 try:
923 923 if not( basicHeaderObj.read(fp) ): raise IOError
924 924 if not( systemHeaderObj.read(fp) ): raise IOError
925 925 if not( radarControllerHeaderObj.read(fp) ): raise IOError
926 926 if not( processingHeaderObj.read(fp) ): raise IOError
927 927 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
928 928
929 929 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
930 930
931 931 except:
932 932 if msgFlag:
933 933 print "\tThe file %s is empty or it hasn't enough data" % filename
934 934
935 935 fp.close()
936 936 return False
937 937 else:
938 938 msg = "\tSkipping the file %s because it does not have enough data" %filename
939 939
940 940 fp.close()
941 941 fileSize = os.path.getsize(filename)
942 942 currentSize = fileSize - currentPosition
943 943 if currentSize < neededSize:
944 944 if msgFlag and (msg != None):
945 945 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
946 946 return False
947 947
948 948 return True
949 949
950 950 def setup(self,
951 951 path=None,
952 952 startDate=None,
953 953 endDate=None,
954 954 startTime=datetime.time(0,0,0),
955 955 endTime=datetime.time(23,59,59),
956 956 set=None,
957 957 expLabel = "",
958 958 ext = None,
959 959 online = False,
960 960 delay = 60,
961 961 walk = True):
962 962
963 963 if path == None:
964 964 raise ValueError, "The path is not valid"
965 965
966 966 if ext == None:
967 967 ext = self.ext
968 968
969 969 if online:
970 970 print "Searching files in online mode..."
971 971
972 972 for nTries in range( self.nTries ):
973 973 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
974 974
975 975 if fullpath:
976 976 break
977 977
978 978 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
979 979 time.sleep( self.delay )
980 980
981 981 if not(fullpath):
982 982 print "There 'isn't valied files in %s" % path
983 983 return None
984 984
985 985 self.year = year
986 986 self.doy = doy
987 987 self.set = set - 1
988 988 self.path = path
989 989 self.foldercounter = foldercounter
990 990 last_set = None
991 991
992 992 else:
993 993 print "Searching files in offline mode ..."
994 994 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
995 995 startTime=startTime, endTime=endTime,
996 996 set=set, expLabel=expLabel, ext=ext,
997 997 walk=walk)
998 998
999 999 if not(pathList):
1000 1000 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
1001 1001 datetime.datetime.combine(startDate,startTime).ctime(),
1002 1002 datetime.datetime.combine(endDate,endTime).ctime())
1003 1003
1004 1004 sys.exit(-1)
1005 1005
1006 1006
1007 1007 self.fileIndex = -1
1008 1008 self.pathList = pathList
1009 1009 self.filenameList = filenameList
1010 1010 file_name = os.path.basename(filenameList[-1])
1011 1011 basename, ext = os.path.splitext(file_name)
1012 1012 last_set = int(basename[-3:])
1013 1013
1014 1014 self.online = online
1015 1015 self.delay = delay
1016 1016 ext = ext.lower()
1017 1017 self.ext = ext
1018 1018
1019 1019 if not(self.setNextFile()):
1020 1020 if (startDate!=None) and (endDate!=None):
1021 1021 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1022 1022 elif startDate != None:
1023 1023 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1024 1024 else:
1025 1025 print "No files"
1026 1026
1027 1027 sys.exit(-1)
1028 1028
1029 1029 # self.updateDataHeader()
1030 1030 if last_set != None:
1031 1031 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1032 1032 return self.dataOut
1033 1033
1034 1034 def getBasicHeader(self):
1035 1035
1036 1036 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1037 1037
1038 1038 self.dataOut.flagTimeBlock = self.flagTimeBlock
1039 1039
1040 1040 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1041 1041
1042 1042 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1043 1043
1044 1044 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1045 1045
1046 1046 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1047 1047
1048 1048 def getFirstHeader(self):
1049 1049
1050 1050 raise ValueError, "This method has not been implemented"
1051 1051
1052 1052 def getData():
1053 1053
1054 1054 raise ValueError, "This method has not been implemented"
1055 1055
1056 1056 def hasNotDataInBuffer():
1057 1057
1058 1058 raise ValueError, "This method has not been implemented"
1059 1059
1060 1060 def readBlock():
1061 1061
1062 1062 raise ValueError, "This method has not been implemented"
1063 1063
1064 1064 def isEndProcess(self):
1065 1065
1066 1066 return self.flagNoMoreFiles
1067 1067
1068 1068 def printReadBlocks(self):
1069 1069
1070 1070 print "Number of read blocks per file %04d" %self.nReadBlocks
1071 1071
1072 1072 def printTotalBlocks(self):
1073 1073
1074 1074 print "Number of read blocks %04d" %self.nTotalBlocks
1075 1075
1076 1076 def printNumberOfBlock(self):
1077 1077
1078 1078 if self.flagIsNewBlock:
1079 1079 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1080 1080 self.dataOut.blocknow = self.basicHeaderObj.dataBlock
1081 1081 def printInfo(self):
1082 1082
1083 1083 if self.__printInfo == False:
1084 1084 return
1085 1085
1086 1086 self.basicHeaderObj.printInfo()
1087 1087 self.systemHeaderObj.printInfo()
1088 1088 self.radarControllerHeaderObj.printInfo()
1089 1089 self.processingHeaderObj.printInfo()
1090 1090
1091 1091 self.__printInfo = False
1092 1092
1093 1093
1094 1094 def run(self, **kwargs):
1095 1095
1096 1096 if not(self.isConfig):
1097 1097
1098 1098 # self.dataOut = dataOut
1099 1099 self.setup(**kwargs)
1100 1100 self.isConfig = True
1101 1101
1102 1102 self.getData()
1103 1103
1104 1104 class JRODataWriter(JRODataIO, Operation):
1105 1105
1106 1106 """
1107 1107 This class writes data to processed data files (.r or .pdata). Data is always
1108 1108 written in blocks.
1109 1109 """
1110 1110
1111 1111 blockIndex = 0
1112 1112
1113 1113 path = None
1114 1114
1115 1115 setFile = None
1116 1116
1117 1117 profilesPerBlock = None
1118 1118
1119 1119 blocksPerFile = None
1120 1120
1121 1121 nWriteBlocks = 0
1122 1122
1123 1123 def __init__(self, dataOut=None):
1124 1124 raise ValueError, "Not implemented"
1125 1125
1126 1126
1127 1127 def hasAllDataInBuffer(self):
1128 1128 raise ValueError, "Not implemented"
1129 1129
1130 1130
1131 1131 def setBlockDimension(self):
1132 1132 raise ValueError, "Not implemented"
1133 1133
1134 1134
1135 1135 def writeBlock(self):
1136 1136 raise ValueError, "No implemented"
1137 1137
1138 1138
1139 1139 def putData(self):
1140 1140 raise ValueError, "No implemented"
1141 1141
1142 1142
1143 1143 def setBasicHeader(self):
1144 1144
1145 1145 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1146 1146 self.basicHeaderObj.version = self.versionFile
1147 1147 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1148 1148
1149 1149 utc = numpy.floor(self.dataOut.utctime)
1150 1150 milisecond = (self.dataOut.utctime - utc)* 1000.0
1151 1151
1152 1152 self.basicHeaderObj.utc = utc
1153 1153 self.basicHeaderObj.miliSecond = milisecond
1154 1154 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1155 1155 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1156 1156 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1157 1157
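# --- Editor's example of the timestamp split above (the value is illustrative) ---
# utctime is a float of seconds, e.g. utctime = 1353424800.25 gives
#   utc        = numpy.floor(1353424800.25) = 1353424800.0
#   milisecond = (1353424800.25 - 1353424800.0) * 1000.0 = 250.0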
1158 1158 def setFirstHeader(self):
1159 1159 """
1160 1160 Gets a copy of the First Header
1161 1161
1162 1162 Affected:
1163 1163
1164 1164 self.basicHeaderObj
1165 1165 self.systemHeaderObj
1166 1166 self.radarControllerHeaderObj
1167 1167 self.processingHeaderObj
1168 1168
1169 1169 Return:
1170 1170 None
1171 1171 """
1172 1172
1173 1173 raise ValueError, "No implemented"
1174 1174
1175 1175 def __writeFirstHeader(self):
1176 1176 """
1177 1177 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1178 1178
1179 1179 Affected:
1180 1180 __dataType
1181 1181
1182 1182 Return:
1183 1183 None
1184 1184 """
1185 1185
1186 1186 # COMPUTE PARAMETERS
1187 1187
1188 1188 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1189 1189 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1190 1190
1191 1191 self.basicHeaderObj.write(self.fp)
1192 1192 self.systemHeaderObj.write(self.fp)
1193 1193 self.radarControllerHeaderObj.write(self.fp)
1194 1194 self.processingHeaderObj.write(self.fp)
1195 1195
1196 1196 self.dtype = self.dataOut.dtype
1197 1197
1198 1198 def __setNewBlock(self):
1199 1199 """
1200 1200 If it is a new file, writes the First Header; otherwise writes only the Basic Header
1201 1201
1202 1202 Return:
1203 1203 0 : if nothing could be written
1204 1204 1 : if the Basic or the First Header was written
1205 1205 """
1206 1206 if self.fp == None:
1207 1207 self.setNextFile()
1208 1208
1209 1209 if self.flagIsNewFile:
1210 1210 return 1
1211 1211
1212 1212 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1213 1213 self.basicHeaderObj.write(self.fp)
1214 1214 return 1
1215 1215
1216 1216 if not( self.setNextFile() ):
1217 1217 return 0
1218 1218
1219 1219 return 1
1220 1220
1221 1221
1222 1222 def writeNextBlock(self):
1223 1223 """
1224 1224 Selects the next data block and writes it to a file
1225 1225
1226 1226 Return:
1227 1227 0 : if the data block could not be written
1228 1228 1 : if the data block was written
1229 1229 """
1230 1230 if not( self.__setNewBlock() ):
1231 1231 return 0
1232 1232
1233 1233 self.writeBlock()
1234 1234
1235 1235 return 1
1236 1236
1237 1237 def setNextFile(self):
1238 1238 """
1239 1239 Determines the next file to be written
1240 1240
1241 1241 Affected:
1242 1242 self.filename
1243 1243 self.subfolder
1244 1244 self.fp
1245 1245 self.setFile
1246 1246 self.flagIsNewFile
1247 1247
1248 1248 Return:
1249 1249 0 : if the file cannot be written
1250 1250 1 : if the file is ready to be written
1251 1251 """
1252 1252 ext = self.ext
1253 1253 path = self.path
1254 1254
1255 1255 if self.fp != None:
1256 1256 self.fp.close()
1257 1257
1258 1258 timeTuple = time.localtime( self.dataOut.utctime)
1259 1259 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1260 1260
1261 1261 fullpath = os.path.join( path, subfolder )
1262 1262 if not( os.path.exists(fullpath) ):
1263 1263 os.mkdir(fullpath)
1264 1264 self.setFile = -1 #initialize the set counter
1265 1265 else:
1266 1266 filesList = os.listdir( fullpath )
1267 1267 if len( filesList ) > 0:
1268 1268 filesList = sorted( filesList, key=str.lower )
1269 1269 filen = filesList[-1]
1270 1270 # the filename must have the following format
1271 1271 # 0 1234 567 89A BCDE (hex)
1272 1272 # x YYYY DDD SSS .ext
1273 1273 if isNumber( filen[8:11] ):
1274 1274 self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file
1275 1275 else:
1276 1276 self.setFile = -1
1277 1277 else:
1278 1278 self.setFile = -1 #initialize the set counter
1279 1279
1280 1280 setFile = self.setFile
1281 1281 setFile += 1
1282 1282
1283 1283 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1284 1284 timeTuple.tm_year,
1285 1285 timeTuple.tm_yday,
1286 1286 setFile,
1287 1287 ext )
1288 1288
1289 1289 filename = os.path.join( path, subfolder, file )
1290 1290
1291 1291 fp = open( filename,'wb' )
1292 1292
1293 1293 self.blockIndex = 0
1294 1294
1295 1295 #save the attributes
1296 1296 self.filename = filename
1297 1297 self.subfolder = subfolder
1298 1298 self.fp = fp
1299 1299 self.setFile = setFile
1300 1300 self.flagIsNewFile = 1
1301 1301
1302 1302 self.setFirstHeader()
1303 1303
1304 1304 print 'Writing the file: %s'%self.filename
1305 1305
1306 1306 self.__writeFirstHeader()
1307 1307
1308 1308 return 1
1309 1309
1310 1310 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
1311 1311         """
1312 1312         Sets the format in which the data will be saved and writes the First Header.
1313 1313         
1314 1314         Inputs:
1315 1315             path   : destination path where the new files will be written
1316 1316             format : format in which a file will be saved
1317 1317             set    : set number of the file
1318 1318         
1319 1319         Return:
1320 1320             0 : if the setup failed
1321 1321             1 : if the setup succeeded
1322 1322         """
1323 1323
1324 1324 if ext == None:
1325 1325 ext = self.ext
1326 1326
1327 1327 ext = ext.lower()
1328 1328
1329 1329 self.ext = ext
1330 1330
1331 1331 self.path = path
1332 1332
1333 1333 self.setFile = set - 1
1334 1334
1335 1335 self.blocksPerFile = blocksPerFile
1336 1336
1337 1337 self.profilesPerBlock = profilesPerBlock
1338 1338
1339 1339 self.dataOut = dataOut
1340 1340
1341 1341 if not(self.setNextFile()):
1342 1342 print "There isn't a next file"
1343 1343 return 0
1344 1344
1345 1345 self.setBlockDimension()
1346 1346
1347 1347 return 1
1348 1348
1349 1349 def run(self, dataOut, **kwargs):
1350 1350
1351 1351 if not(self.isConfig):
1352 1352
1353 1353 self.setup(dataOut, **kwargs)
1354 1354 self.isConfig = True
1355 1355
1356 1356 self.putData()
1357 1357
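
# ---------------------------------------------------------------------------
# Editor's sketch (illustrative only, not part of the original API): the
# writer operations above share a configure-once pattern in run(); setup() is
# called on the first invocation and every later call only pushes data with
# putData().  The helper below mirrors that flow for any object exposing the
# assumed attributes isConfig, setup() and putData().
def _example_run_once(operation, dataOut, **kwargs):
    if not operation.isConfig:
        operation.setup(dataOut, **kwargs)
        operation.isConfig = True
    operation.putData()
# ---------------------------------------------------------------------------
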
1358 1358 class VoltageReader(JRODataReader):
1359 1359 """
1360 1360     Reads voltage data from files in rawdata format (.r). The data is always
1361 1361     read block by block. Each block read (a 3-dimensional array:
1362 1362     profiles*heights*channels) is stored in the "buffer" variable.
1363 1363     
1364 1364         profiles * heights * channels
1365 1365     
1366 1366     This class holds instances (objects) of the BasicHeader, SystemHeader,
1367 1367     RadarControllerHeader and Voltage classes. The first three store the data
1368 1368     header information (metadata), and the fourth (Voltage) retrieves and stores one
1369 1369     profile of data from the "buffer" every time the "getData" method is called.
1370 1370
1371 1371 Example:
1372 1372
1373 1373 dpath = "/home/myuser/data"
1374 1374
1375 1375 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1376 1376
1377 1377 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1378 1378
1379 1379 readerObj = VoltageReader()
1380 1380
1381 1381 readerObj.setup(dpath, startTime, endTime)
1382 1382
1383 1383 while(True):
1384 1384
1385 1385 #to get one profile
1386 1386 profile = readerObj.getData()
1387 1387
1388 1388 #print the profile
1389 1389 print profile
1390 1390
1391 1391 #If you want to see all datablock
1392 1392 print readerObj.datablock
1393 1393
1394 1394 if readerObj.flagNoMoreFiles:
1395 1395 break
1396 1396
1397 1397 """
1398 1398
1399 1399 ext = ".r"
1400 1400
1401 1401 optchar = "D"
1402 1402 dataOut = None
1403 1403
1404 1404
1405 1405 def __init__(self):
1406 1406         """
1407 1407         Initializes the VoltageReader class for reading voltage data.
1408 1408         
1409 1409         Input:
1410 1410             dataOut : object of the Voltage class. This object is used to
1411 1411                 store one profile of data every time a request is made
1412 1412                 (getData). The profile is taken from the data buffer;
1413 1413                 if the buffer is empty, a new data block is read from
1414 1414                 the file.
1415 1415                 If this parameter is not given, one is created internally.
1416 1416         
1417 1417         Affected:
1418 1418             self.dataOut
1419 1419         
1420 1420         Return:
1421 1421             None
1422 1422         """
1423 1423
1424 1424 self.isConfig = False
1425 1425
1426 1426 self.datablock = None
1427 1427
1428 1428 self.utc = 0
1429 1429
1430 1430 self.ext = ".r"
1431 1431
1432 1432 self.optchar = "D"
1433 1433
1434 1434 self.basicHeaderObj = BasicHeader(LOCALTIME)
1435 1435
1436 1436 self.systemHeaderObj = SystemHeader()
1437 1437
1438 1438 self.radarControllerHeaderObj = RadarControllerHeader()
1439 1439
1440 1440 self.processingHeaderObj = ProcessingHeader()
1441 1441
1442 1442 self.online = 0
1443 1443
1444 1444 self.fp = None
1445 1445
1446 1446 self.idFile = None
1447 1447
1448 1448 self.dtype = None
1449 1449
1450 1450 self.fileSizeByHeader = None
1451 1451
1452 1452 self.filenameList = []
1453 1453
1454 1454 self.filename = None
1455 1455
1456 1456 self.fileSize = None
1457 1457
1458 1458 self.firstHeaderSize = 0
1459 1459
1460 1460 self.basicHeaderSize = 24
1461 1461
1462 1462 self.pathList = []
1463 1463
1464 1464 self.filenameList = []
1465 1465
1466 1466 self.lastUTTime = 0
1467 1467
1468 1468 self.maxTimeStep = 30
1469 1469
1470 1470 self.flagNoMoreFiles = 0
1471 1471
1472 1472 self.set = 0
1473 1473
1474 1474 self.path = None
1475 1475
1476 1476 self.profileIndex = 2**32-1
1477 1477
1478 1478 self.delay = 3 #seconds
1479 1479
1480 1480 self.nTries = 3 #quantity tries
1481 1481
1482 1482 self.nFiles = 3 #number of files for searching
1483 1483
1484 1484 self.nReadBlocks = 0
1485 1485
1486 1486 self.flagIsNewFile = 1
1487 1487
1488 1488 self.__isFirstTimeOnline = 1
1489 1489
1490 1490 self.ippSeconds = 0
1491 1491
1492 1492 self.flagTimeBlock = 0
1493 1493
1494 1494 self.flagIsNewBlock = 0
1495 1495
1496 1496 self.nTotalBlocks = 0
1497 1497
1498 1498 self.blocksize = 0
1499 1499
1500 1500 self.dataOut = self.createObjByDefault()
1501 1501
1502 1502 def createObjByDefault(self):
1503 1503
1504 1504 dataObj = Voltage()
1505 1505
1506 1506 return dataObj
1507 1507
1508 1508 def __hasNotDataInBuffer(self):
1509 1509 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1510 1510 return 1
1511 1511 return 0
1512 1512
1513 1513
1514 1514 def getBlockDimension(self):
1515 1515         """
1516 1516         Computes the number of points to read per data block.
1517 1517         
1518 1518         Affected:
1519 1519             self.blocksize
1520 1520         
1521 1521         Return:
1522 1522             None
1523 1523         """
1524 1524 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1525 1525 self.blocksize = pts2read
1526 1526
1527 1527
1528 1528 def readBlock(self):
1529 1529         """
1530 1530         readBlock reads the data block from the current position of the file
1531 1531         pointer (self.fp) and updates every parameter related to the data block
1532 1532         (metadata + data). The data read is stored in the buffer and the buffer
1533 1533         counter is reset to 0.
1534 1534         
1535 1535         Inputs:
1536 1536             None
1537 1537         
1538 1538         Return:
1539 1539             None
1540 1540         
1541 1541         Affected:
1542 1542             self.profileIndex
1543 1543             self.datablock
1544 1544             self.flagIsNewFile
1545 1545             self.flagIsNewBlock
1546 1546             self.nTotalBlocks
1547 1547         
1548 1548         Exceptions:
1549 1549             If a block read is not a valid block
1550 1550         """
1551 1551 current_pointer_location = self.fp.tell()
1552 1552 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1553 1553
1554 1554 try:
1555 1555 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1556 1556 except:
1557 1557 #print "The read block (%3d) has not enough data" %self.nReadBlocks
1558 1558
1559 1559 if self.waitDataBlock(pointer_location=current_pointer_location):
1560 1560 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1561 1561 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1562 1562 # return 0
1563 1563
1564 1564 junk = numpy.transpose(junk, (2,0,1))
1565 1565 self.datablock = junk['real'] + junk['imag']*1j
1566 1566
1567 1567 self.profileIndex = 0
1568 1568
1569 1569 self.flagIsNewFile = 0
1570 1570 self.flagIsNewBlock = 1
1571 1571
1572 1572 self.nTotalBlocks += 1
1573 1573 self.nReadBlocks += 1
1574 1574
1575 1575 return 1
1576 1576
1577 1577 def getFirstHeader(self):
1578 1578
1579 1579 self.dataOut.dtype = self.dtype
1580 1580
1581 1581 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1582 1582
1583 1583 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1584 1584
1585 1585 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1586 1586
1587 1587 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1588 1588
1589 1589 self.dataOut.ippSeconds = self.ippSeconds
1590 1590
1591 1591 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1592 1592
1593 1593 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1594 1594
1595 1595 self.dataOut.flagShiftFFT = False
1596 1596
1597 1597 if self.radarControllerHeaderObj.code != None:
1598 1598
1599 1599 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1600 1600
1601 1601 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1602 1602
1603 1603 self.dataOut.code = self.radarControllerHeaderObj.code
1604 1604
1605 1605 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1606 1606
1607 1607 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1608 1608
1609 1609         self.dataOut.flagDecodeData = False #assume the data has not been decoded
1610 1610         
1611 1611         self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1612 1612
1613 1613 self.dataOut.flagShiftFFT = False
1614 1614
1615 1615 def getData(self):
1616 1616         """
1617 1617         getData takes one unit of data from the read buffer and copies it, with all
1618 1618         its associated parameters (metadata), to the "Voltage" class. When there is no
1619 1619         data left in the read buffer, a new read of the data blocks is done using "readNextBlock".
1620 1620         
1621 1621         It also increments the buffer counter by 1.
1622 1622         
1623 1623         Return:
1624 1624             data : returns one profile of voltages (heights * channels) copied from the
1625 1625                 buffer. If there are no more files to read, it returns None.
1626 1626         
1627 1627         Affected:
1628 1628             self.dataOut
1629 1629             self.profileIndex
1630 1630             self.flagTimeBlock
1631 1631             self.flagIsNewBlock
1632 1632         """
1637 1637
1638 1638 if self.flagNoMoreFiles:
1639 1639 self.dataOut.flagNoData = True
1640 1640 print 'Process finished'
1641 1641 return 0
1642 1642
1643 1643 self.flagTimeBlock = 0
1644 1644 self.flagIsNewBlock = 0
1645 1645
1646 1646 if self.__hasNotDataInBuffer():
1647 1647
1648 1648 if not( self.readNextBlock() ):
1649 1649 return 0
1650 1650
1651 1651 self.getFirstHeader()
1652 1652
1653 1653 if self.datablock == None:
1654 1654 self.dataOut.flagNoData = True
1655 1655 return 0
1656 1656
1657 1657 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1658 1658
1659 1659 self.dataOut.flagNoData = False
1660 1660
1661 1661 self.getBasicHeader()
1662 1662
1663 1663 self.profileIndex += 1
1664 1664
1665 1665 self.dataOut.realtime = self.online
1666 1666
1667 1667 return self.dataOut.data
1668 1668
1669 1669
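
# ---------------------------------------------------------------------------
# Editor's sketch (illustrative only).  VoltageReader.readBlock() above turns
# the flat structured array read from disk into a complex voltage cube of
# shape (channels, profiles, heights).  The stand-alone function below
# reproduces that reshaping on a synthetic zero-filled block; the dtype and
# dimensions are assumptions chosen only for the example.
def _example_block_to_complex(profilesPerBlock=4, nHeights=8, nChannels=2):
    import numpy
    raw_dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
    junk = numpy.zeros(profilesPerBlock * nHeights * nChannels, dtype=raw_dtype)
    junk = junk.reshape((profilesPerBlock, nHeights, nChannels))
    junk = numpy.transpose(junk, (2, 0, 1))           # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j      # complex voltages
    return datablock
# ---------------------------------------------------------------------------
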
1670 1670 class VoltageWriter(JRODataWriter):
1671 1671 """
1672 1672     Writes voltage data to processed files (.r). The data is always written
1673 1673     block by block.
1674 1674 """
1675 1675
1676 1676 ext = ".r"
1677 1677
1678 1678 optchar = "D"
1679 1679
1680 1680 shapeBuffer = None
1681 1681
1682 1682
1683 1683 def __init__(self):
1684 1684 """
1685 1685         Initializes the VoltageWriter class for writing voltage data.
1686 1686
1687 1687 Affected:
1688 1688 self.dataOut
1689 1689
1690 1690 Return: None
1691 1691 """
1692 1692
1693 1693 self.nTotalBlocks = 0
1694 1694
1695 1695 self.profileIndex = 0
1696 1696
1697 1697 self.isConfig = False
1698 1698
1699 1699 self.fp = None
1700 1700
1701 1701 self.flagIsNewFile = 1
1702 1702
1703 1703 self.nTotalBlocks = 0
1704 1704
1705 1705 self.flagIsNewBlock = 0
1706 1706
1707 1707 self.setFile = None
1708 1708
1709 1709 self.dtype = None
1710 1710
1711 1711 self.path = None
1712 1712
1713 1713 self.filename = None
1714 1714
1715 1715 self.basicHeaderObj = BasicHeader(LOCALTIME)
1716 1716
1717 1717 self.systemHeaderObj = SystemHeader()
1718 1718
1719 1719 self.radarControllerHeaderObj = RadarControllerHeader()
1720 1720
1721 1721 self.processingHeaderObj = ProcessingHeader()
1722 1722
1723 1723 def hasAllDataInBuffer(self):
1724 1724 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1725 1725 return 1
1726 1726 return 0
1727 1727
1728 1728
1729 1729 def setBlockDimension(self):
1730 1730 """
1731 1731         Builds the dimensional shapes of the data sub-blocks that make up a block.
1732 1732         
1733 1733         Affected:
1734 1734             self.shapeBuffer
1735 1735             self.datablock
1736 1736         
1738 1738 Return: None
1739 1739 """
1740 1740 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1741 1741 self.processingHeaderObj.nHeights,
1742 1742 self.systemHeaderObj.nChannels)
1743 1743
1744 1744 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1745 1745 self.processingHeaderObj.profilesPerBlock,
1746 1746 self.processingHeaderObj.nHeights),
1747 1747 dtype=numpy.dtype('complex64'))
1748 1748
1749 1749
1750 1750 def writeBlock(self):
1751 1751 """
1752 1752         Writes the buffer to the designated file.
1753 1753
1754 1754 Affected:
1755 1755 self.profileIndex
1756 1756 self.flagIsNewFile
1757 1757 self.flagIsNewBlock
1758 1758 self.nTotalBlocks
1759 1759 self.blockIndex
1760 1760
1761 1761 Return: None
1762 1762 """
1763 1763 data = numpy.zeros( self.shapeBuffer, self.dtype )
1764 1764
1765 1765 junk = numpy.transpose(self.datablock, (1,2,0))
1766 1766
1767 1767 data['real'] = junk.real
1768 1768 data['imag'] = junk.imag
1769 1769
1770 1770 data = data.reshape( (-1) )
1771 1771
1772 1772 data.tofile( self.fp )
1773 1773
1774 1774 self.datablock.fill(0)
1775 1775
1776 1776 self.profileIndex = 0
1777 1777 self.flagIsNewFile = 0
1778 1778 self.flagIsNewBlock = 1
1779 1779
1780 1780 self.blockIndex += 1
1781 1781 self.nTotalBlocks += 1
1782 1782
1783 1783 def putData(self):
1784 1784 """
1785 1785         Assembles a data block and then writes it to a file.
1786 1786
1787 1787 Affected:
1788 1788 self.flagIsNewBlock
1789 1789 self.profileIndex
1790 1790
1791 1791 Return:
1792 1792             0 : if there is no data or no more files can be written
1793 1793             1 : if the data of one block was written to a file
1794 1794 """
1795 1795 if self.dataOut.flagNoData:
1796 1796 return 0
1797 1797
1798 1798 self.flagIsNewBlock = 0
1799 1799
1800 1800 if self.dataOut.flagTimeBlock:
1801 1801
1802 1802 self.datablock.fill(0)
1803 1803 self.profileIndex = 0
1804 1804 self.setNextFile()
1805 1805
1806 1806 if self.profileIndex == 0:
1807 1807 self.setBasicHeader()
1808 1808
1809 1809 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1810 1810
1811 1811 self.profileIndex += 1
1812 1812
1813 1813 if self.hasAllDataInBuffer():
1814 1814 #if self.flagIsNewFile:
1815 1815 self.writeNextBlock()
1816 1816 # self.setFirstHeader()
1817 1817
1818 1818 return 1
1819 1819
1820 1820 def __getProcessFlags(self):
1821 1821
1822 1822 processFlags = 0
1823 1823
1824 1824 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1825 1825 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1826 1826 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1827 1827 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1828 1828 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1829 1829 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1830 1830
1831 1831 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1832 1832
1833 1833
1834 1834
1835 1835 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1836 1836 PROCFLAG.DATATYPE_SHORT,
1837 1837 PROCFLAG.DATATYPE_LONG,
1838 1838 PROCFLAG.DATATYPE_INT64,
1839 1839 PROCFLAG.DATATYPE_FLOAT,
1840 1840 PROCFLAG.DATATYPE_DOUBLE]
1841 1841
1842 1842
1843 1843 for index in range(len(dtypeList)):
1844 1844 if self.dataOut.dtype == dtypeList[index]:
1845 1845 dtypeValue = datatypeValueList[index]
1846 1846 break
1847 1847
1848 1848 processFlags += dtypeValue
1849 1849
1850 1850 if self.dataOut.flagDecodeData:
1851 1851 processFlags += PROCFLAG.DECODE_DATA
1852 1852
1853 1853 if self.dataOut.flagDeflipData:
1854 1854 processFlags += PROCFLAG.DEFLIP_DATA
1855 1855
1856 1856 if self.dataOut.code != None:
1857 1857 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1858 1858
1859 1859 if self.dataOut.nCohInt > 1:
1860 1860 processFlags += PROCFLAG.COHERENT_INTEGRATION
1861 1861
1862 1862 return processFlags
1863 1863
1864 1864
1865 1865 def __getBlockSize(self):
1866 1866 '''
1867 1867         Determines the number of bytes in a Voltage-type data block.
1868 1868 '''
1869 1869
1870 1870 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1871 1871 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1872 1872 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1873 1873 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1874 1874 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1875 1875 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1876 1876
1877 1877 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1878 1878 datatypeValueList = [1,2,4,8,4,8]
1879 1879 for index in range(len(dtypeList)):
1880 1880 if self.dataOut.dtype == dtypeList[index]:
1881 1881 datatypeValue = datatypeValueList[index]
1882 1882 break
1883 1883
1884 1884 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
1885 1885
1886 1886 return blocksize
1887 1887
1888 1888 def setFirstHeader(self):
1889 1889
1890 1890 """
1891 1891         Gets a copy of the First Header.
1892 1892
1893 1893 Affected:
1894 1894 self.systemHeaderObj
1895 1895 self.radarControllerHeaderObj
1896 1896 self.dtype
1897 1897
1898 1898 Return:
1899 1899 None
1900 1900 """
1901 1901
1902 1902 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1903 1903 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1904 1904 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1905 1905
1906 1906 self.setBasicHeader()
1907 1907
1908 1908 processingHeaderSize = 40 # bytes
1909 1909 self.processingHeaderObj.dtype = 0 # Voltage
1910 1910 self.processingHeaderObj.blockSize = self.__getBlockSize()
1911 1911 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1912 1912 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1913 1913 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1914 1914 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1915 1915 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1916 1916 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1917 1917 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1918 1918
1919 1919 # if self.dataOut.code != None:
1920 1920 # self.processingHeaderObj.code = self.dataOut.code
1921 1921 # self.processingHeaderObj.nCode = self.dataOut.nCode
1922 1922 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
1923 1923 # codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1924 1924 # processingHeaderSize += codesize
1925 1925
1926 1926 if self.processingHeaderObj.nWindows != 0:
1927 1927 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1928 1928 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1929 1929 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1930 1930 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1931 1931 processingHeaderSize += 12
1932 1932
1933 1933 self.processingHeaderObj.size = processingHeaderSize
1934 1934
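
# ---------------------------------------------------------------------------
# Editor's note (hypothetical numbers).  VoltageWriter.__getBlockSize() above
# counts nHeights * nChannels * profilesPerBlock complex samples, each stored
# as a real/imag pair of `datatypeValue` bytes.  For example, 100 heights,
# 2 channels, 64 profiles and 2-byte shorts give
# 100 * 2 * 64 * 2 * 2 = 51200 bytes per voltage block.
def _example_voltage_block_size(nHeights=100, nChannels=2, profilesPerBlock=64, datatypeValue=2):
    return int(nHeights * nChannels * profilesPerBlock * datatypeValue * 2)
# ---------------------------------------------------------------------------
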
1935 1935 class SpectraReader(JRODataReader):
1936 1936 """
1937 1937     Reads spectra data from processed files (.pdata). The data is always read
1938 1938     block by block. The data read (3-dimensional arrays) is stored in three
1939 1939     buffers for the Self Spectra, the Cross Spectra and the DC Channels:
1940 1940     
1941 1941         equalChannelPairs * heights * profiles      (Self Spectra)
1942 1942         differentChannelPairs * heights * profiles  (Cross Spectra)
1943 1943         channels * heights                          (DC Channels)
1944 1944     
1945 1945     This class holds instances (objects) of the BasicHeader, SystemHeader,
1946 1946     RadarControllerHeader and Spectra classes. The first three store the data
1947 1947     header information (metadata), and the fourth (Spectra) retrieves and stores a
1948 1948     block of data from the "buffer" every time the "getData" method is called.
1949 1949
1950 1950 Example:
1951 1951 dpath = "/home/myuser/data"
1952 1952
1953 1953 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1954 1954
1955 1955 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1956 1956
1957 1957 readerObj = SpectraReader()
1958 1958
1959 1959 readerObj.setup(dpath, startTime, endTime)
1960 1960
1961 1961 while(True):
1962 1962
1963 1963 readerObj.getData()
1964 1964
1965 1965 print readerObj.data_spc
1966 1966
1967 1967 print readerObj.data_cspc
1968 1968
1969 1969 print readerObj.data_dc
1970 1970
1971 1971 if readerObj.flagNoMoreFiles:
1972 1972 break
1973 1973
1974 1974 """
1975 1975
1976 1976 pts2read_SelfSpectra = 0
1977 1977
1978 1978 pts2read_CrossSpectra = 0
1979 1979
1980 1980 pts2read_DCchannels = 0
1981 1981
1982 1982 ext = ".pdata"
1983 1983
1984 1984 optchar = "P"
1985 1985
1986 1986 dataOut = None
1987 1987
1988 1988 nRdChannels = None
1989 1989
1990 1990 nRdPairs = None
1991 1991
1992 1992 rdPairList = []
1993 1993
1994 1994 def __init__(self):
1995 1995 """
1996 1996         Initializes the SpectraReader class for reading spectra data.
1997 1997         
1998 1998         Inputs:
1999 1999             dataOut : object of the Spectra class. This object is used to
2000 2000                 store one unit of data every time a request is made
2001 2001                 (getData). The data is taken from the data buffer;
2002 2002                 if the buffer is empty, a new data block is read from
2003 2003                 the file.
2004 2004                 If this parameter is not given, one is created internally.
2005 2005         
2006 2006         Affected:
2007 2007             self.dataOut
2008 2008         
2009 2009         Return : None
2010 2010 """
2011 2011
2012 2012 self.isConfig = False
2013 2013
2014 2014 self.pts2read_SelfSpectra = 0
2015 2015
2016 2016 self.pts2read_CrossSpectra = 0
2017 2017
2018 2018 self.pts2read_DCchannels = 0
2019 2019
2020 2020 self.datablock = None
2021 2021
2022 2022 self.utc = None
2023 2023
2024 2024 self.ext = ".pdata"
2025 2025
2026 2026 self.optchar = "P"
2027 2027
2028 2028 self.basicHeaderObj = BasicHeader(LOCALTIME)
2029 2029
2030 2030 self.systemHeaderObj = SystemHeader()
2031 2031
2032 2032 self.radarControllerHeaderObj = RadarControllerHeader()
2033 2033
2034 2034 self.processingHeaderObj = ProcessingHeader()
2035 2035
2036 2036 self.online = 0
2037 2037
2038 2038 self.fp = None
2039 2039
2040 2040 self.idFile = None
2041 2041
2042 2042 self.dtype = None
2043 2043
2044 2044 self.fileSizeByHeader = None
2045 2045
2046 2046 self.filenameList = []
2047 2047
2048 2048 self.filename = None
2049 2049
2050 2050 self.fileSize = None
2051 2051
2052 2052 self.firstHeaderSize = 0
2053 2053
2054 2054 self.basicHeaderSize = 24
2055 2055
2056 2056 self.pathList = []
2057 2057
2058 2058 self.lastUTTime = 0
2059 2059
2060 2060 self.maxTimeStep = 30
2061 2061
2062 2062 self.flagNoMoreFiles = 0
2063 2063
2064 2064 self.set = 0
2065 2065
2066 2066 self.path = None
2067 2067
2068 2068 self.delay = 60 #seconds
2069 2069
2070 2070 self.nTries = 3 #quantity tries
2071 2071
2072 2072 self.nFiles = 3 #number of files for searching
2073 2073
2074 2074 self.nReadBlocks = 0
2075 2075
2076 2076 self.flagIsNewFile = 1
2077 2077
2078 2078 self.__isFirstTimeOnline = 1
2079 2079
2080 2080 self.ippSeconds = 0
2081 2081
2082 2082 self.flagTimeBlock = 0
2083 2083
2084 2084 self.flagIsNewBlock = 0
2085 2085
2086 2086 self.nTotalBlocks = 0
2087 2087
2088 2088 self.blocksize = 0
2089 2089
2090 2090 self.dataOut = self.createObjByDefault()
2091 2091
2092 2092 self.profileIndex = 1 #Always
2093 2093
2094 2094
2095 2095 def createObjByDefault(self):
2096 2096
2097 2097 dataObj = Spectra()
2098 2098
2099 2099 return dataObj
2100 2100
2101 2101 def __hasNotDataInBuffer(self):
2102 2102 return 1
2103 2103
2104 2104
2105 2105 def getBlockDimension(self):
2106 2106 """
2107 2107         Computes the number of points to read per data block.
2108 2108
2109 2109 Affected:
2110 2110 self.nRdChannels
2111 2111 self.nRdPairs
2112 2112 self.pts2read_SelfSpectra
2113 2113 self.pts2read_CrossSpectra
2114 2114 self.pts2read_DCchannels
2115 2115 self.blocksize
2116 2116 self.dataOut.nChannels
2117 2117 self.dataOut.nPairs
2118 2118
2119 2119 Return:
2120 2120 None
2121 2121 """
2122 2122 self.nRdChannels = 0
2123 2123 self.nRdPairs = 0
2124 2124 self.rdPairList = []
2125 2125
2126 2126 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2127 2127 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2128 2128                 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectra)
2129 2129             else:
2130 2130                 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectra)
2131 2131 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2132 2132
2133 2133 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2134 2134
2135 2135 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2136 2136 self.blocksize = self.pts2read_SelfSpectra
2137 2137
2138 2138 if self.processingHeaderObj.flag_cspc:
2139 2139 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2140 2140 self.blocksize += self.pts2read_CrossSpectra
2141 2141
2142 2142 if self.processingHeaderObj.flag_dc:
2143 2143 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2144 2144 self.blocksize += self.pts2read_DCchannels
2145 2145
2146 2146 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2147 2147
2148 2148
2149 2149 def readBlock(self):
2150 2150 """
2151 2151         Reads the data block from the current position of the file pointer
2152 2152         (self.fp) and updates every parameter related to the data block
2153 2153         (metadata + data). The data read is stored in the buffer and the buffer
2154 2154         counter is reset to 0.
2155 2155         
2156 2156         Return: None
2157 2157         
2158 2158         Affected:
2159 2159         
2160 2160             self.flagIsNewFile
2161 2161             self.flagIsNewBlock
2162 2162             self.nTotalBlocks
2163 2163             self.data_spc
2164 2164             self.data_cspc
2165 2165             self.data_dc
2166 2166         
2167 2167         Exceptions:
2168 2168             If a block read is not a valid block
2169 2169 """
2170 2170 blockOk_flag = False
2171 2171 fpointer = self.fp.tell()
2172 2172
2173 2173 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2174 2174 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2175 2175
2176 2176 if self.processingHeaderObj.flag_cspc:
2177 2177 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2178 2178 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2179 2179
2180 2180 if self.processingHeaderObj.flag_dc:
2181 2181 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2182 2182 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
2183 2183
2184 2184
2185 2185 if not(self.processingHeaderObj.shif_fft):
2186 2186 #desplaza a la derecha en el eje 2 determinadas posiciones
2187 2187 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2188 2188 spc = numpy.roll( spc, shift , axis=2 )
2189 2189
2190 2190 if self.processingHeaderObj.flag_cspc:
2191 2191 #desplaza a la derecha en el eje 2 determinadas posiciones
2192 2192 cspc = numpy.roll( cspc, shift, axis=2 )
2193 2193
2194 2194 # self.processingHeaderObj.shif_fft = True
2195 2195
2196 2196 spc = numpy.transpose( spc, (0,2,1) )
2197 2197 self.data_spc = spc
2198 2198
2199 2199 if self.processingHeaderObj.flag_cspc:
2200 2200 cspc = numpy.transpose( cspc, (0,2,1) )
2201 2201 self.data_cspc = cspc['real'] + cspc['imag']*1j
2202 2202 else:
2203 2203 self.data_cspc = None
2204 2204
2205 2205 if self.processingHeaderObj.flag_dc:
2206 2206 self.data_dc = dc['real'] + dc['imag']*1j
2207 2207 else:
2208 2208 self.data_dc = None
2209 2209
2210 2210 self.flagIsNewFile = 0
2211 2211 self.flagIsNewBlock = 1
2212 2212
2213 2213 self.nTotalBlocks += 1
2214 2214 self.nReadBlocks += 1
2215 2215
2216 2216 return 1
2217 2217
2218 2218 def getFirstHeader(self):
2219 2219
2220 2220 self.dataOut.dtype = self.dtype
2221 2221
2222 2222 self.dataOut.nPairs = self.nRdPairs
2223 2223
2224 2224 self.dataOut.pairsList = self.rdPairList
2225 2225
2226 2226 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2227 2227
2228 2228 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2229 2229
2230 2230 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2231 2231
2232 2232 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2233 2233
2234 2234 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2235 2235
2236 2236 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2237 2237
2238 2238 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2239 2239
2240 2240 self.dataOut.ippSeconds = self.ippSeconds
2241 2241
2242 2242 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2243 2243
2244 2244 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2245 2245
2246 2246 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2247 2247
2248 2248 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2249 2249
2250 2250         self.dataOut.flagDecodeData = False #assume the data has not been decoded
2251 2251         
2252 2252         self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2253 2253
2254 2254 if self.radarControllerHeaderObj.code != None:
2255 2255
2256 2256 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
2257 2257
2258 2258 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
2259 2259
2260 2260 self.dataOut.code = self.radarControllerHeaderObj.code
2261 2261
2262 2262 self.dataOut.flagDecodeData = True
2263 2263
2264 2264 def getData(self):
2265 2265 """
2266 2266         Copies the read buffer, with all its associated parameters (metadata), to
2267 2267         the "Spectra" class. When there is no data left in the read buffer, a new
2268 2268         read of the data blocks is done using "readNextBlock".
2269 2269         
2270 2270         Return:
2271 2271             0 : if there are no more files available
2272 2272             1 : if the buffer was copied successfully
2273 2273         
2274 2274         Affected:
2275 2275             self.dataOut
2276 2276             self.flagTimeBlock
2277 2277             self.flagIsNewBlock
2279 2279 """
2280 2280
2281 2281 if self.flagNoMoreFiles:
2282 2282 self.dataOut.flagNoData = True
2283 2283 print 'Process finished'
2284 2284 return 0
2285 2285
2286 2286 self.flagTimeBlock = 0
2287 2287 self.flagIsNewBlock = 0
2288 2288
2289 2289 if self.__hasNotDataInBuffer():
2290 2290
2291 2291 if not( self.readNextBlock() ):
2292 2292 self.dataOut.flagNoData = True
2293 2293 return 0
2294 2294
2295 2295         #data is a 3-dimensional numpy array (profiles, heights and channels)
2296 2296
2297 2297 if self.data_dc == None:
2298 2298 self.dataOut.flagNoData = True
2299 2299 return 0
2300 2300
2301 2301 self.getBasicHeader()
2302 2302
2303 2303 self.getFirstHeader()
2304 2304
2305 2305 self.dataOut.data_spc = self.data_spc
2306 2306
2307 2307 self.dataOut.data_cspc = self.data_cspc
2308 2308
2309 2309 self.dataOut.data_dc = self.data_dc
2310 2310
2311 2311 self.dataOut.flagNoData = False
2312 2312
2313 2313 self.dataOut.realtime = self.online
2314 2314
2315 2315 return self.dataOut.data_spc
2316 2316
2317 2317
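
# ---------------------------------------------------------------------------
# Editor's sketch (simplified, illustrative only).  SpectraReader's
# getBlockDimension() above sizes a spectra block as the self-spectra points
# plus, when present, the cross-spectra and DC-channel points.  The function
# below reproduces that bookkeeping with plain arguments; the parameter names
# mirror the attributes used above but the values are assumptions.
def _example_spectra_block_points(nRdChannels, nRdPairs, nChannels, nHeights,
                                  profilesPerBlock, flag_cspc=True, flag_dc=True):
    pts2read = nHeights * profilesPerBlock
    blocksize = nRdChannels * pts2read              # self spectra
    if flag_cspc:
        blocksize += nRdPairs * pts2read            # cross spectra
    if flag_dc:
        blocksize += nChannels * nHeights           # DC channels
    return blocksize
# ---------------------------------------------------------------------------
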
2318 2318 class SpectraWriter(JRODataWriter):
2319 2319
2320 2320 """
2321 2321     Writes spectra data to processed files (.pdata). The data is always written
2322 2322     block by block.
2323 2323 """
2324 2324
2325 2325 ext = ".pdata"
2326 2326
2327 2327 optchar = "P"
2328 2328
2329 2329 shape_spc_Buffer = None
2330 2330
2331 2331 shape_cspc_Buffer = None
2332 2332
2333 2333 shape_dc_Buffer = None
2334 2334
2335 2335 data_spc = None
2336 2336
2337 2337 data_cspc = None
2338 2338
2339 2339 data_dc = None
2340 2340
2341 2341 # dataOut = None
2342 2342
2343 2343 def __init__(self):
2344 2344 """
2345 2345         Initializes the SpectraWriter class for writing spectra data.
2346 2346
2347 2347 Affected:
2348 2348 self.dataOut
2349 2349 self.basicHeaderObj
2350 2350 self.systemHeaderObj
2351 2351 self.radarControllerHeaderObj
2352 2352 self.processingHeaderObj
2353 2353
2354 2354 Return: None
2355 2355 """
2356 2356
2357 2357 self.isConfig = False
2358 2358
2359 2359 self.nTotalBlocks = 0
2360 2360
2361 2361 self.data_spc = None
2362 2362
2363 2363 self.data_cspc = None
2364 2364
2365 2365 self.data_dc = None
2366 2366
2367 2367 self.fp = None
2368 2368
2369 2369 self.flagIsNewFile = 1
2370 2370
2371 2371 self.nTotalBlocks = 0
2372 2372
2373 2373 self.flagIsNewBlock = 0
2374 2374
2375 2375 self.setFile = None
2376 2376
2377 2377 self.dtype = None
2378 2378
2379 2379 self.path = None
2380 2380
2381 2381 self.noMoreFiles = 0
2382 2382
2383 2383 self.filename = None
2384 2384
2385 2385 self.basicHeaderObj = BasicHeader(LOCALTIME)
2386 2386
2387 2387 self.systemHeaderObj = SystemHeader()
2388 2388
2389 2389 self.radarControllerHeaderObj = RadarControllerHeader()
2390 2390
2391 2391 self.processingHeaderObj = ProcessingHeader()
2392 2392
2393 2393
2394 2394 def hasAllDataInBuffer(self):
2395 2395 return 1
2396 2396
2397 2397
2398 2398 def setBlockDimension(self):
2399 2399 """
2400 2400         Builds the dimensional shapes of the data sub-blocks that make up a block.
2401 2401
2402 2402 Affected:
2403 2403 self.shape_spc_Buffer
2404 2404 self.shape_cspc_Buffer
2405 2405 self.shape_dc_Buffer
2406 2406
2407 2407 Return: None
2408 2408 """
2409 2409 self.shape_spc_Buffer = (self.dataOut.nChannels,
2410 2410 self.processingHeaderObj.nHeights,
2411 2411 self.processingHeaderObj.profilesPerBlock)
2412 2412
2413 2413 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2414 2414 self.processingHeaderObj.nHeights,
2415 2415 self.processingHeaderObj.profilesPerBlock)
2416 2416
2417 2417 self.shape_dc_Buffer = (self.dataOut.nChannels,
2418 2418 self.processingHeaderObj.nHeights)
2419 2419
2420 2420
2421 2421 def writeBlock(self):
2422 2422 """
2423 2423         Writes the buffer to the designated file.
2424 2424
2425 2425 Affected:
2426 2426 self.data_spc
2427 2427 self.data_cspc
2428 2428 self.data_dc
2429 2429 self.flagIsNewFile
2430 2430 self.flagIsNewBlock
2431 2431 self.nTotalBlocks
2432 2432 self.nWriteBlocks
2433 2433
2434 2434 Return: None
2435 2435 """
2436 2436
2437 2437 spc = numpy.transpose( self.data_spc, (0,2,1) )
2438 2438 if not( self.processingHeaderObj.shif_fft ):
2439 2439 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2440 2440 data = spc.reshape((-1))
2441 2441 data = data.astype(self.dtype[0])
2442 2442 data.tofile(self.fp)
2443 2443
2444 2444 if self.data_cspc != None:
2445 2445 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2446 2446 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2447 2447 if not( self.processingHeaderObj.shif_fft ):
2448 2448 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2449 2449 data['real'] = cspc.real
2450 2450 data['imag'] = cspc.imag
2451 2451 data = data.reshape((-1))
2452 2452 data.tofile(self.fp)
2453 2453
2454 2454 if self.data_dc != None:
2455 2455 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2456 2456 dc = self.data_dc
2457 2457 data['real'] = dc.real
2458 2458 data['imag'] = dc.imag
2459 2459 data = data.reshape((-1))
2460 2460 data.tofile(self.fp)
2461 2461
2462 2462 self.data_spc.fill(0)
2463 2463
2464 2464 if self.data_dc != None:
2465 2465 self.data_dc.fill(0)
2466 2466
2467 2467 if self.data_cspc != None:
2468 2468 self.data_cspc.fill(0)
2469 2469
2470 2470 self.flagIsNewFile = 0
2471 2471 self.flagIsNewBlock = 1
2472 2472 self.nTotalBlocks += 1
2473 2473 self.nWriteBlocks += 1
2474 2474 self.blockIndex += 1
2475 2475
2476 2476
2477 2477 def putData(self):
2478 2478 """
2479 2479         Assembles a data block and then writes it to a file.
2480 2480
2481 2481 Affected:
2482 2482 self.data_spc
2483 2483 self.data_cspc
2484 2484 self.data_dc
2485 2485
2486 2486 Return:
2487 2487             0 : if there is no data or no more files can be written
2488 2488             1 : if the data of one block was written to a file
2489 2489 """
2490 2490
2491 2491 if self.dataOut.flagNoData:
2492 2492 return 0
2493 2493
2494 2494 self.flagIsNewBlock = 0
2495 2495
2496 2496 if self.dataOut.flagTimeBlock:
2497 2497 self.data_spc.fill(0)
2498 2498 self.data_cspc.fill(0)
2499 2499 self.data_dc.fill(0)
2500 2500 self.setNextFile()
2501 2501
2502 2502 if self.flagIsNewFile == 0:
2503 2503 self.setBasicHeader()
2504 2504
2505 2505 self.data_spc = self.dataOut.data_spc.copy()
2506 2506 if self.dataOut.data_cspc != None:
2507 2507 self.data_cspc = self.dataOut.data_cspc.copy()
2508 2508 self.data_dc = self.dataOut.data_dc.copy()
2509 2509
2510 2510 # #self.processingHeaderObj.dataBlocksPerFile)
2511 2511 if self.hasAllDataInBuffer():
2512 2512 # self.setFirstHeader()
2513 2513 self.writeNextBlock()
2514 2514
2515 2515 return 1
2516 2516
2517 2517
2518 2518 def __getProcessFlags(self):
2519 2519
2520 2520 processFlags = 0
2521 2521
2522 2522 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2523 2523 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2524 2524 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2525 2525 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2526 2526 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2527 2527 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2528 2528
2529 2529 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2530 2530
2531 2531
2532 2532
2533 2533 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2534 2534 PROCFLAG.DATATYPE_SHORT,
2535 2535 PROCFLAG.DATATYPE_LONG,
2536 2536 PROCFLAG.DATATYPE_INT64,
2537 2537 PROCFLAG.DATATYPE_FLOAT,
2538 2538 PROCFLAG.DATATYPE_DOUBLE]
2539 2539
2540 2540
2541 2541 for index in range(len(dtypeList)):
2542 2542 if self.dataOut.dtype == dtypeList[index]:
2543 2543 dtypeValue = datatypeValueList[index]
2544 2544 break
2545 2545
2546 2546 processFlags += dtypeValue
2547 2547
2548 2548 if self.dataOut.flagDecodeData:
2549 2549 processFlags += PROCFLAG.DECODE_DATA
2550 2550
2551 2551 if self.dataOut.flagDeflipData:
2552 2552 processFlags += PROCFLAG.DEFLIP_DATA
2553 2553
2554 2554 if self.dataOut.code != None:
2555 2555 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2556 2556
2557 2557 if self.dataOut.nIncohInt > 1:
2558 2558 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2559 2559
2560 2560 if self.dataOut.data_dc != None:
2561 2561 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2562 2562
2563 2563 return processFlags
2564 2564
2565 2565
2566 2566 def __getBlockSize(self):
2567 2567 '''
2568 2568         Determines the number of bytes in a Spectra-type data block.
2569 2569 '''
2570 2570
2571 2571 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2572 2572 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2573 2573 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2574 2574 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2575 2575 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2576 2576 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2577 2577
2578 2578 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2579 2579 datatypeValueList = [1,2,4,8,4,8]
2580 2580 for index in range(len(dtypeList)):
2581 2581 if self.dataOut.dtype == dtypeList[index]:
2582 2582 datatypeValue = datatypeValueList[index]
2583 2583 break
2584 2584
2585 2585
2586 2586 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2587 2587
2588 2588 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2589 2589 blocksize = (pts2write_SelfSpectra*datatypeValue)
2590 2590
2591 2591 if self.dataOut.data_cspc != None:
2592 2592 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2593 2593 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2594 2594
2595 2595 if self.dataOut.data_dc != None:
2596 2596 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2597 2597 blocksize += (pts2write_DCchannels*datatypeValue*2)
2598 2598
2599 2599         blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2600 2600
2601 2601 return blocksize
2602 2602
2603 2603 def setFirstHeader(self):
2604 2604
2605 2605 """
2606 2606         Gets a copy of the First Header.
2607 2607
2608 2608 Affected:
2609 2609 self.systemHeaderObj
2610 2610 self.radarControllerHeaderObj
2611 2611 self.dtype
2612 2612
2613 2613 Return:
2614 2614 None
2615 2615 """
2616 2616
2617 2617 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2618 2618 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2619 2619 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2620 2620 old_code_size = self.dataOut.radarControllerHeaderObj.code_size
2621 2621 new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
2622 2622 self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size
2623 2623
2624 2624 self.setBasicHeader()
2625 2625
2626 2626 processingHeaderSize = 40 # bytes
2627 2627 self.processingHeaderObj.dtype = 1 # Spectra
2628 2628 self.processingHeaderObj.blockSize = self.__getBlockSize()
2629 2629 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2630 2630 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2631 2631 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
2632 2632 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2633 2633 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
2634 2634 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2635 2635 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2636 2636 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2637 2637
2638 2638 if self.processingHeaderObj.totalSpectra > 0:
2639 2639 channelList = []
2640 2640 for channel in range(self.dataOut.nChannels):
2641 2641 channelList.append(channel)
2642 2642 channelList.append(channel)
2643 2643
2644 2644 pairsList = []
2645 2645 if self.dataOut.nPairs > 0:
2646 2646 for pair in self.dataOut.pairsList:
2647 2647 pairsList.append(pair[0])
2648 2648 pairsList.append(pair[1])
2649 2649
2650 2650 spectraComb = channelList + pairsList
2651 2651 spectraComb = numpy.array(spectraComb,dtype="u1")
2652 2652 self.processingHeaderObj.spectraComb = spectraComb
2653 2653 sizeOfSpcComb = len(spectraComb)
2654 2654 processingHeaderSize += sizeOfSpcComb
2655 2655
2656 2656 # The processing header should not have information about code
2657 2657 # if self.dataOut.code != None:
2658 2658 # self.processingHeaderObj.code = self.dataOut.code
2659 2659 # self.processingHeaderObj.nCode = self.dataOut.nCode
2660 2660 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2661 2661 # nCodeSize = 4 # bytes
2662 2662 # nBaudSize = 4 # bytes
2663 2663 # codeSize = 4 # bytes
2664 2664 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2665 2665 # processingHeaderSize += sizeOfCode
2666 2666
2667 2667 if self.processingHeaderObj.nWindows != 0:
2668 2668 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2669 2669 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2670 2670 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2671 2671 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2672 2672 sizeOfFirstHeight = 4
2673 2673 sizeOfdeltaHeight = 4
2674 2674 sizeOfnHeights = 4
2675 2675 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2676 2676 processingHeaderSize += sizeOfWindows
2677 2677
2678 2678 self.processingHeaderObj.size = processingHeaderSize
2679 2679
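
# ---------------------------------------------------------------------------
# Editor's sketch (hypothetical values).  SpectraWriter.setFirstHeader() above
# encodes the spectra combination as each channel repeated twice (self
# spectra) followed by the cross-spectra pairs, e.g. 3 channels and the cross
# pair (0, 1) give [0, 0, 1, 1, 2, 2, 0, 1].
def _example_spectra_comb(nChannels=3, pairsList=[(0, 1)]):
    import numpy
    channelList = []
    for channel in range(nChannels):
        channelList.append(channel)
        channelList.append(channel)
    pairs = []
    for pair in pairsList:
        pairs.append(pair[0])
        pairs.append(pair[1])
    return numpy.array(channelList + pairs, dtype="u1")
# ---------------------------------------------------------------------------
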
2680 2680 class SpectraHeisWriter(Operation):
2681 2681 # set = None
2682 2682 setFile = None
2683 2683 idblock = None
2684 2684 doypath = None
2685 2685 subfolder = None
2686 2686
2687 2687 def __init__(self):
2688 2688 self.wrObj = FITS()
2689 2689 # self.dataOut = dataOut
2690 2690 self.nTotalBlocks=0
2691 2691 # self.set = None
2692 2692 self.setFile = None
2693 2693 self.idblock = 0
2694 2694 self.wrpath = None
2695 2695 self.doypath = None
2696 2696 self.subfolder = None
2697 2697 self.isConfig = False
2698 2698
2699 2699 def isNumber(str):
2700 2700 """
2701 2701         Checks whether the characters of a string can be converted to a number.
2702 2702         
2703 2703         Exceptions:
2704 2704             If the given string cannot be converted to a number
2705 2705         Input:
2706 2706             str, the string analysed to determine whether it can be converted to a number or not
2707 2707         
2708 2708         Return:
2709 2709             True : if the string is numeric
2710 2710             False : if the string is not numeric
2711 2711 """
2712 2712 try:
2713 2713 float( str )
2714 2714 return True
2715 2715 except:
2716 2716 return False
2717 2717
2718 2718 def setup(self, dataOut, wrpath):
2719 2719
2720 2720 if not(os.path.exists(wrpath)):
2721 2721 os.mkdir(wrpath)
2722 2722
2723 2723 self.wrpath = wrpath
2724 2724 # self.setFile = 0
2725 2725 self.dataOut = dataOut
2726 2726
2727 2727 def putData(self):
2728 2728 name= time.localtime( self.dataOut.utctime)
2729 2729 ext=".fits"
2730 2730
2731 2731 if self.doypath == None:
2732 2732 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2733 2733 self.doypath = os.path.join( self.wrpath, self.subfolder )
2734 2734 os.mkdir(self.doypath)
2735 2735
2736 2736 if self.setFile == None:
2737 2737 # self.set = self.dataOut.set
2738 2738 self.setFile = 0
2739 2739 # if self.set != self.dataOut.set:
2740 2740 ## self.set = self.dataOut.set
2741 2741 # self.setFile = 0
2742 2742
2743 2743 #make the filename
2744 2744 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2745 2745
2746 2746 filename = os.path.join(self.wrpath,self.subfolder, file)
2747 2747
2748 2748 idblock = numpy.array([self.idblock],dtype="int64")
2749 2749 header=self.wrObj.cFImage(idblock=idblock,
2750 2750 year=time.gmtime(self.dataOut.utctime).tm_year,
2751 2751 month=time.gmtime(self.dataOut.utctime).tm_mon,
2752 2752 day=time.gmtime(self.dataOut.utctime).tm_mday,
2753 2753 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2754 2754 minute=time.gmtime(self.dataOut.utctime).tm_min,
2755 2755 second=time.gmtime(self.dataOut.utctime).tm_sec)
2756 2756
2757 2757 c=3E8
2758 2758 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2759 2759 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2760 2760
2761 2761 colList = []
2762 2762
2763 2763 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2764 2764
2765 2765 colList.append(colFreq)
2766 2766
2767 2767 nchannel=self.dataOut.nChannels
2768 2768
2769 2769 for i in range(nchannel):
2770 2770 col = self.wrObj.writeData(name="PCh"+str(i+1),
2771 2771 format=str(self.dataOut.nFFTPoints)+'E',
2772 2772 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2773 2773
2774 2774 colList.append(col)
2775 2775
2776 2776 data=self.wrObj.Ctable(colList=colList)
2777 2777
2778 2778 self.wrObj.CFile(header,data)
2779 2779
2780 2780 self.wrObj.wFile(filename)
2781 2781
2782 2782 #update the setFile
2783 2783 self.setFile += 1
2784 2784 self.idblock += 1
2785 2785
2786 2786 return 1
2787 2787
2788 2788 def run(self, dataOut, **kwargs):
2789 2789
2790 2790 if not(self.isConfig):
2791 2791
2792 2792 self.setup(dataOut, **kwargs)
2793 2793 self.isConfig = True
2794 2794
2795 2795 self.putData()
2796 2796
2797 2797
2798 2798
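
# ---------------------------------------------------------------------------
# Editor's note (illustrative only).  SpectraHeisWriter.putData() above builds
# the frequency axis of the FITS columns from the height spacing:
# freq = arange(-nHeights/2, nHeights/2) * c / (2 * deltaHeight * 1000), with
# deltaHeight in km.  With deltaHeight = 1.5 km this gives a 100 kHz bin
# spacing; the default values below are assumptions for the example.
def _example_fits_freq_axis(nHeights=8, deltaHeight=1.5):
    import numpy
    c = 3e8
    return numpy.arange(-1 * nHeights / 2., nHeights / 2.) * (c / (2 * deltaHeight * 1000))
# ---------------------------------------------------------------------------
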
2799 2799 class ParameterConf:
2800 2800 ELEMENTNAME = 'Parameter'
2801 2801 def __init__(self):
2802 2802 self.name = ''
2803 2803 self.value = ''
2804 2804
2805 2805 def readXml(self, parmElement):
2806 2806 self.name = parmElement.get('name')
2807 2807 self.value = parmElement.get('value')
2808 2808
2809 2809 def getElementName(self):
2810 2810 return self.ELEMENTNAME
2811 2811
2812 2812 class Metadata:
2813 2813
2814 2814 def __init__(self, filename):
2815 2815 self.parmConfObjList = []
2816 2816 self.readXml(filename)
2817 2817
2818 2818 def readXml(self, filename):
2819 2819 self.projectElement = None
2820 2820 self.procUnitConfObjDict = {}
2821 2821 self.projectElement = ElementTree().parse(filename)
2822 2822 self.project = self.projectElement.tag
2823 2823
2824 2824 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2825 2825
2826 2826 for parmElement in parmElementList:
2827 2827 parmConfObj = ParameterConf()
2828 2828 parmConfObj.readXml(parmElement)
2829 2829 self.parmConfObjList.append(parmConfObj)
2830 2830
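
# ---------------------------------------------------------------------------
# Editor's sketch (hypothetical file layout).  The Metadata class above reads
# an XML file and collects every <Parameter> element with its name/value
# attributes.  The snippet below parses an equivalent in-memory document; the
# tag and attribute values are assumptions chosen only for the example.
def _example_read_parameters(xml_text='<Experiment>'
                                      '<Parameter name="EXPNAME" value="TestExp"/>'
                                      '<Parameter name="DATATYPE" value="SPECTRA"/>'
                                      '</Experiment>'):
    from xml.etree.ElementTree import fromstring
    root = fromstring(xml_text)
    return [(parm.get('name'), parm.get('value')) for parm in root.findall('.//Parameter')]
# ---------------------------------------------------------------------------
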
2831 2831 class FitsWriter(Operation):
2832 2832
2833 2833 def __init__(self):
2834 2834 self.isConfig = False
2835 2835 self.dataBlocksPerFile = None
2836 2836 self.blockIndex = 0
2837 2837 self.flagIsNewFile = 1
2838 2838 self.fitsObj = None
2839 2839 self.optchar = 'P'
2840 2840 self.ext = '.fits'
2841 2841 self.setFile = 0
2842 2842
2843 2843 def setFitsHeader(self, dataOut, metadatafile):
2844 2844
2845 2845 header_data = pyfits.PrimaryHDU()
2846 2846
2847 2847 metadata4fits = Metadata(metadatafile)
2848 2848 for parameter in metadata4fits.parmConfObjList:
2849 2849 parm_name = parameter.name
2850 2850 parm_value = parameter.value
2851 2851
2852 2852 # if parm_value == 'fromdatadatetime':
2853 2853 # value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2854 2854 # elif parm_value == 'fromdataheights':
2855 2855 # value = dataOut.nHeights
2856 2856 # elif parm_value == 'fromdatachannel':
2857 2857 # value = dataOut.nChannels
2858 2858 # elif parm_value == 'fromdatasamples':
2859 2859 # value = dataOut.nFFTPoints
2860 2860 # else:
2861 2861 # value = parm_value
2862 2862
2863 2863 header_data.header[parm_name] = parm_value
2864 2864
2865 2865
2866 2866 header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2867 2867 header_data.header['CHANNELLIST'] = str(dataOut.channelList)
2868 2868 header_data.header['NCHANNELS'] = dataOut.nChannels
2869 2869 #header_data.header['HEIGHTS'] = dataOut.heightList
2870 2870 header_data.header['NHEIGHTS'] = dataOut.nHeights
2871 2871
2872 2872 header_data.header['IPPSECONDS'] = dataOut.ippSeconds
2873 2873 header_data.header['NCOHINT'] = dataOut.nCohInt
2874 2874 header_data.header['NINCOHINT'] = dataOut.nIncohInt
2875 2875 header_data.header['TIMEZONE'] = dataOut.timeZone
2876 2876 header_data.header['NBLOCK'] = self.blockIndex
2877 2877
2878 2878 header_data.writeto(self.filename)
2879 2879
2880 2880 self.addExtension(dataOut.heightList,'HEIGHTLIST')
2881 2881
2882 2882
2883 2883 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2884 2884
2885 2885 self.path = path
2886 2886 self.dataOut = dataOut
2887 2887 self.metadatafile = metadatafile
2888 2888 self.dataBlocksPerFile = dataBlocksPerFile
2889 2889
2890 2890 def open(self):
2891 2891 self.fitsObj = pyfits.open(self.filename, mode='update')
2892 2892
2893 2893
2894 2894 def addExtension(self, data, tagname):
2895 2895 self.open()
2896 2896 extension = pyfits.ImageHDU(data=data, name=tagname)
2897 2897 #extension.header['TAG'] = tagname
2898 2898 self.fitsObj.append(extension)
2899 2899 self.write()
2900 2900
2901 2901 def addData(self, data):
2902 2902 self.open()
2903 2903 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
2904 2904 extension.header['UTCTIME'] = self.dataOut.utctime
2905 2905 self.fitsObj.append(extension)
2906 2906 self.blockIndex += 1
2907 2907 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2908 2908
2909 2909 self.write()
2910 2910
2911 2911 def write(self):
2912 2912
2913 2913 self.fitsObj.flush(verbose=True)
2914 2914 self.fitsObj.close()
2915 2915
2916 2916
2917 2917 def setNextFile(self):
2918 2918
2919 2919 ext = self.ext
2920 2920 path = self.path
2921 2921
2922 2922 timeTuple = time.localtime( self.dataOut.utctime)
2923 2923 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2924 2924
2925 2925 fullpath = os.path.join( path, subfolder )
2926 2926 if not( os.path.exists(fullpath) ):
2927 2927 os.mkdir(fullpath)
2928 2928 self.setFile = -1 #inicializo mi contador de seteo
2929 2929 else:
2930 2930 filesList = os.listdir( fullpath )
2931 2931 if len( filesList ) > 0:
2932 2932 filesList = sorted( filesList, key=str.lower )
2933 2933 filen = filesList[-1]
2934 2934
2935 2935 if isNumber( filen[8:11] ):
2936 2936 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
2937 2937 else:
2938 2938 self.setFile = -1
2939 2939 else:
2940 2940 self.setFile = -1 #inicializo mi contador de seteo
2941 2941
2942 2942 setFile = self.setFile
2943 2943 setFile += 1
2944 2944
2945 2945 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2946 2946 timeTuple.tm_year,
2947 2947 timeTuple.tm_yday,
2948 2948 setFile,
2949 2949 ext )
2950 2950
2951 2951 filename = os.path.join( path, subfolder, file )
2952 2952
2953 2953 self.blockIndex = 0
2954 2954 self.filename = filename
2955 2955 self.setFile = setFile
2956 2956 self.flagIsNewFile = 1
2957 2957
2958 2958 print 'Writing the file: %s'%self.filename
2959 2959
2960 2960 self.setFitsHeader(self.dataOut, self.metadatafile)
2961 2961
2962 2962 return 1
2963 2963
2964 2964 def writeBlock(self):
2965 2965 self.addData(self.dataOut.data_spc)
2966 2966 self.flagIsNewFile = 0
2967 2967
2968 2968
2969 2969 def __setNewBlock(self):
2970 2970
2971 2971 if self.flagIsNewFile:
2972 2972 return 1
2973 2973
2974 2974 if self.blockIndex < self.dataBlocksPerFile:
2975 2975 return 1
2976 2976
2977 2977 if not( self.setNextFile() ):
2978 2978 return 0
2979 2979
2980 2980 return 1
2981 2981
2982 2982 def writeNextBlock(self):
2983 2983 if not( self.__setNewBlock() ):
2984 2984 return 0
2985 2985 self.writeBlock()
2986 2986 return 1
2987 2987
2988 2988 def putData(self):
2989 2989 if self.flagIsNewFile:
2990 2990 self.setNextFile()
2991 2991 self.writeNextBlock()
2992 2992
2993 2993 def run(self, dataOut, **kwargs):
2994 2994 if not(self.isConfig):
2995 2995 self.setup(dataOut, **kwargs)
2996 2996 self.isConfig = True
2997 2997 self.putData()
2998 2998
2999 2999
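
# ---------------------------------------------------------------------------
# Editor's note (illustrative only).  Both setNextFile() implementations above
# place output files under a day folder named d<year><day-of-year>, e.g. a
# timestamp falling on 2010-01-20 local time yields the subfolder "d2010020".
def _example_day_subfolder(utctime):
    import time
    timeTuple = time.localtime(utctime)
    return 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
# ---------------------------------------------------------------------------
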
3000 3000 class FitsReader(ProcessingUnit):
3001 3001
3002 3002 # __TIMEZONE = time.timezone
3003 3003
3004 3004 expName = None
3005 3005 datetimestr = None
3006 3006 utc = None
3007 3007 nChannels = None
3008 3008 nSamples = None
3009 3009 dataBlocksPerFile = None
3010 3010 comments = None
3011 3011 lastUTTime = None
3012 3012 header_dict = None
3013 3013 data = None
3014 3014 data_header_dict = None
3015 3015
3016 3016 def __init__(self):
3017 3017 self.isConfig = False
3018 3018 self.ext = '.fits'
3019 3019 self.setFile = 0
3020 3020 self.flagNoMoreFiles = 0
3021 3021 self.flagIsNewFile = 1
3022 3022 self.flagTimeBlock = None
3023 3023 self.fileIndex = None
3024 3024 self.filename = None
3025 3025 self.fileSize = None
3026 3026 self.fitsObj = None
3027 3027 self.timeZone = None
3028 3028 self.nReadBlocks = 0
3029 3029 self.nTotalBlocks = 0
3030 3030 self.dataOut = self.createObjByDefault()
3031 3031 self.maxTimeStep = 10 #should be defined by the user through the setup() method
3032 3032 self.blockIndex = 1
3033 3033
3034 3034 def createObjByDefault(self):
3035 3035
3036 3036 dataObj = Fits()
3037 3037
3038 3038 return dataObj
3039 3039
3040 3040 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3041 3041 try:
3042 3042 fitsObj = pyfits.open(filename,'readonly')
3043 3043 except:
3044 3044 raise IOError, "The file %s can't be opened" %(filename)
3045 3045
3046 3046 header = fitsObj[0].header
3047 3047 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3048 3048 utc = time.mktime(struct_time) - time.timezone #TIMEZONE should be a parameter of the FITS header
3049 3049
3050 3050 ltc = utc
3051 3051 if useLocalTime:
3052 3052 ltc -= time.timezone
3053 3053 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3054 3054 thisTime = thisDatetime.time()
3055 3055
3056 3056 if not ((startTime <= thisTime) and (endTime > thisTime)):
3057 3057 return None
3058 3058
3059 3059 return thisDatetime
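The same DATETIME check can be written as a small standalone helper; the header keyword and the format string are the ones used above, while the function name and the filename are examples only:

    import time, datetime
    import pyfits

    def fileInTimeRange(filename, startTime, endTime):
        # read the acquisition time from the primary FITS header
        header = pyfits.open(filename, 'readonly')[0].header
        utc = time.mktime(time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")) - time.timezone
        thisTime = datetime.datetime.utcfromtimestamp(utc).time()
        return (startTime <= thisTime) and (endTime > thisTime)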
3060 3060
3061 3061 def __setNextFileOnline(self):
3062 3062 raise ValueError, "No implemented"
3063 3063
3064 3064 def __setNextFileOffline(self):
3065 3065 idFile = self.fileIndex
3066 3066
3067 3067 while (True):
3068 3068 idFile += 1
3069 3069 if not(idFile < len(self.filenameList)):
3070 3070 self.flagNoMoreFiles = 1
3071 3071 print "No more Files"
3072 3072 return 0
3073 3073
3074 3074 filename = self.filenameList[idFile]
3075 3075
3076 3076 # if not(self.__verifyFile(filename)):
3077 3077 # continue
3078 3078
3079 3079 fileSize = os.path.getsize(filename)
3080 3080 fitsObj = pyfits.open(filename,'readonly')
3081 3081 break
3082 3082
3083 3083 self.flagIsNewFile = 1
3084 3084 self.fileIndex = idFile
3085 3085 self.filename = filename
3086 3086 self.fileSize = fileSize
3087 3087 self.fitsObj = fitsObj
3088 3088 self.blockIndex = 0
3089 3089 print "Setting the file: %s"%self.filename
3090 3090
3091 3091 return 1
3092 3092
3093 3093 def readHeader(self):
3094 3094 headerObj = self.fitsObj[0]
3095 3095
3096 3096 self.header_dict = headerObj.header
3097 3097 if 'EXPNAME' in headerObj.header.keys():
3098 3098 self.expName = headerObj.header['EXPNAME']
3099 3099
3100 3100 if 'DATATYPE' in headerObj.header.keys():
3101 3101 self.dataType = headerObj.header['DATATYPE']
3102 3102
3103 3103 self.datetimestr = headerObj.header['DATETIME']
3104 3104 channelList = headerObj.header['CHANNELLIST']
3105 3105 channelList = channelList.split('[')
3106 3106 channelList = channelList[1].split(']')
3107 3107 channelList = channelList[0].split(',')
3108 3108 channelList = [int(ch) for ch in channelList]
3109 3109 self.channelList = channelList
3110 3110 self.nChannels = headerObj.header['NCHANNELS']
3111 3111 self.nHeights = headerObj.header['NHEIGHTS']
3112 3112 self.ippSeconds = headerObj.header['IPPSECONDS']
3113 3113 self.nCohInt = headerObj.header['NCOHINT']
3114 3114 self.nIncohInt = headerObj.header['NINCOHINT']
3115 3115 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3116 3116 self.timeZone = headerObj.header['TIMEZONE']
3117 3117
3118 3118 self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
3119 3119
3120 3120 if 'COMMENT' in headerObj.header.keys():
3121 3121 self.comments = headerObj.header['COMMENT']
3122 3122
3123 3123 self.readHeightList()
3124 3124
3125 3125 def readHeightList(self):
3126 3126 self.blockIndex = self.blockIndex + 1
3127 3127 obj = self.fitsObj[self.blockIndex]
3128 3128 self.heightList = obj.data
3129 3129 self.blockIndex = self.blockIndex + 1
3130 3130
3131 3131 def readExtension(self):
3132 3132 obj = self.fitsObj[self.blockIndex]
3133 3133 self.heightList = obj.data
3134 3134 self.blockIndex = self.blockIndex + 1
3135 3135
3136 3136 def setNextFile(self):
3137 3137
3138 3138 if self.online:
3139 3139 newFile = self.__setNextFileOnline()
3140 3140 else:
3141 3141 newFile = self.__setNextFileOffline()
3142 3142
3143 3143 if not(newFile):
3144 3144 return 0
3145 3145
3146 3146 self.readHeader()
3147 3147
3148 3148 self.nReadBlocks = 0
3149 3149 # self.blockIndex = 1
3150 3150 return 1
3151 3151
3152 3152 def __searchFilesOffLine(self,
3153 3153 path,
3154 3154 startDate,
3155 3155 endDate,
3156 3156 startTime=datetime.time(0,0,0),
3157 3157 endTime=datetime.time(23,59,59),
3158 3158 set=None,
3159 3159 expLabel='',
3160 3160 ext='.fits',
3161 3161 walk=True):
3162 3162
3163 3163 pathList = []
3164 3164
3165 3165 if not walk:
3166 3166 pathList.append(path)
3167 3167
3168 3168 else:
3169 3169 dirList = []
3170 3170 for thisPath in os.listdir(path):
3171 3171 if not os.path.isdir(os.path.join(path,thisPath)):
3172 3172 continue
3173 3173 if not isDoyFolder(thisPath):
3174 3174 continue
3175 3175
3176 3176 dirList.append(thisPath)
3177 3177
3178 3178 if not(dirList):
3179 3179 return None, None
3180 3180
3181 3181 thisDate = startDate
3182 3182
3183 3183 while(thisDate <= endDate):
3184 3184 year = thisDate.timetuple().tm_year
3185 3185 doy = thisDate.timetuple().tm_yday
3186 3186
3187 3187 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3188 3188 if len(matchlist) == 0:
3189 3189 thisDate += datetime.timedelta(1)
3190 3190 continue
3191 3191 for match in matchlist:
3192 3192 pathList.append(os.path.join(path,match,expLabel))
3193 3193
3194 3194 thisDate += datetime.timedelta(1)
3195 3195
3196 3196 if pathList == []:
3197 3197 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3198 3198 return None, None
3199 3199
3200 3200 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3201 3201
3202 3202 filenameList = []
3203 3203 datetimeList = []
3204 3204
3205 3205 for i in range(len(pathList)):
3206 3206
3207 3207 thisPath = pathList[i]
3208 3208
3209 3209 fileList = glob.glob1(thisPath, "*%s" %ext)
3210 3210 fileList.sort()
3211 3211
3212 3212 for file in fileList:
3213 3213
3214 3214 filename = os.path.join(thisPath,file)
3215 3215 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
3216 3216
3217 3217 if not(thisDatetime):
3218 3218 continue
3219 3219
3220 3220 filenameList.append(filename)
3221 3221 datetimeList.append(thisDatetime)
3222 3222
3223 3223 if not(filenameList):
3224 3224 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3225 3225 return None, None
3226 3226
3227 3227 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3228 3228 print
3229 3229
3230 3230 for i in range(len(filenameList)):
3231 3231 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3232 3232
3233 3233 self.filenameList = filenameList
3234 3234 self.datetimeList = datetimeList
3235 3235
3236 3236 return pathList, filenameList
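The folders searched above follow the 'dYYYYDDD' (year plus day-of-year) convention; a sketch of the same match for a single date, with an example folder listing:

    import fnmatch, datetime

    dirList = ['d2012325', 'd2012326', 'param']     # example listing only
    thisDate = datetime.date(2012, 11, 20)          # day of year 325
    doy = thisDate.timetuple().tm_yday
    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (thisDate.year, doy) + '*')
    # matchlist -> ['d2012325']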
3237 3237
3238 3238 def setup(self, path=None,
3239 3239 startDate=None,
3240 3240 endDate=None,
3241 3241 startTime=datetime.time(0,0,0),
3242 3242 endTime=datetime.time(23,59,59),
3243 3243 set=0,
3244 3244 expLabel = "",
3245 3245 ext = None,
3246 3246 online = False,
3247 3247 delay = 60,
3248 3248 walk = True):
3249 3249
3250 3250 if path == None:
3251 3251 raise ValueError, "The path is not valid"
3252 3252
3253 3253 if ext == None:
3254 3254 ext = self.ext
3255 3255
3256 3256 if not(online):
3257 3257 print "Searching files in offline mode ..."
3258 3258 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3259 3259 startTime=startTime, endTime=endTime,
3260 3260 set=set, expLabel=expLabel, ext=ext,
3261 3261 walk=walk)
3262 3262
3263 3263 if not(pathList):
3264 3264 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3265 3265 datetime.datetime.combine(startDate,startTime).ctime(),
3266 3266 datetime.datetime.combine(endDate,endTime).ctime())
3267 3267
3268 3268 sys.exit(-1)
3269 3269
3270 3270 self.fileIndex = -1
3271 3271 self.pathList = pathList
3272 3272 self.filenameList = filenameList
3273 3273
3274 3274 self.online = online
3275 3275 self.delay = delay
3276 3276 ext = ext.lower()
3277 3277 self.ext = ext
3278 3278
3279 3279 if not(self.setNextFile()):
3280 3280 if (startDate!=None) and (endDate!=None):
3281 3281 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3282 3282 elif startDate != None:
3283 3283 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3284 3284 else:
3285 3285 print "No files"
3286 3286
3287 3287 sys.exit(-1)
3288 3288
3289 3289
3290 3290
3291 3291 def readBlock(self):
3292 3292 dataObj = self.fitsObj[self.blockIndex]
3293 3293
3294 3294 self.data = dataObj.data
3295 3295 self.data_header_dict = dataObj.header
3296 3296 self.utc = self.data_header_dict['UTCTIME']
3297 3297
3298 3298 self.flagIsNewFile = 0
3299 3299 self.blockIndex += 1
3300 3300 self.nTotalBlocks += 1
3301 3301 self.nReadBlocks += 1
3302 3302
3303 3303 return 1
3304 3304
3305 3305 def __jumpToLastBlock(self):
3306 3306 raise ValueError, "No implemented"
3307 3307
3308 3308 def __waitNewBlock(self):
3309 3309 """
3310 3310 Return 1 if a new data block was found, 0 otherwise.
3311 3311
3312 3312 If the reading mode is offline it always returns 0.
3313 3313 """
3314 3314 if not self.online:
3315 3315 return 0
3316 3316
3317 3317 if (self.nReadBlocks >= self.dataBlocksPerFile):
3318 3318 return 0
3319 3319
3320 3320 currentPointer = self.fp.tell()
3321 3321
3322 3322 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3323 3323
3324 3324 for nTries in range( self.nTries ):
3325 3325
3326 3326 self.fp.close()
3327 3327 self.fp = open( self.filename, 'rb' )
3328 3328 self.fp.seek( currentPointer )
3329 3329
3330 3330 self.fileSize = os.path.getsize( self.filename )
3331 3331 currentSize = self.fileSize - currentPointer
3332 3332
3333 3333 if ( currentSize >= neededSize ):
3334 3334 self.__rdBasicHeader()
3335 3335 return 1
3336 3336
3337 3337 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3338 3338 time.sleep( self.delay )
3339 3339
3340 3340
3341 3341 return 0
3342 3342
3343 3343 def __setNewBlock(self):
3344 3344
3345 3345 if self.online:
3346 3346 self.__jumpToLastBlock()
3347 3347
3348 3348 if self.flagIsNewFile:
3349 3349 return 1
3350 3350
3351 3351 self.lastUTTime = self.utc
3352 3352
3353 3353 if self.online:
3354 3354 if self.__waitNewBlock():
3355 3355 return 1
3356 3356
3357 3357 if self.nReadBlocks < self.dataBlocksPerFile:
3358 3358 return 1
3359 3359
3360 3360 if not(self.setNextFile()):
3361 3361 return 0
3362 3362
3363 3363 deltaTime = self.utc - self.lastUTTime
3364 3364
3365 3365 self.flagTimeBlock = 0
3366 3366
3367 3367 if deltaTime > self.maxTimeStep:
3368 3368 self.flagTimeBlock = 1
3369 3369
3370 3370 return 1
3371 3371
3372 3372
3373 3373 def readNextBlock(self):
3374 3374 if not(self.__setNewBlock()):
3375 3375 return 0
3376 3376
3377 3377 if not(self.readBlock()):
3378 3378 return 0
3379 3379
3380 3380 return 1
3381 3381
3382 3382
3383 3383 def getData(self):
3384 3384
3385 3385 if self.flagNoMoreFiles:
3386 3386 self.dataOut.flagNoData = True
3387 3387 print 'Process finished'
3388 3388 return 0
3389 3389
3390 3390 self.flagTimeBlock = 0
3391 3391 self.flagIsNewBlock = 0
3392 3392
3393 3393 if not(self.readNextBlock()):
3394 3394 return 0
3395 3395
3396 3396 if self.data == None:
3397 3397 self.dataOut.flagNoData = True
3398 3398 return 0
3399 3399
3400 3400 self.dataOut.data = self.data
3401 3401 self.dataOut.data_header = self.data_header_dict
3402 3402 self.dataOut.utctime = self.utc
3403 3403
3404 3404 self.dataOut.header = self.header_dict
3405 3405 self.dataOut.expName = self.expName
3406 3406 self.dataOut.nChannels = self.nChannels
3407 3407 self.dataOut.timeZone = self.timeZone
3408 3408 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3409 3409 self.dataOut.comments = self.comments
3410 3410 self.dataOut.timeInterval = self.timeInterval
3411 3411 self.dataOut.channelList = self.channelList
3412 3412 self.dataOut.heightList = self.heightList
3413 3413 self.dataOut.flagNoData = False
3414 3414
3415 3415 return self.dataOut.data
3416 3416
3417 3417 def run(self, **kwargs):
3418 3418
3419 3419 if not(self.isConfig):
3420 3420 self.setup(**kwargs)
3421 3421 self.isConfig = True
3422 3422
3423 3423 self.getData()
3424 3424
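A sketch of how this reader is typically driven, using the setup() keywords defined above; the path and dates are example values only, and getOutput() is inherited from ProcessingUnit:

    import datetime

    readUnitObj = FitsReader()
    readUnitObj.run(path='/data/fits',                      # example path only
                    startDate=datetime.date(2012, 11, 20),
                    endDate=datetime.date(2012, 11, 21),
                    startTime=datetime.time(0, 0, 0),
                    endTime=datetime.time(23, 59, 59),
                    online=False,
                    walk=True)
    spectraObj = readUnitObj.getOutput()                    # Fits data object filled by getData()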
3425 3425
3426 3426 class RadacHeader():
3427 3427 def __init__(self, fp):
3428 3428 header = 'Raw11/Data/RadacHeader'
3429 3429 self.beamCodeByPulse = fp.get(header+'/BeamCode')
3430 3430 self.beamCode = fp.get('Raw11/Data/Beamcodes')
3431 3431 self.code = fp.get(header+'/Code')
3432 3432 self.frameCount = fp.get(header+'/FrameCount')
3433 3433 self.modeGroup = fp.get(header+'/ModeGroup')
3434 3434 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
3435 3435 self.pulseCount = fp.get(header+'/PulseCount')
3436 3436 self.radacTime = fp.get(header+'/RadacTime')
3437 3437 self.timeCount = fp.get(header+'/TimeCount')
3438 3438 self.timeStatus = fp.get(header+'/TimeStatus')
3439 3439
3440 3440 self.nrecords = self.pulseCount.shape[0] #number of blocks
3441 3441 self.npulses = self.pulseCount.shape[1] #number of profiles
3442 3442 self.nsamples = self.nsamplesPulse[0,0] #number of heights
3443 3443 self.nbeams = self.beamCode.shape[1] #number of beams
3444 3444
3445 3445
3446 3446 def getIndexRangeToPulse(self, idrecord=0):
3447 3447 indexToZero = numpy.where(self.pulseCount.value[idrecord,:]==0)
3448 3448 startPulseCountId = indexToZero[0][0]
3449 3449 endPulseCountId = startPulseCountId - 1
3450 3450 range1 = numpy.arange(startPulseCountId,self.npulses,1)
3451 3451 range2 = numpy.arange(0,startPulseCountId,1)
3452 3452 return range1, range2
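A worked example of the index split done above: if the pulse counter of a record restarts at position k, range1 covers the pulses k..npulses-1 and range2 the pulses 0..k-1 (the counter values are examples only):

    import numpy

    pulseCount_record = numpy.array([3, 4, 0, 1, 2])   # counter of one record (example)
    npulses = len(pulseCount_record)
    k = numpy.where(pulseCount_record == 0)[0][0]      # k = 2
    range1 = numpy.arange(k, npulses, 1)               # [2, 3, 4]
    range2 = numpy.arange(0, k, 1)                     # [0, 1]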
3453 3453
3454 3454
3455 3455 class AMISRReader(ProcessingUnit):
3456 3456
3457 3457 path = None
3458 3458 startDate = None
3459 3459 endDate = None
3460 3460 startTime = None
3461 3461 endTime = None
3462 3462 walk = None
3463 3463 isConfig = False
3464 3464
3465 3465 def __init__(self):
3466 3466 self.set = None
3467 3467 self.subset = None
3468 3468 self.extension_file = '.h5'
3469 3469 self.dtc_str = 'dtc'
3470 3470 self.dtc_id = 0
3471 3471 self.status = True
3472 3472 self.isConfig = False
3473 3473 self.dirnameList = []
3474 3474 self.filenameList = []
3475 3475 self.fileIndex = None
3476 3476 self.flagNoMoreFiles = False
3477 3477 self.flagIsNewFile = 0
3478 3478 self.filename = ''
3479 3479 self.amisrFilePointer = None
3480 3480 self.radacHeaderObj = None
3481 3481 self.dataOut = self.__createObjByDefault()
3482 3482 self.datablock = None
3483 3483 self.rest_datablock = None
3484 3484 self.range = None
3485 3485 self.idrecord_count = 0
3486 3486 self.profileIndex = 0
3487 3487 self.idpulse_range1 = None
3488 3488 self.idpulse_range2 = None
3489 3489 self.beamCodeByFrame = None
3490 3490 self.radacTimeByFrame = None
3491 3491 #original attributes exactly as they are stored in the data file
3492 3492 self.beamCodesFromFile = None
3493 3493 self.radacTimeFromFile = None
3494 3494 self.rangeFromFile = None
3495 3495 self.dataByFrame = None
3496 3496 self.dataset = None
3497 3497
3498 3498 self.beamCodeDict = {}
3499 3499 self.beamRangeDict = {}
3500 3500
3501 3501 #experiment cfg file
3502 3502 self.npulsesint_fromfile = None
3503 3503 self.recordsperfile_fromfile = None
3504 3504 self.nbeamcodes_fromfile = None
3505 3505 self.ngates_fromfile = None
3506 3506 self.ippSeconds_fromfile = None
3507 3507 self.frequency_h5file = None
3508 3508
3509 3509
3510 3510 def __createObjByDefault(self):
3511 3511
3512 3512 dataObj = AMISR()
3513 3513
3514 3514 return dataObj
3515 3515
3516 3516 def __setParameters(self,path,startDate,endDate,startTime,endTime,walk):
3517 3517 self.path = path
3518 3518 self.startDate = startDate
3519 3519 self.endDate = endDate
3520 3520 self.startTime = startTime
3521 3521 self.endTime = endTime
3522 3522 self.walk = walk
3523 3523
3524 3524 def __checkPath(self):
3525 3525 if os.path.exists(self.path):
3526 3526 self.status = 1
3527 3527 else:
3528 3528 self.status = 0
3529 3529 print 'Path:%s does not exist'%self.path
3530 3530
3531 3531 return
3532 3532
3533 3533 def __selDates(self, amisr_dirname_format):
3534 try:
3534 3535 year = int(amisr_dirname_format[0:4])
3535 3536 month = int(amisr_dirname_format[4:6])
3536 3537 dom = int(amisr_dirname_format[6:8])
3537 3538 thisDate = datetime.date(year,month,dom)
3538 3539
3539 3540 if (thisDate>=self.startDate and thisDate <= self.endDate):
3540 3541 return amisr_dirname_format
3542 except:
3543 return None
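A sketch of the date filter above on example directory names; the 'YYYYMMDD.NNN' layout is the AMISR naming convention assumed by the slicing, and the date range is arbitrary:

    import datetime

    startDate = datetime.date(2012, 11, 1)
    endDate = datetime.date(2012, 11, 30)

    def selDate(dirname):
        try:
            thisDate = datetime.date(int(dirname[0:4]), int(dirname[4:6]), int(dirname[6:8]))
        except:
            return None
        return dirname if (startDate <= thisDate <= endDate) else None

    print selDate('20121120.001')    # kept
    print selDate('20120101.001')    # None, outside the range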
3541 3544
3542 3545 def __findDataForDates(self):
3543 3546
3544 3547
3545 3548
3546 3549 if not(self.status):
3547 3550 return None
3548 3551
3549 3552 pat = '\d+.\d+'
3550 dirnameList = [re.search(pat,x).string for x in os.listdir(self.path)]
3553 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
3554 dirnameList = filter(lambda x:x!=None,dirnameList)
3555 dirnameList = [x.string for x in dirnameList]
3551 3556 dirnameList = [self.__selDates(x) for x in dirnameList]
3552 3557 dirnameList = filter(lambda x:x!=None,dirnameList)
3553 3558 if len(dirnameList)>0:
3554 3559 self.status = 1
3555 3560 self.dirnameList = dirnameList
3556 3561 self.dirnameList.sort()
3557 3562 else:
3558 3563 self.status = 0
3559 3564 return None
3560 3565
3561 3566 def __getTimeFromData(self):
3562 3567 pass
3563 3568
3564 3569 def __filterByGlob1(self, dirName):
3565 3570 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
3566 3571 filterDict = {}
3567 3572 filterDict.setdefault(dirName)
3568 3573 filterDict[dirName] = filter_files
3569 3574 return filterDict
3570 3575
3571 3576 def __getFilenameList(self, fileListInKeys, dirList):
3572 3577 for value in fileListInKeys:
3573 3578 dirName = value.keys()[0]
3574 3579 for file in value[dirName]:
3575 3580 filename = os.path.join(dirName, file)
3576 3581 self.filenameList.append(filename)
3577 3582
3578 3583
3579 3584 def __selectDataForTimes(self):
3580 3585 #the filter by time is not implemented yet
3581 3586 if not(self.status):
3582 3587 return None
3583 3588
3584 3589 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
3585 3590
3586 3591 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
3587 3592
3588 3593 self.__getFilenameList(fileListInKeys, dirList)
3589 3594
3590 3595 if len(self.filenameList)>0:
3591 3596 self.status = 1
3592 3597 self.filenameList.sort()
3593 3598 else:
3594 3599 self.status = 0
3595 3600 return None
3596 3601
3597 3602
3598 3603 def __searchFilesOffline(self,
3599 3604 path,
3600 3605 startDate,
3601 3606 endDate,
3602 3607 startTime=datetime.time(0,0,0),
3603 3608 endTime=datetime.time(23,59,59),
3604 3609 walk=True):
3605 3610
3606 3611 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
3607 3612
3608 3613 self.__checkPath()
3609 3614
3610 3615 self.__findDataForDates()
3611 3616
3612 3617 self.__selectDataForTimes()
3613 3618
3614 3619 for i in range(len(self.filenameList)):
3615 3620 print "%s" %(self.filenameList[i])
3616 3621
3617 3622 return
3618 3623
3619 3624 def __setNextFileOffline(self):
3620 3625 idFile = self.fileIndex
3621 3626
3622 3627 while (True):
3623 3628 idFile += 1
3624 3629 if not(idFile < len(self.filenameList)):
3625 3630 self.flagNoMoreFiles = 1
3626 3631 print "No more Files"
3627 3632 return 0
3628 3633
3629 3634 filename = self.filenameList[idFile]
3630 3635
3631 3636 amisrFilePointer = h5py.File(filename,'r')
3632 3637
3633 3638 break
3634 3639
3635 3640 self.flagIsNewFile = 1
3636 3641 self.fileIndex = idFile
3637 3642 self.filename = filename
3638 3643
3639 3644 self.amisrFilePointer = amisrFilePointer
3640 3645
3641 3646 print "Setting the file: %s"%self.filename
3642 3647
3643 3648 return 1
3644 3649
3645 3650 def __readHeader(self):
3646 3651 self.radacHeaderObj = RadacHeader(self.amisrFilePointer)
3647 3652
3648 3653 #update values from experiment cfg file
3649 3654 self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
3650 3655 self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
3651 3656 self.radacHeaderObj.npulses = self.npulsesint_fromfile
3652 3657 self.radacHeaderObj.nsamples = self.ngates_fromfile
3653 3658
3654 3659 #get tuning frequency
3655 3660 frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
3656 3661 self.frequency_h5file = frequency_h5file_dataset[0,0]
3657 3662
3658 3663 self.flagIsNewFile = 1
3659 3664
3660 3665 def __getBeamCode(self):
3661 3666 self.beamCodeDict = {}
3662 3667 self.beamRangeDict = {}
3663 3668
3664 3669 for i in range(len(self.radacHeaderObj.beamCode[0,:])):
3665 3670 self.beamCodeDict.setdefault(i)
3666 3671 self.beamRangeDict.setdefault(i)
3667 3672 self.beamCodeDict[i] = self.radacHeaderObj.beamCode[0,i]
3668 3673
3669 3674
3670 3675 just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]
3671 3676
3672 3677 for i in range(len(self.beamCodeDict.values())):
3673 3678 xx = numpy.where(just4record0==self.beamCodeDict.values()[i])
3674 3679 self.beamRangeDict[i] = xx[0]
3675 3680
3676 3681 def __getExpParameters(self):
3677 3682 if not(self.status):
3678 3683 return None
3679 3684
3680 3685 experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')
3681 3686
3682 3687 expFinder = glob.glob1(experimentCfgPath,'*.exp')
3683 3688 if len(expFinder)== 0:
3684 3689 self.status = 0
3685 3690 return None
3686 3691
3687 3692 experimentFilename = os.path.join(experimentCfgPath,expFinder[0])
3688 3693
3689 3694 f = open(experimentFilename)
3690 3695 lines = f.readlines()
3691 3696 f.close()
3692 3697
3693 3698 parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
3694 3699 filterList = [fnmatch.filter(lines, x) for x in parmsList]
3695 3700
3696 3701
3697 3702 values = [re.sub(r'\D',"",x[0]) for x in filterList]
3698 3703
3699 3704 self.npulsesint_fromfile = int(values[0])
3700 3705 self.recordsperfile_fromfile = int(values[1])
3701 3706 self.nbeamcodes_fromfile = int(values[2])
3702 3707 self.ngates_fromfile = int(values[3])
3703 3708
3704 3709 tufileFinder = fnmatch.filter(lines, 'tufile=*')
3705 3710 tufile = tufileFinder[0].split('=')[1].split('\n')[0]
3706 3711 tufilename = os.path.join(experimentCfgPath,tufile)
3707 3712
3708 3713 f = open(tufilename)
3709 3714 lines = f.readlines()
3710 3715 f.close()
3711 3716 self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6
3712 3717
3713 3718
3714 3719 self.status = 1
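A sketch of the digit extraction used above on an assumed configuration line (the exact layout of the .exp file may differ):

    import re, fnmatch

    lines = ['npulsesint=128\n', 'ngates=1000\n']    # assumed example lines
    match = fnmatch.filter(lines, 'npulsesint*')     # -> ['npulsesint=128\n']
    npulsesint = int(re.sub(r'\D', "", match[0]))    # keep only the digits -> 128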
3715 3720
3716 3721 def __setNextFile(self):
3717 3722
3718 3723 newFile = self.__setNextFileOffline()
3719 3724
3720 3725 if not(newFile):
3721 3726 return 0
3722 3727
3723 3728 self.__readHeader()
3724 3729 self.__getBeamCode()
3725 3730 self.readDataBlock()
3726 3731
3727 3732
3728 3733 def setup(self,path=None,
3729 3734 startDate=None,
3730 3735 endDate=None,
3731 3736 startTime=datetime.time(0,0,0),
3732 3737 endTime=datetime.time(23,59,59),
3733 3738 walk=True):
3734 3739
3735 3740 #Search for offline files
3736 3741 self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)
3737 3742
3738 3743 if not(self.filenameList):
3739 3744 print "There is no files into the folder: %s"%(path)
3740 3745
3741 3746 sys.exit(-1)
3742 3747
3743 3748 self.__getExpParameters()
3744 3749
3745 3750 self.fileIndex = -1
3746 3751
3747 3752 self.__setNextFile()
3748 3753
3749 3754 def readRanges(self):
3750 3755 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
3751 3756 #self.rangeFromFile = dataset.value
3752 3757 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
3753 3758 return range
3754 3759
3755 3760
3756 3761 def readRadacTime(self,idrecord, range1, range2):
3757 3762 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
3758 3763
3759 3764 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
3760 3765 #radacTimeByFrame = dataset[idrecord - 1,range1]
3761 3766 #radacTimeByFrame = dataset[idrecord,range2]
3762 3767
3763 3768 return radacTimeByFrame
3764 3769
3765 3770 def readBeamCode(self, idrecord, range1, range2):
3766 3771 dataset = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode')
3767 3772 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
3768 3773 self.beamCodesFromFile = dataset.value
3769 3774
3770 3775 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
3771 3776 #beamcodeByFrame[range2] = dataset[idrecord, range2]
3772 3777 beamcodeByFrame[range1] = dataset[idrecord, range1]
3773 3778 beamcodeByFrame[range2] = dataset[idrecord, range2]
3774 3779
3775 3780 return beamcodeByFrame
3776 3781
3777 3782
3778 3783 def __setDataByFrame(self):
3779 3784 ndata = 2 # because the data is complex (real and imaginary parts)
3780 3785 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
3781 3786 return dataByFrame
3782 3787
3783 3788 def __readDataSet(self):
3784 3789 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
3785 3790 return dataset
3786 3791
3787 3792 def __setDataBlock(self,):
3788 3793 real = self.dataByFrame[:,:,0] #index 0 is assumed to be the real part
3789 3794 imag = self.dataByFrame[:,:,1] #index 1 is assumed to be the imaginary part
3790 3795 datablock = real + imag*1j #build the complex data
3791 3796 return datablock
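The real and imaginary parts are stored as the last axis of the samples array; a minimal numpy sketch of the same assembly (the shape and values are examples only):

    import numpy

    dataByFrame = numpy.zeros((4, 3, 2))                         # (pulses, samples, real/imag)
    dataByFrame[..., 0] = 1.0                                    # real plane
    dataByFrame[..., 1] = -2.0                                   # imaginary plane
    datablock = dataByFrame[..., 0] + dataByFrame[..., 1] * 1j   # complex, shape (pulses, samples)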
3792 3797
3793 3798 def readSamples_version1(self,idrecord):
3794 3799 #these first three lines should be executed only once
3795 3800 if self.flagIsNewFile:
3796 3801 self.idpulse_range1, self.idpulse_range2 = self.radacHeaderObj.getIndexRangeToPulse(0)
3797 3802 self.dataByFrame = self.__setDataByFrame()
3798 3803 self.beamCodeByFrame = self.readBeamCode(idrecord, self.idpulse_range1, self.idpulse_range2)
3799 3804 self.radacTimeByFrame = self.readRadacTime(idrecord, self.idpulse_range1, self.idpulse_range2)
3800 3805 #reading dataset
3801 3806 self.dataset = self.__readDataSet()
3802 3807
3803 3808 if idrecord == 0:
3804 3809
3805 3810 if len(numpy.where(self.dataByFrame!=0.0)[0]) or len(numpy.where(self.dataByFrame!=0.0)[1]) or len(numpy.where(self.dataByFrame!=0.0)[2]):
3806 3811 #a condition for discontinuous data still needs to be added
3807 3812 #by default the data of the previous record is joined
3808 3813 self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
3809 3814 #timepulse
3810 3815 self.radacTimeByFrame[self.idpulse_range2] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range2]
3811 3816 else:
3812 3817 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]
3813 3818
3814 3819 self.radacTimeByFrame[self.idpulse_range1] = self.radacHeaderObj.radacTime[idrecord, self.idpulse_range1]
3815 3820
3816 3821 datablock = self.__setDataBlock()
3817 3822
3818 3823 return datablock
3819 3824
3820 3825 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord - 1,self.idpulse_range1, :, :]
3821 3826 self.dataByFrame[self.idpulse_range2, :, :] = self.dataset[idrecord, self.idpulse_range2, :, :]
3822 3827 datablock = self.__setDataBlock()
3823 3828 self.flagIsNewFile = 0
3824 3829
3825 3830 self.dataByFrame[self.idpulse_range1, :, :] = self.dataset[idrecord, self.idpulse_range1, :, :]
3826 3831
3827 3832
3828 3833 return datablock
3829 3834
3830 3835
3831 3836 def readSamples(self,idrecord):
3832 3837 if self.flagIsNewFile:
3833 3838 self.dataByFrame = self.__setDataByFrame()
3834 3839 self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]
3835 3840
3836 3841 #reading ranges
3837 3842 self.readRanges()
3838 3843 #reading dataset
3839 3844 self.dataset = self.__readDataSet()
3840 3845
3841 3846 self.flagIsNewFile = 0
3842 3847 self.radacTimeByFrame = self.radacHeaderObj.radacTime.value[idrecord, :]
3843 3848 self.dataByFrame = self.dataset[idrecord, :, :, :]
3844 3849 datablock = self.__setDataBlock()
3845 3850 return datablock
3846 3851
3847 3852
3848 3853 def readDataBlock(self):
3849 3854
3850 3855 #self.datablock = self.readSamples(self.idrecord_count)
3851 3856 self.datablock = self.readSamples(self.idrecord_count)
3852 3857 #print 'record:', self.idrecord_count
3853 3858
3854 3859 self.idrecord_count += 1
3855 3860 self.profileIndex = 0
3856 3861
3857 3862 if self.idrecord_count >= self.radacHeaderObj.nrecords:
3858 3863 self.idrecord_count = 0
3859 3864 self.flagIsNewFile = 1
3860 3865
3861 3866 def readNextBlock(self):
3862 3867
3863 3868 self.readDataBlock()
3864 3869
3865 3870 if self.flagIsNewFile:
3866 3871 self.__setNextFile()
3867 3872 pass
3868 3873
3869 3874 def __hasNotDataInBuffer(self):
3870 3875 #self.radacHeaderObj.npulses should be a different variable to account for the number of pulses to take in the first and last records
3871 3876 if self.profileIndex >= self.radacHeaderObj.npulses:
3872 3877 return 1
3873 3878 return 0
3874 3879
3875 3880 def printUTC(self):
3876 3881 print self.dataOut.utctime
3877 3882 print ''
3878 3883
3879 3884 def setObjProperties(self):
3880 3885 self.dataOut.heightList = self.rangeFromFile/1000.0 #km
3881 3886 self.dataOut.nProfiles = self.radacHeaderObj.npulses
3887 self.dataOut.nRecords = self.radacHeaderObj.nrecords
3888 self.dataOut.nBeams = self.radacHeaderObj.nbeams
3882 3889 self.dataOut.ippSeconds = self.ippSeconds_fromfile
3883 3890 self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
3884 3891 self.dataOut.frequency = self.frequency_h5file
3885 3892 self.dataOut.nBaud = None
3886 3893 self.dataOut.nCode = None
3887 3894 self.dataOut.code = None
3888 3895
3889 3896 self.dataOut.beamCodeDict = self.beamCodeDict
3890 3897 self.dataOut.beamRangeDict = self.beamRangeDict
3891 3898
3892 3899 def getData(self):
3893 3900
3894 3901 if self.flagNoMoreFiles:
3895 3902 self.dataOut.flagNoData = True
3896 3903 print 'Process finished'
3897 3904 return 0
3898 3905
3899 3906 if self.__hasNotDataInBuffer():
3900 3907 self.readNextBlock()
3901 3908 # if not( self.readNextBlock() ):
3902 3909 # return 0
3903 3910 # self.getFirstHeader()
3904 3911
3905 3912 if self.datablock == None: # set this condition when there is no more data to read
3906 3913 self.dataOut.flagNoData = True
3907 3914 return 0
3908 3915
3909 3916 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
3910 3917
3911 3918 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
3912 3919
3913 3920 self.dataOut.flagNoData = False
3914 3921
3915 3922 self.profileIndex += 1
3916 3923
3917 3924 return self.dataOut.data
3918 3925
3919 3926
3920 3927 def run(self, **kwargs):
3921 3928 if not(self.isConfig):
3922 3929 self.setup(**kwargs)
3923 3930 self.setObjProperties()
3924 3931 self.isConfig = True
3925 3932
3926 3933 self.getData()
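A sketch of driving this reader with the setup() keywords defined above; the path and dates are example values only, and getOutput() is inherited from ProcessingUnit:

    import datetime

    readUnitObj = AMISRReader()
    readUnitObj.run(path='/data/amisr',                   # example path only
                    startDate=datetime.date(2012, 11, 20),
                    endDate=datetime.date(2012, 11, 21),
                    startTime=datetime.time(0, 0, 0),
                    endTime=datetime.time(23, 59, 59),
                    walk=True)
    voltObj = readUnitObj.getOutput()                     # AMISR data object, one profile per getData() call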
@@ -1,2132 +1,2150
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10 import math
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 try:
16 16 import cfunctions
17 17 except:
18 18 pass
19 19
20 20 class ProcessingUnit:
21 21
22 22 """
23 23 This is the base class for data processing.
24 24
25 25 It provides the "call" method to invoke operations. An operation can be:
26 26 - an internal method (callMethod)
27 27 - an object of the Operation class (callObject). Before being called, these objects
28 28 have to be registered with the "addOperation" method.
29 29
30 30 """
31 31 # input data object (Voltage, Spectra or Correlation)
32 32 dataIn = None
33 33
34 34 # output data object (Voltage, Spectra or Correlation)
35 35 dataOut = None
36 36
37 37
38 38 objectDict = None
39 39
40 40 def __init__(self):
41 41
42 42 self.objectDict = {}
43 43
44 44 def init(self):
45 45
46 46 raise ValueError, "Not implemented"
47 47
48 48 def addOperation(self, object, objId):
49 49
50 50 """
51 51 Adds the object "object" to the dictionary "self.objectDict" and returns the
52 52 identifier associated with this object.
53 53
54 54 Input:
55 55
56 56 object : object of the "Operation" class
57 57
58 58 Return:
59 59
60 60 objId : identifier of the object, needed to execute the operation
61 61 """
62 62
63 63 self.objectDict[objId] = object
64 64
65 65 return objId
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
70 70 Direct operation on the data (dataOut.data). The values of the attributes of the
71 71 dataOut object must be updated.
72 72
73 73 Input:
74 74
75 75 **kwargs : dictionary of arguments of the function to execute
76 76 """
77 77
78 78 raise ValueError, "ImplementedError"
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
83 83 Executes the method named "name" of this class with the **kwargs arguments.
84 84
85 85 Input:
86 86 name : name of the method to execute
87 87
88 88 **kwargs : dictionary with the argument names and values of the function to execute.
89 89
90 90 """
91 91 if name != 'run':
92 92
93 93 if name == 'init' and self.dataIn.isEmpty():
94 94 self.dataOut.flagNoData = True
95 95 return False
96 96
97 97 if name != 'init' and self.dataOut.isEmpty():
98 98 return False
99 99
100 100 methodToCall = getattr(self, name)
101 101
102 102 methodToCall(**kwargs)
103 103
104 104 if name != 'run':
105 105 return True
106 106
107 107 if self.dataOut.isEmpty():
108 108 return False
109 109
110 110 return True
111 111
112 112 def callObject(self, objId, **kwargs):
113 113
114 114 """
115 115 Executes the operation associated with the object identifier "objId"
116 116
117 117 Input:
118 118
119 119 objId : identifier of the object to execute
120 120
121 121 **kwargs : dictionary with the argument names and values of the function to execute.
122 122
123 123 Return:
124 124
125 125 None
126 126 """
127 127
128 128 if self.dataOut.isEmpty():
129 129 return False
130 130
131 131 object = self.objectDict[objId]
132 132
133 133 object.run(self.dataOut, **kwargs)
134 134
135 135 return True
136 136
137 137 def call(self, operationConf, **kwargs):
138 138
139 139 """
140 140 Returns True if the operation "operationConf.name" is executed with the
141 141 "**kwargs" arguments, False if the operation was not executed.
142 142 The operation can be of two types:
143 143
144 144 1. A method of this class itself:
145 145
146 146 operation.type = "self"
147 147
148 148 2. The "run" method of an object of the Operation class, or of a class derived from it:
149 149 operation.type = "other".
150 150
151 151 This Operation object must have been added beforehand with the "addOperation"
152 152 method and identified with its operation.id.
153 153
154 154
155 155
156 156
157 157 Input:
158 158
159 159 operationConf : operation object with the attributes: name, type and id.
160 160
161 161 """
162 162
163 163 if operationConf.type == 'self':
164 164 sts = self.callMethod(operationConf.name, **kwargs)
165 165
166 166 if operationConf.type == 'other':
167 167 sts = self.callObject(operationConf.id, **kwargs)
168 168
169 169 return sts
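A sketch of the two dispatch paths described above; operationConf is any object carrying name, type and id (normally built by the controller, which is not part of this file), and voltageObj stands for a data object already produced by a reader:

    class OperationConf:
        # minimal stand-in for the controller's configuration object
        def __init__(self, name, type, id=None):
            self.name = name
            self.type = type
            self.id = id

    procUnitObj = VoltageProc()
    procUnitObj.setInput(voltageObj)                         # voltageObj: data from a reader (assumed)
    procUnitObj.addOperation(CohInt(), 1)                    # register an external Operation
    procUnitObj.call(OperationConf('init', 'self'))          # type 'self'  -> callMethod('init')
    procUnitObj.call(OperationConf('run', 'other', 1), n=8)  # type 'other' -> callObject(1, n=8)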
170 170
171 171 def setInput(self, dataIn):
172 172
173 173 self.dataIn = dataIn
174 174
175 175 def getOutput(self):
176 176
177 177 return self.dataOut
178 178
179 179 class Operation():
180 180
181 181 """
182 182 Base class for defining the additional operations that can be added to a ProcessingUnit
183 183 and that need to accumulate previous information about the data to process. Preferably use an
184 184 accumulation buffer inside this class.
185 185
186 186 Example: coherent integration, which needs the information of the previous n profiles (buffer)
187 187
188 188 """
189 189
190 190 __buffer = None
191 191 __isConfig = False
192 192
193 193 def __init__(self):
194 194
195 195 pass
196 196
197 197 def run(self, dataIn, **kwargs):
198 198
199 199 """
200 200 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
201 201
202 202 Input:
203 203
204 204 dataIn : object of the JROData class
205 205
206 206 Return:
207 207
208 208 None
209 209
210 210 Affected:
211 211 __buffer : data reception buffer.
212 212
213 213 """
214 214
215 215 raise ValueError, "ImplementedError"
216 216
217 217 class VoltageProc(ProcessingUnit):
218 218
219 219
220 220 def __init__(self):
221 221
222 222 self.objectDict = {}
223 223 self.dataOut = Voltage()
224 224 self.flip = 1
225 225
226 226 def __updateObjFromAmisrInput(self):
227 227
228 228 self.dataOut.timeZone = self.dataIn.timeZone
229 229 self.dataOut.dstFlag = self.dataIn.dstFlag
230 230 self.dataOut.errorCount = self.dataIn.errorCount
231 231 self.dataOut.useLocalTime = self.dataIn.useLocalTime
232 232
233 233 self.dataOut.flagNoData = self.dataIn.flagNoData
234 234 self.dataOut.data = self.dataIn.data
235 235 self.dataOut.utctime = self.dataIn.utctime
236 236 self.dataOut.channelList = self.dataIn.channelList
237 237 self.dataOut.timeInterval = self.dataIn.timeInterval
238 238 self.dataOut.heightList = self.dataIn.heightList
239 239 self.dataOut.nProfiles = self.dataIn.nProfiles
240 240
241 241 self.dataOut.nCohInt = self.dataIn.nCohInt
242 242 self.dataOut.ippSeconds = self.dataIn.ippSeconds
243 243 self.dataOut.frequency = self.dataIn.frequency
244 244
245 245 pass
246 246
247 247 def init(self):
248 248
249 249
250 250 if self.dataIn.type == 'AMISR':
251 251 self.__updateObjFromAmisrInput()
252 252
253 253 if self.dataIn.type == 'Voltage':
254 254 self.dataOut.copy(self.dataIn)
255 255 # There is no need to copy the dataIn attributes on every init();
256 256 # the copy should be done once per new data block
257 257
258 258 def selectChannels(self, channelList):
259 259
260 260 channelIndexList = []
261 261
262 262 for channel in channelList:
263 263 index = self.dataOut.channelList.index(channel)
264 264 channelIndexList.append(index)
265 265
266 266 self.selectChannelsByIndex(channelIndexList)
267 267
268 268 def selectChannelsByIndex(self, channelIndexList):
269 269 """
270 270 Selects a block of data by channel according to channelIndexList
271 271
272 272 Input:
273 273 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
274 274
275 275 Affected:
276 276 self.dataOut.data
277 277 self.dataOut.channelIndexList
278 278 self.dataOut.nChannels
279 279 self.dataOut.m_ProcessingHeader.totalSpectra
280 280 self.dataOut.systemHeaderObj.numChannels
281 281 self.dataOut.m_ProcessingHeader.blockSize
282 282
283 283 Return:
284 284 None
285 285 """
286 286
287 287 for channelIndex in channelIndexList:
288 288 if channelIndex not in self.dataOut.channelIndexList:
289 289 print channelIndexList
290 290 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
291 291
292 292 nChannels = len(channelIndexList)
293 293
294 294 data = self.dataOut.data[channelIndexList,:]
295 295
296 296 self.dataOut.data = data
297 297 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
298 298 # self.dataOut.nChannels = nChannels
299 299
300 300 return 1
301 301
302 302 def selectHeights(self, minHei=None, maxHei=None):
303 303 """
304 304 Selects a block of data by a group of height values within the range
305 305 minHei <= height <= maxHei
306 306
307 307 Input:
308 308 minHei : minimum height value to consider
309 309 maxHei : maximum height value to consider
310 310
311 311 Affected:
312 312 Several values are changed indirectly through the selectHeightsByIndex method
313 313
314 314 Return:
315 315 1 if the method executed successfully, otherwise 0
316 316 """
317 317
318 318 if minHei == None:
319 319 minHei = self.dataOut.heightList[0]
320 320
321 321 if maxHei == None:
322 322 maxHei = self.dataOut.heightList[-1]
323 323
324 324 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
325 325 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
326 326
327 327
328 328 if (maxHei > self.dataOut.heightList[-1]):
329 329 maxHei = self.dataOut.heightList[-1]
330 330 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
331 331
332 332 minIndex = 0
333 333 maxIndex = 0
334 334 heights = self.dataOut.heightList
335 335
336 336 inda = numpy.where(heights >= minHei)
337 337 indb = numpy.where(heights <= maxHei)
338 338
339 339 try:
340 340 minIndex = inda[0][0]
341 341 except:
342 342 minIndex = 0
343 343
344 344 try:
345 345 maxIndex = indb[0][-1]
346 346 except:
347 347 maxIndex = len(heights)
348 348
349 349 self.selectHeightsByIndex(minIndex, maxIndex)
350 350
351 351 return 1
352 352
353 353
354 354 def selectHeightsByIndex(self, minIndex, maxIndex):
355 355 """
356 356 Selects a block of data by a group of height indexes within the range
357 357 minIndex <= index <= maxIndex
358 358
359 359 Input:
360 360 minIndex : minimum height index to consider
361 361 maxIndex : maximum height index to consider
362 362
363 363 Affected:
364 364 self.dataOut.data
365 365 self.dataOut.heightList
366 366
367 367 Return:
368 368 1 if the method executed successfully, otherwise 0
369 369 """
370 370
371 371 if (minIndex < 0) or (minIndex > maxIndex):
372 372 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
373 373
374 374 if (maxIndex >= self.dataOut.nHeights):
375 375 maxIndex = self.dataOut.nHeights-1
376 376 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
377 377
378 378 nHeights = maxIndex - minIndex + 1
379 379
380 380 #voltage
381 381 data = self.dataOut.data[:,minIndex:maxIndex+1]
382 382
383 383 firstHeight = self.dataOut.heightList[minIndex]
384 384
385 385 self.dataOut.data = data
386 386 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
387 387
388 388 return 1
389 389
390 390
391 391 def filterByHeights(self, window):
392 392 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
393 393
394 394 if window == None:
395 395 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
396 396
397 397 newdelta = deltaHeight * window
398 398 r = self.dataOut.data.shape[1] % window
399 399 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
400 400 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
401 401 buffer = numpy.sum(buffer,2)
402 402 self.dataOut.data = buffer
403 403 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*(self.dataOut.nHeights-r)/window,newdelta)
404 404 self.dataOut.windowOfFilter = window
405 405
406 406 def deFlip(self):
407 407 self.dataOut.data *= self.flip
408 408 self.flip *= -1.
409 409
410 410 def setRadarFrequency(self, frequency=None):
411 411 if frequency != None:
412 412 self.dataOut.frequency = frequency
413 413
414 414 return 1
415 415
416 416 class CohInt(Operation):
417 417
418 418 __isConfig = False
419 419
420 420 __profIndex = 0
421 421 __withOverapping = False
422 422
423 423 __byTime = False
424 424 __initime = None
425 425 __lastdatatime = None
426 426 __integrationtime = None
427 427
428 428 __buffer = None
429 429
430 430 __dataReady = False
431 431
432 432 n = None
433 433
434 434
435 435 def __init__(self):
436 436
437 437 self.__isConfig = False
438 438
439 439 def setup(self, n=None, timeInterval=None, overlapping=False):
440 440 """
441 441 Set the parameters of the integration class.
442 442
443 443 Inputs:
444 444
445 445 n : Number of coherent integrations
446 446 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
447 447 overlapping :
448 448
449 449 """
450 450
451 451 self.__initime = None
452 452 self.__lastdatatime = 0
453 453 self.__buffer = None
454 454 self.__dataReady = False
455 455
456 456
457 457 if n == None and timeInterval == None:
458 458 raise ValueError, "n or timeInterval should be specified ..."
459 459
460 460 if n != None:
461 461 self.n = n
462 462 self.__byTime = False
463 463 else:
464 464 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
465 465 self.n = 9999
466 466 self.__byTime = True
467 467
468 468 if overlapping:
469 469 self.__withOverapping = True
470 470 self.__buffer = None
471 471 else:
472 472 self.__withOverapping = False
473 473 self.__buffer = 0
474 474
475 475 self.__profIndex = 0
476 476
477 477 def putData(self, data):
478 478
479 479 """
480 480 Add a profile to the __buffer and increase in one the __profileIndex
481 481
482 482 """
483 483
484 484 if not self.__withOverapping:
485 485 self.__buffer += data.copy()
486 486 self.__profIndex += 1
487 487 return
488 488
489 489 #Overlapping data
490 490 nChannels, nHeis = data.shape
491 491 data = numpy.reshape(data, (1, nChannels, nHeis))
492 492
493 493 #If the buffer is empty then it takes the data value
494 494 if self.__buffer == None:
495 495 self.__buffer = data
496 496 self.__profIndex += 1
497 497 return
498 498
499 499 #If the buffer length is lower than n then stack the data value
500 500 if self.__profIndex < self.n:
501 501 self.__buffer = numpy.vstack((self.__buffer, data))
502 502 self.__profIndex += 1
503 503 return
504 504
505 505 #If the buffer length is equal to n then replace the last buffer value with the data value
506 506 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
507 507 self.__buffer[self.n-1] = data
508 508 self.__profIndex = self.n
509 509 return
510 510
511 511
512 512 def pushData(self):
513 513 """
514 514 Return the sum of the last profiles and the profiles used in the sum.
515 515
516 516 Affected:
517 517
518 518 self.__profileIndex
519 519
520 520 """
521 521
522 522 if not self.__withOverapping:
523 523 data = self.__buffer
524 524 n = self.__profIndex
525 525
526 526 self.__buffer = 0
527 527 self.__profIndex = 0
528 528
529 529 return data, n
530 530
531 531 #Integration with Overlapping
532 532 data = numpy.sum(self.__buffer, axis=0)
533 533 n = self.__profIndex
534 534
535 535 return data, n
536 536
537 537 def byProfiles(self, data):
538 538
539 539 self.__dataReady = False
540 540 avgdata = None
541 541 n = None
542 542
543 543 self.putData(data)
544 544
545 545 if self.__profIndex == self.n:
546 546
547 547 avgdata, n = self.pushData()
548 548 self.__dataReady = True
549 549
550 550 return avgdata
551 551
552 552 def byTime(self, data, datatime):
553 553
554 554 self.__dataReady = False
555 555 avgdata = None
556 556 n = None
557 557
558 558 self.putData(data)
559 559
560 560 if (datatime - self.__initime) >= self.__integrationtime:
561 561 avgdata, n = self.pushData()
562 562 self.n = n
563 563 self.__dataReady = True
564 564
565 565 return avgdata
566 566
567 567 def integrate(self, data, datatime=None):
568 568
569 569 if self.__initime == None:
570 570 self.__initime = datatime
571 571
572 572 if self.__byTime:
573 573 avgdata = self.byTime(data, datatime)
574 574 else:
575 575 avgdata = self.byProfiles(data)
576 576
577 577
578 578 self.__lastdatatime = datatime
579 579
580 580 if avgdata == None:
581 581 return None, None
582 582
583 583 avgdatatime = self.__initime
584 584
585 585 deltatime = datatime -self.__lastdatatime
586 586
587 587 if not self.__withOverapping:
588 588 self.__initime = datatime
589 589 else:
590 590 self.__initime += deltatime
591 591
592 592 return avgdata, avgdatatime
593 593
594 594 def run(self, dataOut, **kwargs):
595 595
596 596 if not self.__isConfig:
597 597 self.setup(**kwargs)
598 598 self.__isConfig = True
599 599
600 600 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
601 601
602 602 # dataOut.timeInterval *= n
603 603 dataOut.flagNoData = True
604 604
605 605 if self.__dataReady:
606 606 dataOut.data = avgdata
607 607 dataOut.nCohInt *= self.n
608 608 dataOut.utctime = avgdatatime
609 609 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
610 610 dataOut.flagNoData = False
611 611
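A sketch of attaching this operation to a VoltageProc unit; with n=8 eight profiles are summed before the result is released (dataOut.flagNoData stays True for the intermediate profiles), and procUnitObj stands for an already configured VoltageProc:

    cohIntObj = CohInt()
    procUnitObj.addOperation(cohIntObj, 1)           # the id 1 is arbitrary
    # in the processing loop the controller ends up calling:
    cohIntObj.run(procUnitObj.dataOut, n=8)          # sums 8 profiles, then dataOut.nCohInt *= 8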
612 612
613 613 class Decoder(Operation):
614 614
615 615 __isConfig = False
616 616 __profIndex = 0
617 617
618 618 code = None
619 619
620 620 nCode = None
621 621 nBaud = None
622 622
623 623 def __init__(self):
624 624
625 625 self.__isConfig = False
626 626
627 627 def setup(self, code, shape):
628 628
629 629 self.__profIndex = 0
630 630
631 631 self.code = code
632 632
633 633 self.nCode = len(code)
634 634 self.nBaud = len(code[0])
635 635
636 636 self.__nChannels, self.__nHeis = shape
637 637
638 638 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
639 639
640 640 __codeBuffer[:,0:self.nBaud] = self.code
641 641
642 642 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
643 643
644 644 self.ndatadec = self.__nHeis - self.nBaud + 1
645 645
646 646 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
647 647
648 648 def convolutionInFreq(self, data):
649 649
650 650 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
651 651
652 652 fft_data = numpy.fft.fft(data, axis=1)
653 653
654 654 conv = fft_data*fft_code
655 655
656 656 data = numpy.fft.ifft(conv,axis=1)
657 657
658 658 datadec = data[:,:-self.nBaud+1]
659 659
660 660 return datadec
661 661
662 662 def convolutionInFreqOpt(self, data):
663 663
664 664 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
665 665
666 666 data = cfunctions.decoder(fft_code, data)
667 667
668 668 datadec = data[:,:-self.nBaud+1]
669 669
670 670 return datadec
671 671
672 672 def convolutionInTime(self, data):
673 673
674 674 code = self.code[self.__profIndex]
675 675
676 676 for i in range(self.__nChannels):
677 677 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='valid')
678 678
679 679 return self.datadecTime
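A minimal numpy sketch of the per-channel 'valid' correlation used above, with a toy 3-baud code and one channel of 8 heights:

    import numpy

    code = numpy.array([1, -1, 1])                        # toy 3-baud code
    data = numpy.array([[1, -1, 1, 0, 0, 1, -1, 1]])      # (1 channel, 8 heights), example values
    datadec = numpy.correlate(data[0, :], code, mode='valid')
    # len(datadec) == nHeis - nBaud + 1 == 6, the same ndatadec used above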
680 680
681 681 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0):
682 682
683 683 if code == None:
684 684 code = dataOut.code
685 685 else:
686 686 code = numpy.array(code).reshape(nCode,nBaud)
687 687 dataOut.code = code
688 688 dataOut.nCode = nCode
689 689 dataOut.nBaud = nBaud
690 690 dataOut.radarControllerHeaderObj.code = code
691 691 dataOut.radarControllerHeaderObj.nCode = nCode
692 692 dataOut.radarControllerHeaderObj.nBaud = nBaud
693 693
694 694
695 695 if not self.__isConfig:
696 696
697 697 self.setup(code, dataOut.data.shape)
698 698 self.__isConfig = True
699 699
700 700 if mode == 0:
701 701 datadec = self.convolutionInTime(dataOut.data)
702 702
703 703 if mode == 1:
704 704 datadec = self.convolutionInFreq(dataOut.data)
705 705
706 706 if mode == 2:
707 707 datadec = self.convolutionInFreqOpt(dataOut.data)
708 708
709 709 dataOut.data = datadec
710 710
711 711 dataOut.heightList = dataOut.heightList[0:self.ndatadec]
712 712
713 713 dataOut.flagDecodeData = True #the data is now decoded
714 714
715 715 if self.__profIndex == self.nCode-1:
716 716 self.__profIndex = 0
717 717 return 1
718 718
719 719 self.__profIndex += 1
720 720
721 721 return 1
722 722 # dataOut.flagDeflipData = True #the data is now deflipped
723 723
724 724
725 725
726 726 class SpectraProc(ProcessingUnit):
727 727
728 728 def __init__(self):
729 729
730 730 self.objectDict = {}
731 731 self.buffer = None
732 732 self.firstdatatime = None
733 733 self.profIndex = 0
734 734 self.dataOut = Spectra()
735 735
736 736 def __updateObjFromInput(self):
737 737
738 738 self.dataOut.timeZone = self.dataIn.timeZone
739 739 self.dataOut.dstFlag = self.dataIn.dstFlag
740 740 self.dataOut.errorCount = self.dataIn.errorCount
741 741 self.dataOut.useLocalTime = self.dataIn.useLocalTime
742 742
743 743 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
744 744 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
745 745 self.dataOut.channelList = self.dataIn.channelList
746 746 self.dataOut.heightList = self.dataIn.heightList
747 747 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
748 748 # self.dataOut.nHeights = self.dataIn.nHeights
749 749 # self.dataOut.nChannels = self.dataIn.nChannels
750 750 self.dataOut.nBaud = self.dataIn.nBaud
751 751 self.dataOut.nCode = self.dataIn.nCode
752 752 self.dataOut.code = self.dataIn.code
753 753 self.dataOut.nProfiles = self.dataOut.nFFTPoints
754 754 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
755 755 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
756 756 self.dataOut.utctime = self.firstdatatime
757 757 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
758 758 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data is already deflipped
759 759 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
760 760 self.dataOut.nCohInt = self.dataIn.nCohInt
761 761 self.dataOut.nIncohInt = 1
762 762 self.dataOut.ippSeconds = self.dataIn.ippSeconds
763 763 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
764 764
765 765 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
766 766 self.dataOut.frequency = self.dataIn.frequency
767 767 self.dataOut.realtime = self.dataIn.realtime
768 768
769 769 def __getFft(self):
770 770 """
771 771 Converts Voltage values into Spectra
772 772
773 773 Affected:
774 774 self.dataOut.data_spc
775 775 self.dataOut.data_cspc
776 776 self.dataOut.data_dc
777 777 self.dataOut.heightList
778 778 self.profIndex
779 779 self.buffer
780 780 self.dataOut.flagNoData
781 781 """
782 782 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
783 783 fft_volt = fft_volt.astype(numpy.dtype('complex'))
784 784 dc = fft_volt[:,0,:]
785 785
786 786 #computation of the self-spectra
787 787 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
788 788 spc = fft_volt * numpy.conjugate(fft_volt)
789 789 spc = spc.real
790 790
791 791 blocksize = 0
792 792 blocksize += dc.size
793 793 blocksize += spc.size
794 794
795 795 cspc = None
796 796 pairIndex = 0
797 797 if self.dataOut.pairsList != None:
798 798 #computation of the cross-spectra
799 799 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
800 800 for pair in self.dataOut.pairsList:
801 801 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
802 802 pairIndex += 1
803 803 blocksize += cspc.size
804 804
805 805 self.dataOut.data_spc = spc
806 806 self.dataOut.data_cspc = cspc
807 807 self.dataOut.data_dc = dc
808 808 self.dataOut.blockSize = blocksize
809 809 self.dataOut.flagShiftFFT = False
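A minimal numpy sketch of the spectra computation above for one channel and one height; the profile axis is axis 1, as in the buffer, and the random input is an example only:

    import numpy

    nFFTPoints = 16
    volt = numpy.random.randn(1, nFFTPoints, 1) + 1j*numpy.random.randn(1, nFFTPoints, 1)
    fft_volt = numpy.fft.fft(volt, n=nFFTPoints, axis=1)
    dc = fft_volt[:, 0, :]                                # zero-frequency (DC) component
    fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
    spc = (fft_volt * numpy.conjugate(fft_volt)).real     # self-spectra: power per Doppler bin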
810 810
811 811 def init(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None):
812 812
813 813 self.dataOut.flagNoData = True
814 814
815 815 if self.dataIn.type == "Spectra":
816 816 self.dataOut.copy(self.dataIn)
817 817 return
818 818
819 819 if self.dataIn.type == "Voltage":
820 820
821 821 if nFFTPoints == None:
822 822 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
823 823
824 824 if pairsList == None:
825 825 nPairs = 0
826 826 else:
827 827 nPairs = len(pairsList)
828 828
829 829 if ippFactor == None:
830 830 ippFactor = 1
831 831 self.dataOut.ippFactor = ippFactor
832 832
833 833 self.dataOut.nFFTPoints = nFFTPoints
834 834 self.dataOut.pairsList = pairsList
835 835 self.dataOut.nPairs = nPairs
836 836
837 837 if self.buffer == None:
838 838 self.buffer = numpy.zeros((self.dataIn.nChannels,
839 839 nProfiles,
840 840 self.dataIn.nHeights),
841 841 dtype='complex')
842 842
843 843
844 844 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
845 845 self.profIndex += 1
846 846
847 847 if self.firstdatatime == None:
848 848 self.firstdatatime = self.dataIn.utctime
849 849
850 850 if self.profIndex == nProfiles:
851 851 self.__updateObjFromInput()
852 852 self.__getFft()
853 853
854 854 self.dataOut.flagNoData = False
855 855
856 856 self.buffer = None
857 857 self.firstdatatime = None
858 858 self.profIndex = 0
859 859
860 860 return
861 861
862 862 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
863 863
864 864 def selectChannels(self, channelList):
865 865
866 866 channelIndexList = []
867 867
868 868 for channel in channelList:
869 869 index = self.dataOut.channelList.index(channel)
870 870 channelIndexList.append(index)
871 871
872 872 self.selectChannelsByIndex(channelIndexList)
873 873
874 874 def selectChannelsByIndex(self, channelIndexList):
875 875 """
876 876 Selects a block of data by channel according to channelIndexList
877 877
878 878 Input:
879 879 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
880 880
881 881 Affected:
882 882 self.dataOut.data_spc
883 883 self.dataOut.channelIndexList
884 884 self.dataOut.nChannels
885 885
886 886 Return:
887 887 None
888 888 """
889 889
890 890 for channelIndex in channelIndexList:
891 891 if channelIndex not in self.dataOut.channelIndexList:
892 892 print channelIndexList
893 893 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
894 894
895 895 nChannels = len(channelIndexList)
896 896
897 897 data_spc = self.dataOut.data_spc[channelIndexList,:]
898 898
899 899 self.dataOut.data_spc = data_spc
900 900 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
901 901 # self.dataOut.nChannels = nChannels
902 902
903 903 return 1
904 904
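# Example (sketch): assuming `procObj` is a configured SpectraProc whose
# dataOut.channelList is [0, 1, 2, 3], either call below keeps only the
# spectra of channels 0 and 2:
#
#     procObj.selectChannels([0, 2])          # by channel number
#     procObj.selectChannelsByIndex([0, 2])   # by position in channelList
#
# Afterwards dataOut.data_spc has shape (2, nFFTPoints, nHeights) and
# dataOut.channelList is [0, 2].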
905 905 def selectHeights(self, minHei, maxHei):
906 906 """
907 907 Selects a block of data by height, keeping the heights in the range
908 908 minHei <= height <= maxHei (see the index sketch after this method)
909 909
910 910 Input:
911 911 minHei : minimum height to keep
912 912 maxHei : maximum height to keep
913 913
914 914 Affected:
915 915 Several attributes are changed indirectly through selectHeightsByIndex
916 916
917 917 Return:
918 918 1 if the method ran successfully, otherwise 0
919 919 """
920 920 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
921 921 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
922 922
923 923 if (maxHei > self.dataOut.heightList[-1]):
924 924 maxHei = self.dataOut.heightList[-1]
925 925 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
926 926
927 927 minIndex = 0
928 928 maxIndex = 0
929 929 heights = self.dataOut.heightList
930 930
931 931 inda = numpy.where(heights >= minHei)
932 932 indb = numpy.where(heights <= maxHei)
933 933
934 934 try:
935 935 minIndex = inda[0][0]
936 936 except:
937 937 minIndex = 0
938 938
939 939 try:
940 940 maxIndex = indb[0][-1]
941 941 except:
942 942 maxIndex = len(heights)
943 943
944 944 self.selectHeightsByIndex(minIndex, maxIndex)
945 945
946 946 return 1
947 947
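# Example (sketch, hypothetical values): with a heightList of
# [90., 105., 120., ..., 600.] km (15 km spacing), selectHeights(100, 400)
# resolves the indices with numpy.where and delegates to selectHeightsByIndex:
#
#     minIndex = numpy.where(heights >= 100)[0][0]    # -> 1  (105 km)
#     maxIndex = numpy.where(heights <= 400)[0][-1]   # -> 20 (390 km)
#
# so the kept block covers heights[1:21].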
948 948 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
949 949 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
950 950
951 951 if hei_ref != None:
952 952 newheis = numpy.where(self.dataOut.heightList>hei_ref)
953 953
954 954 minIndex = min(newheis[0])
955 955 maxIndex = max(newheis[0])
956 956 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
957 957 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
958 958
959 959 # determine the beacon height indices
960 960 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
961 961 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
962 962 beacon_dB = numpy.sort(avg_dB)[-nheis:]
963 963 beacon_heiIndexList = []
964 964 for val in avg_dB.tolist():
965 965 if val >= beacon_dB[0]:
966 966 beacon_heiIndexList.append(avg_dB.tolist().index(val))
967 967
968 968 #data_spc = data_spc[:,:,beacon_heiIndexList]
969 969 data_cspc = None
970 970 if self.dataOut.data_cspc != None:
971 971 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
972 972 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
973 973
974 974 data_dc = None
975 975 if self.dataOut.data_dc != None:
976 976 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
977 977 #data_dc = data_dc[:,beacon_heiIndexList]
978 978
979 979 self.dataOut.data_spc = data_spc
980 980 self.dataOut.data_cspc = data_cspc
981 981 self.dataOut.data_dc = data_dc
982 982 self.dataOut.heightList = heightList
983 983 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
984 984
985 985 return 1
986 986
987 987
988 988 def selectHeightsByIndex(self, minIndex, maxIndex):
989 989 """
990 990 Selects a block of data by height index, keeping the indexes in the range
991 991 minIndex <= index <= maxIndex
992 992
993 993 Input:
994 994 minIndex : minimum height index to keep
995 995 maxIndex : maximum height index to keep
996 996
997 997 Affected:
998 998 self.dataOut.data_spc
999 999 self.dataOut.data_cspc
1000 1000 self.dataOut.data_dc
1001 1001 self.dataOut.heightList
1002 1002
1003 1003 Return:
1004 1004 1 if the method ran successfully, otherwise 0
1005 1005 """
1006 1006
1007 1007 if (minIndex < 0) or (minIndex > maxIndex):
1008 1008 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1009 1009
1010 1010 if (maxIndex >= self.dataOut.nHeights):
1011 1011 maxIndex = self.dataOut.nHeights-1
1012 1012 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1013 1013
1014 1014 nHeights = maxIndex - minIndex + 1
1015 1015
1016 1016 #Spectra
1017 1017 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
1018 1018
1019 1019 data_cspc = None
1020 1020 if self.dataOut.data_cspc != None:
1021 1021 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
1022 1022
1023 1023 data_dc = None
1024 1024 if self.dataOut.data_dc != None:
1025 1025 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
1026 1026
1027 1027 self.dataOut.data_spc = data_spc
1028 1028 self.dataOut.data_cspc = data_cspc
1029 1029 self.dataOut.data_dc = data_dc
1030 1030
1031 1031 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
1032 1032
1033 1033 return 1
1034 1034
1035 1035 def removeDC(self, mode = 2):
1036 1036 jspectra = self.dataOut.data_spc
1037 1037 jcspectra = self.dataOut.data_cspc
1038 1038
1039 1039
1040 1040 num_chan = jspectra.shape[0]
1041 1041 num_hei = jspectra.shape[2]
1042 1042
1043 1043 if jcspectra != None:
1044 1044 jcspectraExist = True
1045 1045 num_pairs = jcspectra.shape[0]
1046 1046 else: jcspectraExist = False
1047 1047
1048 1048 freq_dc = jspectra.shape[1]/2
1049 1049 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
1050 1050
1051 1051 if ind_vel[0]<0:
1052 1052 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
1053 1053
1054 1054 if mode == 1:
1055 1055 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
1056 1056
1057 1057 if jcspectraExist:
1058 1058 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
1059 1059
1060 1060 if mode == 2:
1061 1061
1062 1062 vel = numpy.array([-2,-1,1,2])
1063 1063 xx = numpy.zeros([4,4])
1064 1064
1065 1065 for fil in range(4):
1066 1066 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1067 1067
1068 1068 xx_inv = numpy.linalg.inv(xx)
1069 1069 xx_aux = xx_inv[0,:]
1070 1070
1071 1071 for ich in range(num_chan):
1072 1072 yy = jspectra[ich,ind_vel,:]
1073 1073 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1074 1074
1075 1075 junkid = jspectra[ich,freq_dc,:]<=0
1076 1076 cjunkid = sum(junkid)
1077 1077
1078 1078 if cjunkid.any():
1079 1079 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1080 1080
1081 1081 if jcspectraExist:
1082 1082 for ip in range(num_pairs):
1083 1083 yy = jcspectra[ip,ind_vel,:]
1084 1084 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
1085 1085
1086 1086
1087 1087 self.dataOut.data_spc = jspectra
1088 1088 self.dataOut.data_cspc = jcspectra
1089 1089
1090 1090 return 1
1091 1091
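# Note on removeDC(mode=2) (sketch): the DC bin of each spectrum is replaced
# by a cubic-polynomial estimate built from the four neighbouring bins at
# relative positions vel = [-2, -1, 1, 2]. xx is the 4x4 Vandermonde matrix
# xx[i, j] = vel[i]**j, and the first row of its inverse gives the weights
# that evaluate the fitted polynomial at 0 (the DC position):
#
#     import numpy
#     vel = numpy.array([-2, -1, 1, 2])
#     xx = numpy.array([v**numpy.arange(4) for v in vel], dtype=float)
#     w = numpy.linalg.inv(xx)[0, :]     # weights applied to the 4 neighbours
#     # y: the 4 spectrum values at the neighbouring bins
#     # dc_estimate = numpy.dot(w, y)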
1092 1092 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
1093 1093
1094 1094 jspectra = self.dataOut.data_spc
1095 1095 jcspectra = self.dataOut.data_cspc
1096 1096 jnoise = self.dataOut.getNoise()
1097 1097 num_incoh = self.dataOut.nIncohInt
1098 1098
1099 1099 num_channel = jspectra.shape[0]
1100 1100 num_prof = jspectra.shape[1]
1101 1101 num_hei = jspectra.shape[2]
1102 1102
1103 1103 #hei_interf
1104 1104 if hei_interf == None:
1105 1105 count_hei = num_hei/2 #integer division is intended here
1106 1106 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
1107 1107 hei_interf = numpy.asarray(hei_interf)[0]
1108 1108 #nhei_interf
1109 1109 if (nhei_interf == None):
1110 1110 nhei_interf = 5
1111 1111 if (nhei_interf < 1):
1112 1112 nhei_interf = 1
1113 1113 if (nhei_interf > count_hei):
1114 1114 nhei_interf = count_hei
1115 1115 if (offhei_interf == None):
1116 1116 offhei_interf = 0
1117 1117
1118 1118 ind_hei = range(num_hei)
1119 1119 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1120 1120 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1121 1121 mask_prof = numpy.asarray(range(num_prof))
1122 1122 num_mask_prof = mask_prof.size
1123 1123 comp_mask_prof = [0, num_prof/2]
1124 1124
1125 1125
1126 1126 #noise_exist: determines whether jnoise is defined and holds the noise estimate for every channel
1127 1127 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1128 1128 jnoise = numpy.nan*numpy.ones(num_channel) #keep jnoise indexable; the test below then gives noise_exist = False
1129 1129 noise_exist = jnoise[0] < numpy.Inf
1130 1130
1131 1131 #Interference removal routine
1132 1132 for ich in range(num_channel):
1133 1133 #Sort the spectra heights by their power (lowest to highest)
1134 1134 power = jspectra[ich,mask_prof,:]
1135 1135 power = power[:,hei_interf]
1136 1136 power = power.sum(axis = 0)
1137 1137 psort = power.ravel().argsort()
1138 1138
1139 1139 #Estimate the mean interference in the power spectra using the lowest-power heights
1140 1140 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1141 1141
1142 1142 if noise_exist:
1143 1143 # tmp_noise = jnoise[ich] / num_prof
1144 1144 tmp_noise = jnoise[ich]
1145 1145 junkspc_interf = junkspc_interf - tmp_noise
1146 1146 #junkspc_interf[:,comp_mask_prof] = 0
1147 1147
1148 1148 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
1149 1149 jspc_interf = jspc_interf.transpose()
1150 1150 #Compute the average interference spectrum
1151 1151 noiseid = numpy.where(jspc_interf <= tmp_noise/ math.sqrt(num_incoh))
1152 1152 noiseid = noiseid[0]
1153 1153 cnoiseid = noiseid.size
1154 1154 interfid = numpy.where(jspc_interf > tmp_noise/ math.sqrt(num_incoh))
1155 1155 interfid = interfid[0]
1156 1156 cinterfid = interfid.size
1157 1157
1158 1158 if (cnoiseid > 0): jspc_interf[noiseid] = 0
1159 1159
1160 1160 #Expand the set of profiles to be cleaned
1161 1161 if (cinterfid > 0):
1162 1162 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
1163 1163 new_interfid = numpy.asarray(new_interfid)
1164 1164 new_interfid = {x for x in new_interfid}
1165 1165 new_interfid = numpy.array(list(new_interfid))
1166 1166 new_cinterfid = new_interfid.size
1167 1167 else: new_cinterfid = 0
1168 1168
1169 1169 for ip in range(new_cinterfid):
1170 1170 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
1171 1171 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
1172 1172
1173 1173
1174 1174 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf #TODO: check indices
1175 1175
1176 1176 #Remove the interference at the profile with the strongest interference
1177 1177 ListAux = jspc_interf[mask_prof].tolist()
1178 1178 maxid = ListAux.index(max(ListAux))
1179 1179
1180 1180
1181 1181 if cinterfid > 0:
1182 1182 for ip in range(cinterfid*(interf == 2) - 1):
1183 1183 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/math.sqrt(num_incoh))).nonzero()
1184 1184 cind = len(ind)
1185 1185
1186 1186 if (cind > 0):
1187 1187 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/math.sqrt(num_incoh))
1188 1188
1189 1189 ind = numpy.array([-2,-1,1,2])
1190 1190 xx = numpy.zeros([4,4])
1191 1191
1192 1192 for id1 in range(4):
1193 1193 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1194 1194
1195 1195 xx_inv = numpy.linalg.inv(xx)
1196 1196 xx = xx_inv[:,0]
1197 1197 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1198 1198 yy = jspectra[ich,mask_prof[ind],:]
1199 1199 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1200 1200
1201 1201
1202 1202 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/math.sqrt(num_incoh))).nonzero()
1203 1203 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/math.sqrt(num_incoh))
1204 1204
1205 1205 #Interference removal in the cross-spectra
1206 1206 if jcspectra == None: return jspectra, jcspectra
1207 1207 num_pairs = jcspectra.size/(num_prof*num_hei)
1208 1208 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1209 1209
1210 1210 for ip in range(num_pairs):
1211 1211
1212 1212 #-------------------------------------------
1213 1213
1214 1214 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
1215 1215 cspower = cspower[:,hei_interf]
1216 1216 cspower = cspower.sum(axis = 0)
1217 1217
1218 1218 cspsort = cspower.ravel().argsort()
1219 1219 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
1220 1220 junkcspc_interf = junkcspc_interf.transpose()
1221 1221 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
1222 1222
1223 1223 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1224 1224
1225 1225 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1226 1226 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
1227 1227 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
1228 1228
1229 1229 for iprof in range(num_prof):
1230 1230 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
1231 1231 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
1232 1232
1233 1233 #Remove the interference
1234 1234 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
1235 1235
1236 1236 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1237 1237 maxid = ListAux.index(max(ListAux))
1238 1238
1239 1239 ind = numpy.array([-2,-1,1,2])
1240 1240 xx = numpy.zeros([4,4])
1241 1241
1242 1242 for id1 in range(4):
1243 1243 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
1244 1244
1245 1245 xx_inv = numpy.linalg.inv(xx)
1246 1246 xx = xx_inv[:,0]
1247 1247
1248 1248 ind = (ind + maxid + num_mask_prof)%num_mask_prof
1249 1249 yy = jcspectra[ip,mask_prof[ind],:]
1250 1250 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
1251 1251
1252 1252 #Save results
1253 1253 self.dataOut.data_spc = jspectra
1254 1254 self.dataOut.data_cspc = jcspectra
1255 1255
1256 1256 return 1
1257 1257
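# Summary of removeInterference (a reading of the code above, for reference):
# 1. For each channel, rank the heights in hei_interf by total power and
#    average the nhei_interf lowest-power ones (minus the channel noise) to
#    get an interference spectrum estimate, jspc_interf.
# 2. Profiles whose estimate exceeds noise/sqrt(num_incoh) are treated as
#    interfered; the estimate is subtracted from all heights and the worst
#    profile is rebuilt from its neighbours with the same 4-point polynomial
#    interpolation used in removeDC(mode=2).
# 3. Roughly the same height-ranking / median procedure is then applied to
#    each cross-spectrum pair.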
1258 1258 def setRadarFrequency(self, frequency=None):
1259 1259 if frequency != None:
1260 1260 self.dataOut.frequency = frequency
1261 1261
1262 1262 return 1
1263 1263
1264 1264 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
1265 1265 #height range validation
1266 1266 if minHei == None:
1267 1267 minHei = self.dataOut.heightList[0]
1268 1268
1269 1269 if maxHei == None:
1270 1270 maxHei = self.dataOut.heightList[-1]
1271 1271
1272 1272 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
1273 1273 print 'minHei: %.2f is out of the heights range'%(minHei)
1274 1274 print 'minHei is set to %.2f'%(self.dataOut.heightList[0])
1275 1275 minHei = self.dataOut.heightList[0]
1276 1276
1277 1277 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
1278 1278 print 'maxHei: %.2f is out of the heights range'%(maxHei)
1279 1279 print 'maxHei is set to %.2f'%(self.dataOut.heightList[-1])
1280 1280 maxHei = self.dataOut.heightList[-1]
1281 1281
1282 1282 # velocity range validation
1283 1283 velrange = self.dataOut.getVelRange(1)
1284 1284
1285 1285 if minVel == None:
1286 1286 minVel = velrange[0]
1287 1287
1288 1288 if maxVel == None:
1289 1289 maxVel = velrange[-1]
1290 1290
1291 1291 if (minVel < velrange[0]) or (minVel > maxVel):
1292 1292 print 'minVel: %.2f is out of the velocity range'%(minVel)
1293 1293 print 'minVel is set to %.2f'%(velrange[0])
1294 1294 minVel = velrange[0]
1295 1295
1296 1296 if (maxVel > velrange[-1]) or (maxVel < minVel):
1297 1297 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
1298 1298 print 'maxVel is set to %.2f'%(velrange[-1])
1299 1299 maxVel = velrange[-1]
1300 1300
1301 1301 # select height indices for the range
1302 1302 minIndex = 0
1303 1303 maxIndex = 0
1304 1304 heights = self.dataOut.heightList
1305 1305
1306 1306 inda = numpy.where(heights >= minHei)
1307 1307 indb = numpy.where(heights <= maxHei)
1308 1308
1309 1309 try:
1310 1310 minIndex = inda[0][0]
1311 1311 except:
1312 1312 minIndex = 0
1313 1313
1314 1314 try:
1315 1315 maxIndex = indb[0][-1]
1316 1316 except:
1317 1317 maxIndex = len(heights)
1318 1318
1319 1319 if (minIndex < 0) or (minIndex > maxIndex):
1320 1320 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
1321 1321
1322 1322 if (maxIndex >= self.dataOut.nHeights):
1323 1323 maxIndex = self.dataOut.nHeights-1
1324 1324
1325 1325 # select velocity indices
1326 1326 indminvel = numpy.where(velrange >= minVel)
1327 1327 indmaxvel = numpy.where(velrange <= maxVel)
1328 1328 try:
1329 1329 minIndexVel = indminvel[0][0]
1330 1330 except:
1331 1331 minIndexVel = 0
1332 1332
1333 1333 try:
1334 1334 maxIndexVel = indmaxvel[0][-1]
1335 1335 except:
1336 1336 maxIndexVel = len(velrange)
1337 1337
1338 1338 #select the spectra sub-block
1339 1339 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
1340 1340 #noise estimation
1341 1341 noise = numpy.zeros(self.dataOut.nChannels)
1342 1342
1343 1343 for channel in range(self.dataOut.nChannels):
1344 1344 daux = data_spc[channel,:,:]
1345 1345 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
1346 1346
1347 1347 self.dataOut.noise = noise.copy()
1348 1348
1349 1349 return 1
1350 1350
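# Example (sketch): estimating the noise level over a restricted window.
# The height/velocity limits below are hypothetical; by default the whole
# spectrum is used. hildebrand_sekhon() receives the (nFFTPoints, nHeights)
# block of one channel plus the number of incoherent integrations:
#
#     procObj.getNoise(minHei=200., maxHei=600., minVel=-50., maxVel=50.)
#     print procObj.dataOut.noise     # one value per channel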
1351 1351
1352 1352 class IncohInt(Operation):
1353 1353
1354 1354
1355 1355 __profIndex = 0
1356 1356 __withOverapping = False
1357 1357
1358 1358 __byTime = False
1359 1359 __initime = None
1360 1360 __lastdatatime = None
1361 1361 __integrationtime = None
1362 1362
1363 1363 __buffer_spc = None
1364 1364 __buffer_cspc = None
1365 1365 __buffer_dc = None
1366 1366
1367 1367 __dataReady = False
1368 1368
1369 1369 __timeInterval = None
1370 1370
1371 1371 n = None
1372 1372
1373 1373
1374 1374
1375 1375 def __init__(self):
1376 1376
1377 1377 self.__isConfig = False
1378 1378
1379 1379 def setup(self, n=None, timeInterval=None, overlapping=False):
1380 1380 """
1381 1381 Set the parameters of the integration class.
1382 1382
1383 1383 Inputs:
1384 1384
1385 1385 n : Number of incoherent integrations
1386 1386 timeInterval : Integration time in seconds; ignored when "n" is given
1387 1387 overlapping : If True, an overlapping (sliding) window of the last n blocks is kept
1388 1388
1389 1389 """
1390 1390
1391 1391 self.__initime = None
1392 1392 self.__lastdatatime = 0
1393 1393 self.__buffer_spc = None
1394 1394 self.__buffer_cspc = None
1395 1395 self.__buffer_dc = None
1396 1396 self.__dataReady = False
1397 1397
1398 1398
1399 1399 if n == None and timeInterval == None:
1400 1400 raise ValueError, "n or timeInterval should be specified ..."
1401 1401
1402 1402 if n != None:
1403 1403 self.n = n
1404 1404 self.__byTime = False
1405 1405 else:
1406 1406 self.__integrationtime = timeInterval #if (type(timeInterval)!=integer) -> change this line
1407 1407 self.n = 9999
1408 1408 self.__byTime = True
1409 1409
1410 1410 if overlapping:
1411 1411 self.__withOverapping = True
1412 1412 else:
1413 1413 self.__withOverapping = False
1414 1414 self.__buffer_spc = 0
1415 1415 self.__buffer_cspc = 0
1416 1416 self.__buffer_dc = 0
1417 1417
1418 1418 self.__profIndex = 0
1419 1419
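# Example (sketch): IncohInt is normally driven through run(); either a fixed
# number of spectra or an integration time in seconds can be given. The
# values below are hypothetical:
#
#     opObj = IncohInt()
#     opObj.run(dataOut, n=8)                    # integrate 8 spectra
#     # opObj.run(dataOut, timeInterval=60.0)    # or integrate 60 s of data
#
# dataOut.flagNoData stays True until an integrated block is ready.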
1420 1420 def putData(self, data_spc, data_cspc, data_dc):
1421 1421
1422 1422 """
1423 1423 Add a spectra block to __buffer_spc (and the cspc/dc buffers) and increase __profIndex by one
1424 1424
1425 1425 """
1426 1426
1427 1427 if not self.__withOverapping:
1428 1428 self.__buffer_spc += data_spc
1429 1429
1430 1430 if data_cspc == None:
1431 1431 self.__buffer_cspc = None
1432 1432 else:
1433 1433 self.__buffer_cspc += data_cspc
1434 1434
1435 1435 if data_dc == None:
1436 1436 self.__buffer_dc = None
1437 1437 else:
1438 1438 self.__buffer_dc += data_dc
1439 1439
1440 1440 self.__profIndex += 1
1441 1441 return
1442 1442
1443 1443 #Overlapping data
1444 1444 nChannels, nFFTPoints, nHeis = data_spc.shape
1445 1445 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
1446 1446 if data_cspc != None:
1447 1447 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
1448 1448 if data_dc != None:
1449 1449 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
1450 1450
1451 1451 #If the buffer is empty then it takes the data value
1452 1452 if self.__buffer_spc == None:
1453 1453 self.__buffer_spc = data_spc
1454 1454
1455 1455 if data_cspc == None:
1456 1456 self.__buffer_cspc = None
1457 1457 else:
1458 1458 self.__buffer_cspc = data_cspc #first block: the buffer was None, so assign instead of accumulating
1459 1459
1460 1460 if data_dc == None:
1461 1461 self.__buffer_dc = None
1462 1462 else:
1463 1463 self.__buffer_dc = data_dc #first block: the buffer was None, so assign instead of accumulating
1464 1464
1465 1465 self.__profIndex += 1
1466 1466 return
1467 1467
1468 1468 #If the buffer length is lower than n then stack the data value
1469 1469 if self.__profIndex < self.n:
1470 1470 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
1471 1471
1472 1472 if data_cspc != None:
1473 1473 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
1474 1474
1475 1475 if data_dc != None:
1476 1476 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
1477 1477
1478 1478 self.__profIndex += 1
1479 1479 return
1480 1480
1481 1481 #If the buffer is full then shift it and replace the oldest block with the new data
1482 1482 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
1483 1483 self.__buffer_spc[self.n-1] = data_spc
1484 1484
1485 1485 if data_cspc != None:
1486 1486 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
1487 1487 self.__buffer_cspc[self.n-1] = data_cspc
1488 1488
1489 1489 if data_dc != None:
1490 1490 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
1491 1491 self.__buffer_dc[self.n-1] = data_dc
1492 1492
1493 1493 self.__profIndex = self.n
1494 1494 return
1495 1495
1496 1496
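# Note (sketch): with overlapping=True the buffers behave as a sliding window
# of the last n blocks, stacked along axis 0. Once full, the oldest block is
# dropped with numpy.roll and the newest written at the end:
#
#     import numpy
#     buf = numpy.arange(12).reshape(3, 4)   # 3 stacked blocks (hypothetical)
#     buf = numpy.roll(buf, -1, axis=0)      # oldest block moves to the end
#     buf[-1] = numpy.zeros(4)               # ... and is overwritten
#     window_sum = buf.sum(axis=0)           # what pushData() returns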
1497 1497 def pushData(self):
1498 1498 """
1499 1499 Return the sum of the last profiles and the profiles used in the sum.
1500 1500
1501 1501 Affected:
1502 1502
1503 1503 self.__profileIndex
1504 1504
1505 1505 """
1506 1506 data_spc = None
1507 1507 data_cspc = None
1508 1508 data_dc = None
1509 1509
1510 1510 if not self.__withOverapping:
1511 1511 data_spc = self.__buffer_spc
1512 1512 data_cspc = self.__buffer_cspc
1513 1513 data_dc = self.__buffer_dc
1514 1514
1515 1515 n = self.__profIndex
1516 1516
1517 1517 self.__buffer_spc = 0
1518 1518 self.__buffer_cspc = 0
1519 1519 self.__buffer_dc = 0
1520 1520 self.__profIndex = 0
1521 1521
1522 1522 return data_spc, data_cspc, data_dc, n
1523 1523
1524 1524 #Integration with Overlapping
1525 1525 data_spc = numpy.sum(self.__buffer_spc, axis=0)
1526 1526
1527 1527 if self.__buffer_cspc != None:
1528 1528 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
1529 1529
1530 1530 if self.__buffer_dc != None:
1531 1531 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1532 1532
1533 1533 n = self.__profIndex
1534 1534
1535 1535 return data_spc, data_cspc, data_dc, n
1536 1536
1537 1537 def byProfiles(self, *args):
1538 1538
1539 1539 self.__dataReady = False
1540 1540 avgdata_spc = None
1541 1541 avgdata_cspc = None
1542 1542 avgdata_dc = None
1543 1543 n = None
1544 1544
1545 1545 self.putData(*args)
1546 1546
1547 1547 if self.__profIndex == self.n:
1548 1548
1549 1549 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1550 1550 self.__dataReady = True
1551 1551
1552 1552 return avgdata_spc, avgdata_cspc, avgdata_dc
1553 1553
1554 1554 def byTime(self, datatime, *args):
1555 1555
1556 1556 self.__dataReady = False
1557 1557 avgdata_spc = None
1558 1558 avgdata_cspc = None
1559 1559 avgdata_dc = None
1560 1560 n = None
1561 1561
1562 1562 self.putData(*args)
1563 1563
1564 1564 if (datatime - self.__initime) >= self.__integrationtime:
1565 1565 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1566 1566 self.n = n
1567 1567 self.__dataReady = True
1568 1568
1569 1569 return avgdata_spc, avgdata_cspc, avgdata_dc
1570 1570
1571 1571 def integrate(self, datatime, *args):
1572 1572
1573 1573 if self.__initime == None:
1574 1574 self.__initime = datatime
1575 1575
1576 1576 if self.__byTime:
1577 1577 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1578 1578 else:
1579 1579 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1580 1580
1581 1581 self.__lastdatatime = datatime
1582 1582
1583 1583 if avgdata_spc == None:
1584 1584 return None, None, None, None
1585 1585
1586 1586 avgdatatime = self.__initime
1587 1587 try:
1588 1588 self.__timeInterval = (self.__lastdatatime - self.__initime)/(self.n - 1)
1589 1589 except:
1590 1590 self.__timeInterval = self.__lastdatatime - self.__initime
1591 1591
1592 1592 deltatime = datatime -self.__lastdatatime
1593 1593
1594 1594 if not self.__withOverapping:
1595 1595 self.__initime = datatime
1596 1596 else:
1597 1597 self.__initime += deltatime
1598 1598
1599 1599 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1600 1600
1601 1601 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1602 1602
1603 1603 if n==1:
1604 1604 dataOut.flagNoData = False
1605 1605 return
1606 1606
1607 1607 if not self.__isConfig:
1608 1608 self.setup(n, timeInterval, overlapping)
1609 1609 self.__isConfig = True
1610 1610
1611 1611 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1612 1612 dataOut.data_spc,
1613 1613 dataOut.data_cspc,
1614 1614 dataOut.data_dc)
1615 1615
1616 1616 # dataOut.timeInterval *= n
1617 1617 dataOut.flagNoData = True
1618 1618
1619 1619 if self.__dataReady:
1620 1620
1621 1621 dataOut.data_spc = avgdata_spc
1622 1622 dataOut.data_cspc = avgdata_cspc
1623 1623 dataOut.data_dc = avgdata_dc
1624 1624
1625 1625 dataOut.nIncohInt *= self.n
1626 1626 dataOut.utctime = avgdatatime
1627 1627 #dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1628 1628 dataOut.timeInterval = self.__timeInterval*self.n
1629 1629 dataOut.flagNoData = False
1630 1630
1631 1631 class ProfileConcat(Operation):
1632 1632
1633 1633 __isConfig = False
1634 1634 buffer = None
1635 1635
1636 1636 def __init__(self):
1637 1637
1638 1638 self.profileIndex = 0
1639 1639
1640 1640 def reset(self):
1641 1641 self.buffer = numpy.zeros_like(self.buffer)
1642 1642 self.start_index = 0
1643 1643 self.times = 1
1644 1644
1645 1645 def setup(self, data, m, n=1):
1646 1646 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
1647 1647 self.profiles = data.shape[1]
1648 1648 self.start_index = 0
1649 1649 self.times = 1
1650 1650
1651 1651 def concat(self, data):
1652 1652
1653 1653 self.buffer[:,self.start_index:self.profiles*self.times] = data.copy()
1654 1654 self.start_index = self.start_index + self.profiles
1655 1655
1656 1656 def run(self, dataOut, m):
1657 1657
1658 1658 dataOut.flagNoData = True
1659 1659
1660 1660 if not self.__isConfig:
1661 1661 self.setup(dataOut.data, m, 1)
1662 1662 self.__isConfig = True
1663 1663
1664 1664 self.concat(dataOut.data)
1665 1665 self.times += 1
1666 1666 if self.times > m:
1667 1667 dataOut.data = self.buffer
1668 1668 self.reset()
1669 1669 dataOut.flagNoData = False
1670 1670 # more header and dataOut attributes should be updated here, e.g. the heights
1671 1671 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1672 1672 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * 5
1673 1673 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
1674 1674
1675 1675
1676 1676
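# Example (sketch): ProfileConcat glues m consecutive profiles into one longer
# profile along the height axis; a hypothetical call concatenating 5 profiles
# per output block:
#
#     opObj = ProfileConcat()
#     opObj.run(dataOut, m=5)
#     # dataOut.flagNoData turns False every 5th call, with dataOut.data of
#     # shape (nChannels, 5*nHeights) and an extended heightList.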
1677 1677 class ProfileSelector(Operation):
1678 1678
1679 1679 profileIndex = None
1680 1680 # Total number of profiles
1681 1681 nProfiles = None
1682 1682
1683 1683 def __init__(self):
1684 1684
1685 1685 self.profileIndex = 0
1686 1686
1687 1687 def incIndex(self):
1688 1688 self.profileIndex += 1
1689 1689
1690 1690 if self.profileIndex >= self.nProfiles:
1691 1691 self.profileIndex = 0
1692 1692
1693 1693 def isProfileInRange(self, minIndex, maxIndex):
1694 1694
1695 1695 if self.profileIndex < minIndex:
1696 1696 return False
1697 1697
1698 1698 if self.profileIndex > maxIndex:
1699 1699 return False
1700 1700
1701 1701 return True
1702 1702
1703 1703 def isProfileInList(self, profileList):
1704 1704
1705 1705 if self.profileIndex not in profileList:
1706 1706 return False
1707 1707
1708 1708 return True
1709 1709
1710 1710 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None):
1711 1711
1712 1712 dataOut.flagNoData = True
1713 1713 self.nProfiles = dataOut.nProfiles
1714 1714
1715 1715 if profileList != None:
1716 1716 if self.isProfileInList(profileList):
1717 1717 dataOut.flagNoData = False
1718 1718
1719 1719 self.incIndex()
1720 1720 return 1
1721 1721
1722 1722
1723 1723 elif profileRangeList != None:
1724 1724 minIndex = profileRangeList[0]
1725 1725 maxIndex = profileRangeList[1]
1726 1726 if self.isProfileInRange(minIndex, maxIndex):
1727 1727 dataOut.flagNoData = False
1728 1728
1729 1729 self.incIndex()
1730 1730 return 1
1731 1731 elif beam != None:
1732 1732 if self.isProfileInList(dataOut.beamRangeDict[beam]):
1733 1733 dataOut.flagNoData = False
1734 1734
1735 1735 self.incIndex()
1736 1736 return 1
1737 1737
1738 1738 else:
1739 1739 raise ValueError, "ProfileSelector needs profileList, profileRangeList or beam"
1740 1740
1741 1741 return 0
1742 1742
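# Example (sketch): ProfileSelector flags the data as valid only for the
# chosen profiles; exactly one of the selection arguments is used per call
# (hypothetical values):
#
#     opObj = ProfileSelector()
#     opObj.run(dataOut, profileList=[0, 1, 2, 7])     # keep listed profiles
#     # opObj.run(dataOut, profileRangeList=[10, 19])  # or keep a range
#     # opObj.run(dataOut, beam=0)                     # or keep one beam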
1743 1743 class SpectraHeisProc(ProcessingUnit):
1744 1744 def __init__(self):
1745 1745 self.objectDict = {}
1746 1746 # self.buffer = None
1747 1747 # self.firstdatatime = None
1748 1748 # self.profIndex = 0
1749 1749 self.dataOut = SpectraHeis()
1750 1750
1751 1751 def __updateObjFromInput(self):
1752 1752 self.dataOut.timeZone = self.dataIn.timeZone
1753 1753 self.dataOut.dstFlag = self.dataIn.dstFlag
1754 1754 self.dataOut.errorCount = self.dataIn.errorCount
1755 1755 self.dataOut.useLocalTime = self.dataIn.useLocalTime
1756 1756
1757 1757 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
1758 1758 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
1759 1759 self.dataOut.channelList = self.dataIn.channelList
1760 1760 self.dataOut.heightList = self.dataIn.heightList
1761 1761 # self.dataOut.dtype = self.dataIn.dtype
1762 1762 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
1763 1763 # self.dataOut.nHeights = self.dataIn.nHeights
1764 1764 # self.dataOut.nChannels = self.dataIn.nChannels
1765 1765 self.dataOut.nBaud = self.dataIn.nBaud
1766 1766 self.dataOut.nCode = self.dataIn.nCode
1767 1767 self.dataOut.code = self.dataIn.code
1768 1768 # self.dataOut.nProfiles = 1
1769 1769 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
1770 1770 self.dataOut.nFFTPoints = self.dataIn.nHeights
1771 1771 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
1772 1772 # self.dataOut.flagNoData = self.dataIn.flagNoData
1773 1773 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
1774 1774 self.dataOut.utctime = self.dataIn.utctime
1775 1775 # self.dataOut.utctime = self.firstdatatime
1776 1776 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assume the data is already decoded
1777 1777 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assume the data is not flipped
1778 1778 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
1779 1779 self.dataOut.nCohInt = self.dataIn.nCohInt
1780 1780 self.dataOut.nIncohInt = 1
1781 1781 self.dataOut.ippSeconds= self.dataIn.ippSeconds
1782 1782 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
1783 1783
1784 1784 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
1785 1785 # self.dataOut.set=self.dataIn.set
1786 1786 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
1787 1787
1788 1788
1789 1789 def __updateObjFromFits(self):
1790 1790 self.dataOut.utctime = self.dataIn.utctime
1791 1791 self.dataOut.channelIndexList = self.dataIn.channelIndexList
1792 1792
1793 1793 self.dataOut.channelList = self.dataIn.channelList
1794 1794 self.dataOut.heightList = self.dataIn.heightList
1795 1795 self.dataOut.data_spc = self.dataIn.data
1796 1796 self.dataOut.timeInterval = self.dataIn.timeInterval
1797 1797 self.dataOut.timeZone = self.dataIn.timeZone
1798 1798 self.dataOut.useLocalTime = True
1799 1799 # self.dataOut.
1800 1800 # self.dataOut.
1801 1801
1802 1802 def __getFft(self):
1803 1803
1804 1804 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
1805 1805 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
1806 1806 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
1807 1807 self.dataOut.data_spc = spc
1808 1808
1809 1809 def init(self):
1810 1810
1811 1811 self.dataOut.flagNoData = True
1812 1812
1813 1813 if self.dataIn.type == "Fits":
1814 1814 self.__updateObjFromFits()
1815 1815 self.dataOut.flagNoData = False
1816 1816 return
1817 1817
1818 1818 if self.dataIn.type == "SpectraHeis":
1819 1819 self.dataOut.copy(self.dataIn)
1820 1820 return
1821 1821
1822 1822 if self.dataIn.type == "Voltage":
1823 1823 self.__updateObjFromInput()
1824 1824 self.__getFft()
1825 1825 self.dataOut.flagNoData = False
1826 1826
1827 1827 return
1828 1828
1829 1829 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
1830 1830
1831 1831
1832 1832 def selectChannels(self, channelList):
1833 1833
1834 1834 channelIndexList = []
1835 1835
1836 1836 for channel in channelList:
1837 1837 index = self.dataOut.channelList.index(channel)
1838 1838 channelIndexList.append(index)
1839 1839
1840 1840 self.selectChannelsByIndex(channelIndexList)
1841 1841
1842 1842 def selectChannelsByIndex(self, channelIndexList):
1843 1843 """
1844 1844 Selects a block of data by channel, according to channelIndexList
1845 1845
1846 1846 Input:
1847 1847 channelIndexList : plain list of channel indexes to keep, e.g. [2,3,7]
1848 1848
1849 1849 Affected:
1850 1850 self.dataOut.data
1851 1851 self.dataOut.channelIndexList
1852 1852 self.dataOut.nChannels
1853 1853 self.dataOut.m_ProcessingHeader.totalSpectra
1854 1854 self.dataOut.systemHeaderObj.numChannels
1855 1855 self.dataOut.m_ProcessingHeader.blockSize
1856 1856
1857 1857 Return:
1858 1858 None
1859 1859 """
1860 1860
1861 1861 for channelIndex in channelIndexList:
1862 1862 if channelIndex not in self.dataOut.channelIndexList:
1863 1863 print channelIndexList
1864 1864 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
1865 1865
1866 1866 nChannels = len(channelIndexList)
1867 1867
1868 1868 data_spc = self.dataOut.data_spc[channelIndexList,:]
1869 1869
1870 1870 self.dataOut.data_spc = data_spc
1871 1871 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
1872 1872
1873 1873 return 1
1874 1874
1875 1875 class IncohInt4SpectraHeis(Operation):
1876 1876
1877 1877 __isConfig = False
1878 1878
1879 1879 __profIndex = 0
1880 1880 __withOverapping = False
1881 1881
1882 1882 __byTime = False
1883 1883 __initime = None
1884 1884 __lastdatatime = None
1885 1885 __integrationtime = None
1886 1886
1887 1887 __buffer = None
1888 1888
1889 1889 __dataReady = False
1890 1890
1891 1891 n = None
1892 1892
1893 1893
1894 1894 def __init__(self):
1895 1895
1896 1896 self.__isConfig = False
1897 1897
1898 1898 def setup(self, n=None, timeInterval=None, overlapping=False):
1899 1899 """
1900 1900 Set the parameters of the integration class.
1901 1901
1902 1902 Inputs:
1903 1903
1904 1904 n : Number of incoherent integrations
1905 1905 timeInterval : Integration time in seconds; ignored when "n" is given
1906 1906 overlapping : If True, an overlapping (sliding) window of the last n blocks is kept
1907 1907
1908 1908 """
1909 1909
1910 1910 self.__initime = None
1911 1911 self.__lastdatatime = 0
1912 1912 self.__buffer = None
1913 1913 self.__dataReady = False
1914 1914
1915 1915
1916 1916 if n == None and timeInterval == None:
1917 1917 raise ValueError, "n or timeInterval should be specified ..."
1918 1918
1919 1919 if n != None:
1920 1920 self.n = n
1921 1921 self.__byTime = False
1922 1922 else:
1923 1923 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
1924 1924 self.n = 9999
1925 1925 self.__byTime = True
1926 1926
1927 1927 if overlapping:
1928 1928 self.__withOverapping = True
1929 1929 self.__buffer = None
1930 1930 else:
1931 1931 self.__withOverapping = False
1932 1932 self.__buffer = 0
1933 1933
1934 1934 self.__profIndex = 0
1935 1935
1936 1936 def putData(self, data):
1937 1937
1938 1938 """
1939 1939 Add a spectra block to __buffer and increase __profIndex by one
1940 1940
1941 1941 """
1942 1942
1943 1943 if not self.__withOverapping:
1944 1944 self.__buffer += data.copy()
1945 1945 self.__profIndex += 1
1946 1946 return
1947 1947
1948 1948 #Overlapping data
1949 1949 nChannels, nHeis = data.shape
1950 1950 data = numpy.reshape(data, (1, nChannels, nHeis))
1951 1951
1952 1952 #If the buffer is empty then it takes the data value
1953 1953 if self.__buffer == None:
1954 1954 self.__buffer = data
1955 1955 self.__profIndex += 1
1956 1956 return
1957 1957
1958 1958 #If the buffer length is lower than n then stack the data value
1959 1959 if self.__profIndex < self.n:
1960 1960 self.__buffer = numpy.vstack((self.__buffer, data))
1961 1961 self.__profIndex += 1
1962 1962 return
1963 1963
1964 1964 #If the buffer is full then shift it and replace the oldest block with the new data
1965 1965 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
1966 1966 self.__buffer[self.n-1] = data
1967 1967 self.__profIndex = self.n
1968 1968 return
1969 1969
1970 1970
1971 1971 def pushData(self):
1972 1972 """
1973 1973 Return the sum of the last profiles and the profiles used in the sum.
1974 1974
1975 1975 Affected:
1976 1976
1977 1977 self.__profileIndex
1978 1978
1979 1979 """
1980 1980
1981 1981 if not self.__withOverapping:
1982 1982 data = self.__buffer
1983 1983 n = self.__profIndex
1984 1984
1985 1985 self.__buffer = 0
1986 1986 self.__profIndex = 0
1987 1987
1988 1988 return data, n
1989 1989
1990 1990 #Integration with Overlapping
1991 1991 data = numpy.sum(self.__buffer, axis=0)
1992 1992 n = self.__profIndex
1993 1993
1994 1994 return data, n
1995 1995
1996 1996 def byProfiles(self, data):
1997 1997
1998 1998 self.__dataReady = False
1999 1999 avgdata = None
2000 2000 n = None
2001 2001
2002 2002 self.putData(data)
2003 2003
2004 2004 if self.__profIndex == self.n:
2005 2005
2006 2006 avgdata, n = self.pushData()
2007 2007 self.__dataReady = True
2008 2008
2009 2009 return avgdata
2010 2010
2011 2011 def byTime(self, data, datatime):
2012 2012
2013 2013 self.__dataReady = False
2014 2014 avgdata = None
2015 2015 n = None
2016 2016
2017 2017 self.putData(data)
2018 2018
2019 2019 if (datatime - self.__initime) >= self.__integrationtime:
2020 2020 avgdata, n = self.pushData()
2021 2021 self.n = n
2022 2022 self.__dataReady = True
2023 2023
2024 2024 return avgdata
2025 2025
2026 2026 def integrate(self, data, datatime=None):
2027 2027
2028 2028 if self.__initime == None:
2029 2029 self.__initime = datatime
2030 2030
2031 2031 if self.__byTime:
2032 2032 avgdata = self.byTime(data, datatime)
2033 2033 else:
2034 2034 avgdata = self.byProfiles(data)
2035 2035
2036 2036
2037 2037 self.__lastdatatime = datatime
2038 2038
2039 2039 if avgdata == None:
2040 2040 return None, None
2041 2041
2042 2042 avgdatatime = self.__initime
2043 2043
2044 2044 deltatime = datatime -self.__lastdatatime
2045 2045
2046 2046 if not self.__withOverapping:
2047 2047 self.__initime = datatime
2048 2048 else:
2049 2049 self.__initime += deltatime
2050 2050
2051 2051 return avgdata, avgdatatime
2052 2052
2053 2053 def run(self, dataOut, **kwargs):
2054 2054
2055 2055 if not self.__isConfig:
2056 2056 self.setup(**kwargs)
2057 2057 self.__isConfig = True
2058 2058
2059 2059 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
2060 2060
2061 2061 # dataOut.timeInterval *= n
2062 2062 dataOut.flagNoData = True
2063 2063
2064 2064 if self.__dataReady:
2065 2065 dataOut.data_spc = avgdata
2066 2066 dataOut.nIncohInt *= self.n
2067 2067 # dataOut.nCohInt *= self.n
2068 2068 dataOut.utctime = avgdatatime
2069 2069 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
2070 2070 # dataOut.timeInterval = self.__timeInterval*self.n
2071 2071 dataOut.flagNoData = False
2072 2072
2073 2073
2074 2074
2075 2075
2076 2076 class AMISRProc(ProcessingUnit):
2077 2077 def __init__(self):
2078 2078 self.objectDict = {}
2079 2079 self.dataOut = AMISR()
2080 2080
2081 2081 def init(self):
2082 2082 if self.dataIn.type == 'AMISR':
2083 2083 self.dataOut.copy(self.dataIn)
2084 2084
2085
2086 class PrintInfo(Operation):
2087 def __init__(self):
2088 pass
2089
2090 def run(self, dataOut):
2091
2092 print 'Number of Records by File: %d'%dataOut.nRecords
2093 print 'Number of Pulses: %d'%dataOut.nProfiles
2094 print 'Number of Samples by Pulse: %d'%len(dataOut.heightList)
2095 print 'Ipp Seconds: %f'%dataOut.ippSeconds
2096 print 'Number of Beams: %d'%dataOut.nBeams
2097 print 'BeamCodes:'
2098 beamStrList = ['Beam %d -> Code %d'%(k,v) for k,v in dataOut.beamCodeDict.items()]
2099 for b in beamStrList:
2100 print b
2101
2102
2085 2103 class BeamSelector(Operation):
2086 2104 profileIndex = None
2087 2105 # Total number of profiles
2088 2106 nProfiles = None
2089 2107
2090 2108 def __init__(self):
2091 2109
2092 2110 self.profileIndex = 0
2093 2111
2094 2112 def incIndex(self):
2095 2113 self.profileIndex += 1
2096 2114
2097 2115 if self.profileIndex >= self.nProfiles:
2098 2116 self.profileIndex = 0
2099 2117
2100 2118 def isProfileInRange(self, minIndex, maxIndex):
2101 2119
2102 2120 if self.profileIndex < minIndex:
2103 2121 return False
2104 2122
2105 2123 if self.profileIndex > maxIndex:
2106 2124 return False
2107 2125
2108 2126 return True
2109 2127
2110 2128 def isProfileInList(self, profileList):
2111 2129
2112 2130 if self.profileIndex not in profileList:
2113 2131 return False
2114 2132
2115 2133 return True
2116 2134
2117 2135 def run(self, dataOut, beam=None):
2118 2136
2119 2137 dataOut.flagNoData = True
2120 2138 self.nProfiles = dataOut.nProfiles
2121 2139
2122 2140 if beam != None:
2123 2141 if self.isProfileInList(dataOut.beamRangeDict[beam]):
2124 2142 dataOut.flagNoData = False
2125 2143
2126 2144 self.incIndex()
2127 2145 return 1
2128 2146
2129 2147 else:
2130 2148 raise ValueError, "BeamSelector needs beam value"
2131 2149
2132 2150 return 0
\ No newline at end of file