##// END OF EJS Templates
Se corrige bug en el metodo filterByHeights....
Daniel Valdez -
r236:b12f3705eeef
parent child
Show More
@@ -1,2562 +1,2562
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether a string (or any value) can be converted to a number.

    Input:
        str : value to test for numeric convertibility

    Return:
        True  : the value can be converted with float()
        False : it cannot
    """
    # Catch only the conversion failures float() actually raises
    # (ValueError for bad strings, TypeError for non-numeric objects);
    # a bare except would also hide unrelated errors such as
    # KeyboardInterrupt or SystemExit.
    try:
        float(str)
        return True
    except (TypeError, ValueError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file contains data inside the given
    UTC date range.

    Inputs:
        filename       : full path of the data file (Jicamarca ".r" format)

        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970 (Unix epoch).
        endUTSeconds   : end of the selected range, given in seconds
                         counted from 01/01/1970 (Unix epoch).

    Return:
        Boolean : 1 if the file contains data inside the given range,
                  0 otherwise.

    Exceptions:
        IOError when the file does not exist or cannot be opened.
        (A file whose header cannot be read is skipped, not raised.)
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    # Only the basic header is needed: it carries the block timestamp.
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # Half-open interval check: start inclusive, end exclusive.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisTime = basicHeaderObj.datatime.time()
111 111
112 112 if not(sts):
113 113 print "Skipping the file %s because it has not a valid header" %(filename)
114 114 return 0
115 115
116 116 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 117 return 0
118 118
119 119 return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Filter the directory listing, keeping only the files that match the
    "xYYYYDDDSSS.ext" naming convention, and return the last one
    (case-insensitive lexicographic order) of the surviving list.

    Input:
        path : folder containing the data files
        ext  : extension of the files inside the folder (e.g. ".r")

    Return:
        The name (without path) of the last matching file in the folder,
        or None when no file matches the convention.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # Expected layout (character positions):
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:   # renamed: 'file' shadows the builtin
        try:
            # Both slices must be numeric for the name to be valid;
            # catch only the conversion failure, not every exception.
            int(thisFile[1:5])  # year
            int(thisFile[5:8])  # day of year
        except ValueError:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        validFilelist = sorted( validFilelist, key=str.lower )
        return validFilelist[-1]

    return None
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so this function looks for the actual on-disk
    spelling of a data file by trying every upper/lower-case combination of
    the folder prefix and the file prefix.

    Example:
        the real file is  .../.../D2009307/P2009307367.ext

        the combinations tried are
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext

    Return:
        (fullpath, filename) of the first combination that exists on disk;
        otherwise (None, last filename tried) — filename is None when the
        extension is not recognized.
    """
    loweredExt = ext.lower()
    if loweredExt == ".r":        # voltage data
        filePrefixes = ['d', 'D']
    elif loweredExt == ".pdata":  # spectra data
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    # Sweep every (directory prefix, file prefix) combination.
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            candidateDir = path
        else:
            # doy subfolder named xYYYYDDD (x = 'd' or 'D')
            candidateDir = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # candidate file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            candidate = os.path.join(candidateDir, filename)
            if os.path.exists(candidate):
                return candidate, filename

    return None, filename
215 215
class JRODataIO:
    # Common state shared by the Jicamarca raw-data readers and writers:
    # header objects, current-file bookkeeping and block flags.
    #
    # NOTE(review): these are *class* attributes; the header objects and the
    # list attributes (pathList, filenameList) are shared across instances
    # until an instance rebinds them — verify that subclasses always rebind.

    c = 3E8                     # speed of light [m/s], used for ippSeconds

    isConfig = False            # True once setup() has run

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0                  # 1 = real-time (online) mode

    dtype = None                # numpy dtype decoded from the header

    pathList = []

    filenameList = []

    filename = None             # file currently opened

    ext = None                  # data-file extension (".r" / ".pdata")

    flagIsNewFile = 1           # set when a new file has just been opened

    flagTimeBlock = 0           # set when a time gap > maxTimeStep is seen

    flagIsNewBlock = 0

    fp = None                   # file object currently opened

    firstHeaderSize = 0         # size in bytes of the first (long) header

    basicHeaderSize = 24        # size in bytes of a basic header

    versionFile = 1103

    fileSize = None             # size in bytes of the current file

    ippSeconds = None           # inter-pulse period in seconds

    fileSizeByHeader = None     # expected file size computed from headers

    fileIndex = None            # index into filenameList (offline mode)

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30            # max seconds between blocks before flagging

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract base class: concrete readers/writers must define this.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by ProcessingUnit/Operation subclasses.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Accessor for the output data-holder object.
        return self.dataOut
291 291
class JRODataReader(JRODataIO, ProcessingUnit):
    # Base reader for Jicamarca raw-data files. Handles file discovery
    # (offline by date/time range, online by polling the newest file),
    # header parsing and block-by-block advancement. Subclasses
    # (e.g. VoltageReader) implement the actual data decoding.

    nReadBlocks = 0         # blocks read so far from the current file

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0

    def __init__(self):

        """
        Abstract: concrete readers must provide their own constructor.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: create the default output object (subclass responsibility).
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: compute the block shape from the headers just read.
        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files whose doy folder falls inside
        [startDate, endDate] and whose first block falls inside
        [startTime, endTime).

        Inputs:
            path      : root folder of the experiment data
            startDate : first date of the range (datetime.date)
            endDate   : last date of the range (datetime.date)
            startTime : start time-of-day filter (datetime.time)
            endTime   : end time-of-day filter (datetime.time)
            set       : unused here; kept for interface symmetry
            expLabel  : sub-experiment subfolder name
            walk      : when True, scan the xYYYYDDD doy subfolders;
                        when False, read files directly from 'path'

        Return:
            (pathList, filenameList), or (None, None) when nothing matches.

        Affected:
            self.filenameList
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # Keep only the subdirectories of 'path'.
            dirList = []
            for thisPath in os.listdir(path):
                if os.path.isdir(os.path.join(path,thisPath)):
                    dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # One candidate folder per day: '?YYYYDDD' (prefix letter free).
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))
                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder found into date range %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) found [%s, ...]" %(len(pathList), pathList[0])

        filenameList = []
        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                # Keep only files whose first block is inside the time range.
                if isFileinThisTime(filename, startTime, endTime):
                    filenameList.append(filename)

        if not(filenameList):
            print "Any file found into time range %s-%s" %(startTime, endTime)
            return None, None

        self.filenameList = filenameList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Look for the last file of the last folder and return the file found
        together with some decoded metadata.

        Input:
            path     : folder holding the data files

            expLabel : name of the sub-experiment (subfolder)

            ext      : file extension

            walk     : when enabled, the doy subfolders (doypath) are scanned

        Return:
            directory : the directory where the file was found
            filename  : the last file of that folder
            year      : the year
            doy       : the day of year
            set       : the file set number

            (five Nones when nothing valid was found)
        """
        dirList = []

        if walk:

            # Keep only the subdirectories.
            for thisPath in os.listdir(path):
                if os.path.isdir(os.path.join(path, thisPath)):
                    dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # The lexicographically last doy folder is the most recent one.
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)

        else:
            fullpath = path

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # Decode year/doy/set from the xYYYYDDDSSS.ext naming convention.
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set



    def __setNextFileOffline(self):
        """
        Advance self.fileIndex to the next file of self.filenameList that
        passes __verifyFile, open it and update the file bookkeeping.

        Return:
            1 on success, 0 when the list is exhausted (flagNoMoreFiles set).
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # Skip files that are empty or still too small to hold a block.
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside a given
        folder; when no valid file is found, wait a fixed delay and retry
        over the next possible n files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid file could be found after the search
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            When a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # Look for the first available file.
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # When no file is found, wait and search again.
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): #scan the next self.nFiles+1 candidate files

                if firstTime_flag: #on the first candidate, retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 #afterwards, only one attempt per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this
                        # class — presumably set by subclasses; verify.
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): #file not found: move on to the next doy folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag


    def setNextFile(self):
        """
        Close the current file (if any), open the next one according to the
        reading mode, and read its first (long) header.

        Return:
            1 on success, 0 when no next file is available.
        """
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        In OffLine reading mode it always returns 0.
        """
        if not self.online:
            return 0

        # All the blocks announced by the header were already read.
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Reopen to refresh the file object, then restore the position.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def __setNewBlock(self):
        """
        Position the reader on the next block: read its basic header from
        the current file, wait for it (online), or jump to the next file.

        Return:
            1 when a block is ready to be read, 0 otherwise.

        Affected:
            self.lastUTTime, self.flagTimeBlock
        """
        if self.fp == None:
            return 0

        # A freshly opened file is already positioned on its first block.
        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # Online mode: the block may simply not have been written yet.
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        # Flag a discontinuity when the gap between blocks is too large.
        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1


    def readNextBlock(self):
        # Advance to the next block and decode it (readBlock is abstract).
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        # Read the processing header at the current position of fp.
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar-controller header at the current position of fp.
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header at the current position of fp.
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read the basic (per-block) header at the current position of fp.
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)


    def __readFirstHeader(self):
        """
        Read the complete first header of the file (basic + system +
        radar-controller + processing) and derive dtype, ippSeconds,
        the expected file size and the block dimensions.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # Decode the sample dtype from the processing flags:
        # 0..5 -> complex int8/int16/int32/int64/float32/float64.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # NOTE(review): the 2*1000/c factor suggests ipp is stored in km
        # (two-way travel distance to seconds) — confirm against the
        # RadarControllerHeader definition.
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()


    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that 'filename' can be opened and already holds at least one
        complete data block (headers are parsed when the expected block size
        is not known yet).

        Return:
            True when the file is usable, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # Block size unknown yet: parse this file's own headers to get it.
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=0,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True):
        """
        Configure the reader: locate the data files (online or offline
        mode) and open the first one.

        Return:
            self.dataOut on success; None when online search fails.

        NOTE(review): the offline failure paths call sys.exit(-1) inside
        library code — consider raising instead.
        """

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # Start one set before the found one so the first advance
            # (set += 1) lands on it.
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                  datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                  datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        # NOTE(review): in online mode pathList/filenameList appear to be
        # unbound at this point — verify against the callers.
        self.fileIndex = -1
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut

    # NOTE(review): the next three methods are declared without 'self';
    # they only work because subclasses override them — verify.
    def getData():
        # Abstract: return the next decoded data unit.
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer():
        # Abstract: True when the internal buffer is exhausted.
        raise ValueError, "This method has not been implemented"

    def readBlock():
        # Abstract: decode one data block from the file.
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        # True once every file has been consumed.
        return self.flagNoMoreFiles

    def printReadBlocks(self):
        # Report how many blocks were read from the current file.
        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):
        # Report how many blocks were read overall.
        print "Number of read blocks %04d" %self.nTotalBlocks

    def printInfo(self):
        # Dump every header of the current file.
        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()


    def run(self, **kwargs):
        # ProcessingUnit entry point: configure once, then fetch data.
        if not(self.isConfig):

            #            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
879 879
880 880 class JRODataWriter(JRODataIO, Operation):
881 881
882 882 """
883 883 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
884 884 de los datos siempre se realiza por bloques.
885 885 """
886 886
887 887 blockIndex = 0
888 888
889 889 path = None
890 890
891 891 setFile = None
892 892
893 893 profilesPerBlock = None
894 894
895 895 blocksPerFile = None
896 896
897 897 nWriteBlocks = 0
898 898
899 899 def __init__(self, dataOut=None):
900 900 raise ValueError, "Not implemented"
901 901
902 902
903 903 def hasAllDataInBuffer(self):
904 904 raise ValueError, "Not implemented"
905 905
906 906
907 907 def setBlockDimension(self):
908 908 raise ValueError, "Not implemented"
909 909
910 910
911 911 def writeBlock(self):
912 912 raise ValueError, "No implemented"
913 913
914 914
915 915 def putData(self):
916 916 raise ValueError, "No implemented"
917 917
918 918 def getDataHeader(self):
919 919 """
920 920 Obtiene una copia del First Header
921 921
922 922 Affected:
923 923
924 924 self.basicHeaderObj
925 925 self.systemHeaderObj
926 926 self.radarControllerHeaderObj
927 927 self.processingHeaderObj self.
928 928
929 929 Return:
930 930 None
931 931 """
932 932
933 933 raise ValueError, "No implemented"
934 934
935 935 def getBasicHeader(self):
936 936
937 937 self.basicHeaderObj.size = self.basicHeaderSize #bytes
938 938 self.basicHeaderObj.version = self.versionFile
939 939 self.basicHeaderObj.dataBlock = self.nTotalBlocks
940 940
941 941 utc = numpy.floor(self.dataOut.utctime)
942 942 milisecond = (self.dataOut.utctime - utc)* 1000.0
943 943
944 944 self.basicHeaderObj.utc = utc
945 945 self.basicHeaderObj.miliSecond = milisecond
946 946 self.basicHeaderObj.timeZone = 0
947 947 self.basicHeaderObj.dstFlag = 0
948 948 self.basicHeaderObj.errorCount = 0
949 949
950 950 def __writeFirstHeader(self):
951 951 """
952 952 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
953 953
954 954 Affected:
955 955 __dataType
956 956
957 957 Return:
958 958 None
959 959 """
960 960
961 961 # CALCULAR PARAMETROS
962 962
963 963 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
964 964 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
965 965
966 966 self.basicHeaderObj.write(self.fp)
967 967 self.systemHeaderObj.write(self.fp)
968 968 self.radarControllerHeaderObj.write(self.fp)
969 969 self.processingHeaderObj.write(self.fp)
970 970
971 971 self.dtype = self.dataOut.dtype
972 972
973 973 def __setNewBlock(self):
974 974 """
975 975 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
976 976
977 977 Return:
978 978 0 : si no pudo escribir nada
979 979 1 : Si escribio el Basic el First Header
980 980 """
981 981 if self.fp == None:
982 982 self.setNextFile()
983 983
984 984 if self.flagIsNewFile:
985 985 return 1
986 986
987 987 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
988 988 self.basicHeaderObj.write(self.fp)
989 989 return 1
990 990
991 991 if not( self.setNextFile() ):
992 992 return 0
993 993
994 994 return 1
995 995
996 996
997 997 def writeNextBlock(self):
998 998 """
999 999 Selecciona el bloque siguiente de datos y los escribe en un file
1000 1000
1001 1001 Return:
1002 1002 0 : Si no hizo pudo escribir el bloque de datos
1003 1003 1 : Si no pudo escribir el bloque de datos
1004 1004 """
1005 1005 if not( self.__setNewBlock() ):
1006 1006 return 0
1007 1007
1008 1008 self.writeBlock()
1009 1009
1010 1010 return 1
1011 1011
1012 1012 def setNextFile(self):
1013 1013 """
1014 1014 Determina el siguiente file que sera escrito
1015 1015
1016 1016 Affected:
1017 1017 self.filename
1018 1018 self.subfolder
1019 1019 self.fp
1020 1020 self.setFile
1021 1021 self.flagIsNewFile
1022 1022
1023 1023 Return:
1024 1024 0 : Si el archivo no puede ser escrito
1025 1025 1 : Si el archivo esta listo para ser escrito
1026 1026 """
1027 1027 ext = self.ext
1028 1028 path = self.path
1029 1029
1030 1030 if self.fp != None:
1031 1031 self.fp.close()
1032 1032
1033 1033 timeTuple = time.localtime( self.dataOut.dataUtcTime)
1034 1034 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1035 1035
1036 1036 fullpath = os.path.join( path, subfolder )
1037 1037 if not( os.path.exists(fullpath) ):
1038 1038 os.mkdir(fullpath)
1039 1039 self.setFile = -1 #inicializo mi contador de seteo
1040 1040 else:
1041 1041 filesList = os.listdir( fullpath )
1042 1042 if len( filesList ) > 0:
1043 1043 filesList = sorted( filesList, key=str.lower )
1044 1044 filen = filesList[-1]
1045 1045 # el filename debera tener el siguiente formato
1046 1046 # 0 1234 567 89A BCDE (hex)
1047 1047 # x YYYY DDD SSS .ext
1048 1048 if isNumber( filen[8:11] ):
1049 1049 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1050 1050 else:
1051 1051 self.setFile = -1
1052 1052 else:
1053 1053 self.setFile = -1 #inicializo mi contador de seteo
1054 1054
1055 1055 setFile = self.setFile
1056 1056 setFile += 1
1057 1057
1058 1058 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1059 1059 timeTuple.tm_year,
1060 1060 timeTuple.tm_yday,
1061 1061 setFile,
1062 1062 ext )
1063 1063
1064 1064 filename = os.path.join( path, subfolder, file )
1065 1065
1066 1066 fp = open( filename,'wb' )
1067 1067
1068 1068 self.blockIndex = 0
1069 1069
1070 1070 #guardando atributos
1071 1071 self.filename = filename
1072 1072 self.subfolder = subfolder
1073 1073 self.fp = fp
1074 1074 self.setFile = setFile
1075 1075 self.flagIsNewFile = 1
1076 1076
1077 1077 self.getDataHeader()
1078 1078
1079 1079 print 'Writing the file: %s'%self.filename
1080 1080
1081 1081 self.__writeFirstHeader()
1082 1082
1083 1083 return 1
1084 1084
def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
    """
    Configure the output format and open the first file (writes the First Header).

    Inputs:
        dataOut          : data object whose blocks will be written out
        path             : destination directory where the files will be created
        blocksPerFile    : number of data blocks to store per file
        profilesPerBlock : number of profiles per block (may be None)
        set              : starting set number of the file sequence
        ext              : file extension; defaults to the class extension

    Return:
        0 : setup failed (no output file could be opened)
        1 : setup succeeded
    """

    if ext is None:
        ext = self.ext

    self.ext = ext.lower()

    self.path = path

    # setNextFile() increments the set counter before using it, so
    # pre-decrement here to make the first file use "set".
    self.setFile = set - 1

    self.blocksPerFile = blocksPerFile

    self.profilesPerBlock = profilesPerBlock

    self.dataOut = dataOut

    if not self.setNextFile():
        print("There isn't a next file")
        return 0

    self.setBlockDimension()

    return 1
1123 1123
def run(self, dataOut, **kwargs):
    """Write one unit of data, configuring the writer on the first call."""

    # Lazy one-time configuration: forward the keyword options to setup().
    if not self.isConfig:
        self.setup(dataOut, **kwargs)
        self.isConfig = True

    self.putData()
1132 1132
class VoltageReader(JRODataReader):
    """
    Reads voltage data from files in Jicamarca rawdata format (.r). Data is
    always read one block at a time; each block (a 3D array:
    profiles * heights * channels) is kept in the internal buffer "datablock".

    The class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and ProcessingHeader to store the file metadata, plus
    a Voltage object (dataOut) that receives one profile of data every time the
    getData() method is executed.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"

    optchar = "D"

    dataOut = None

    def __init__(self):
        """
        VoltageReader initializer.

        A Voltage object is created internally (self.dataOut) to hold one
        profile of data each time getData() is requested; the profile is taken
        from the data buffer, and a new block is read from disk whenever the
        buffer is empty.

        Affected:
            self.dataOut

        Return:
            None
        """

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # larger than any real block, so the first getData() forces a read
        self.profileIndex = 9999

        self.delay = 3      # seconds to wait between retries (online mode)

        self.nTries = 3     # quantity of tries

        self.nFiles = 3     # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):
        """Create and return the default output object (Voltage)."""

        return Voltage()

    def __hasNotDataInBuffer(self):
        """Return 1 when every profile of the current block has been consumed."""
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        self.blocksize = self.processingHeaderObj.profilesPerBlock \
                       * self.processingHeaderObj.nHeights \
                       * self.systemHeaderObj.nChannels

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and update
        every block-related attribute (metadata + data). The data is stored in
        the buffer and the buffer counter is reset to 0.

        Return:
            0 : the read block did not contain enough data
            1 : the block was read successfully

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
        """

        junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)

        try:
            junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                                 self.processingHeaderObj.nHeights,
                                 self.systemHeaderObj.nChannels))
        except:
            print("The read block (%3d) has not enough data" % self.nReadBlocks)
            return 0

        # reorder to (channels, profiles, heights) and build complex samples
        junk = numpy.transpose(junk, (2, 0, 1))
        self.datablock = junk['real'] + junk['imag'] * 1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getData(self):
        """
        Copy one unit of data (a profile) from the read buffer into the
        "Voltage" object (self.dataOut) together with all of its metadata. When
        the buffer is empty a new block is read using readNextBlock().

        The buffer counter is incremented by 1.

        Return:
            data : one profile of voltages (heights * channels) copied from the
                   buffer; 0 when there are no more files to read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print('Process finished')
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not self.readNextBlock():
                return 0

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagShiftFFT = False

        # Code parameters come from the radar controller header, NOT from the
        # processing header (bug fixed in r236). "is not None" because code may
        # be a numpy array, where "!=" would compare elementwise.
        if self.radarControllerHeaderObj.code is not None:

            self.dataOut.nCode = self.radarControllerHeaderObj.nCode

            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud

            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagDecodeData = False    # assume the data is not decoded

        self.dataOut.flagDeflipData = False    # assume the data is not deflipped

        self.dataOut.flagShiftFFT = False

        # datablock is a 3-dimensional numpy array (channels, profiles, heights);
        # use "is None" — "== None" on a numpy array compares elementwise
        if self.datablock is None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = self.datablock[:, self.profileIndex, :]

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / 1000. + self.profileIndex * self.ippSeconds

        self.profileIndex += 1

        self.dataOut.flagNoData = False

        return self.dataOut.data
1445 1445
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Data is always written one
    block at a time.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None

    def __init__(self):
        """
        VoltageWriter initializer.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        """Return 1 when the buffer already holds a complete block of profiles."""
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Set the dimensional shapes of the buffers that make up a data block.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # on-disk layout: (profiles, heights, channels)
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # in-memory layout: (channels, profiles, heights)
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex'))

    def writeBlock(self):
        """
        Write the buffer to the currently opened file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        # undo the in-memory ordering back to the on-disk layout
        junk = numpy.transpose(self.datablock, (1, 2, 0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Store one profile into the block buffer and flush the block to file
        when the buffer is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available or no more files can be written
            1 : the profile was stored (and possibly a block was written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # time discontinuity: drop the partial block and start a new file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask describing the dtype and the processing
        applied to the data.

        Raises:
            ValueError if self.dataOut.dtype is not one of the supported
            complex structured dtypes (previously this ended in a NameError).
        """

        dtypeList = [numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
                     numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
                     numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
                     numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
                     numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
                     numpy.dtype([('real', '<f8'), ('imag', '<f8')])]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        dtypeValue = None
        for candidate, value in zip(dtypeList, datatypeValueList):
            if self.dataOut.dtype == candidate:
                dtypeValue = value
                break

        if dtypeValue is None:
            raise ValueError("Unsupported dtype: %s" % self.dataOut.dtype)

        processFlags = dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "is not None": code may be a numpy array, where "!=" is elementwise
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags

    def __getBlockSize(self):
        '''
        Return the number of bytes of one Voltage data block.

        Raises:
            ValueError if self.dataOut.dtype is not a supported dtype.
        '''

        dtypeList = [numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
                     numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
                     numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
                     numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
                     numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
                     numpy.dtype([('real', '<f8'), ('imag', '<f8')])]

        datatypeValueList = [1, 2, 4, 8, 4, 8]

        datatypeValue = None
        for candidate, value in zip(dtypeList, datatypeValueList):
            if self.dataOut.dtype == candidate:
                datatypeValue = value
                break

        if datatypeValue is None:
            raise ValueError("Unsupported dtype: %s" % self.dataOut.dtype)

        # the factor 2 accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40  # bytes, fixed part of the processing header
        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1  # source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0  # source data is of type Voltage

        # "is not None": code may be a numpy array, where "!=" is elementwise
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
class SpectraReader(JRODataReader):
    """
    Reads spectra data from processed files (.pdata). Data is always read one
    block at a time. The blocks (3D arrays) are stored in three buffers: Self
    Spectra, Cross Spectra and DC Channels.

        equalChannelPairs * heights * profiles      (Self Spectra)
        differentChannelPairs * heights * profiles  (Cross Spectra)
        channels * heights                          (DC Channels)

    The class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and ProcessingHeader to store the file metadata, plus
    a Spectra object (dataOut) that receives one block of data every time the
    getData() method is executed.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    nRdChannels = None

    nRdPairs = None

    rdPairList = []

    def __init__(self):
        """
        SpectraReader initializer.

        A Spectra object is created internally (self.dataOut) to hold one block
        of data each time getData() is requested.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 3      # seconds to wait between retries (online mode)

        self.nTries = 3     # quantity of tries

        self.nFiles = 3     # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):
        """Create and return the default output object (Spectra)."""

        return Spectra()

    def __hasNotDataInBuffer(self):
        """Spectra are consumed a whole block at a time: always read a new block."""
        return 1

    def getBlockDimension(self):
        """
        Compute the number of points to read for each part of a data block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.rdPairList
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb stores channel pairs flattened as (a0,b0, a1,b1, ...);
        # equal members mean a self-spectrum, different members a cross-spectrum
        for i in range(0, self.processingHeaderObj.totalSpectra * 2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i + 1]:
                self.nRdChannels = self.nRdChannels + 1
            else:
                self.nRdPairs = self.nRdPairs + 1
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i],
                                        self.processingHeaderObj.spectraComb[i + 1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and update
        every block-related attribute (metadata + data). The data is stored in
        the buffers.

        Return: 1 on success

        Affected:
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc
        """

        # self spectra are real valued: read with the scalar component dtype
        spc = numpy.fromfile(self.fp, self.dtype[0], self.pts2read_SelfSpectra)
        spc = spc.reshape((self.nRdChannels,
                           self.processingHeaderObj.nHeights,
                           self.processingHeaderObj.profilesPerBlock))

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile(self.fp, self.dtype, self.pts2read_CrossSpectra)
            cspc = cspc.reshape((self.nRdPairs,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock))

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile(self.fp, self.dtype, self.pts2read_DCchannels)
            dc = dc.reshape((self.systemHeaderObj.nChannels,
                             self.processingHeaderObj.nHeights))

        if not self.processingHeaderObj.shif_fft:
            # roll along the profile axis so zero frequency sits at the center
            shift = int(self.processingHeaderObj.profilesPerBlock / 2)
            spc = numpy.roll(spc, shift, axis=2)

            if self.processingHeaderObj.flag_cspc:
                cspc = numpy.roll(cspc, shift, axis=2)

        # reorder to (channels/pairs, profiles, heights)
        spc = numpy.transpose(spc, (0, 2, 1))
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose(cspc, (0, 2, 1))
            self.data_cspc = cspc['real'] + cspc['imag'] * 1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag'] * 1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getData(self):
        """
        Copy the read buffers into the "Spectra" object (self.dataOut) together
        with all of their metadata. When the buffer is empty a new block is
        read using readNextBlock().

        Return:
            0 : no more files available (or no data in the current block)
            the self-spectra buffer on success

        Affected:
            self.dataOut
            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print('Process finished')
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not self.readNextBlock():
                self.dataOut.flagNoData = True
                return 0

        # use "is None" — "== None" on a numpy array compares elementwise
        if self.data_dc is None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.flagNoData = False

        self.dataOut.dtype = self.dtype

        self.dataOut.nPairs = self.nRdPairs

        self.dataOut.pairsList = self.rdPairList

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / 1000.

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft

        self.dataOut.flagDecodeData = True   # spectra are considered already decoded

        self.dataOut.flagDeflipData = True   # spectra are considered already deflipped

        return self.dataOut.data_spc
class SpectraWriter(JRODataWriter):

    """
    Writes Spectra data to processed data files (.pdata). Data is always
    written to disk one whole block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # Buffer shapes, filled in by setBlockDimension()
    shape_spc_Buffer = None     # self-spectra: (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None    # cross-spectra: (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None      # DC channels: (nChannels, nHeights)

    data_spc = None

    data_cspc = None

    data_dc = None

#     dataOut = None

    def __init__(self):
        """
        Initialize the SpectraWriter.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0   # was initialized twice in the original

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Spectra arrive one complete block at a time, so the buffer is
        # always ready to be written.
        return 1


    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks (self-spectra, cross-spectra and
        DC channels) that make up one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block to the current file: self-spectra first,
        then cross-spectra and DC channels when present.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # shift the spectrum so DC ends up in the middle of axis 2
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
        data = spc.reshape((-1))
        data.tofile(self.fp)

        # "is not None": "!=" performs an elementwise comparison on ndarrays
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        if self.data_dc is not None:    # guard: DC data may be absent
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stage one block of data and write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data available or no more files can be written
            1 : one block of data was written to file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # Time discontinuity: clear whatever buffers exist and open a new file.
            if self.data_spc is not None:
                self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc
        self.data_cspc = self.dataOut.data_cspc
        self.data_dc = self.dataOut.data_dc

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#             self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """Build the PROCFLAG bitmask describing the dtype and the processing applied."""

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        dtypeValue = 0  # default when the dtype is not one of the known types
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]

        datatypeValue = None
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        if datatypeValue is None:
            # previously fell through to a NameError; fail explicitly instead
            raise ValueError("Unknown dtype %s, block size can not be computed" % str(self.dataOut.dtype))

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # self-spectra are real-valued (no x2 factor)
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # cross-spectra and DC channels are complex (real + imag -> x2)
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        return blocksize

    def getDataHeader(self):

        """
        Build this writer's headers as a copy of the "first header" taken
        from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # needed to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # each self-spectrum is encoded as a (ch, ch) pair, each
            # cross-spectrum as its (chA, chB) pair
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2437 2437
class SpectraHeisWriter():
    # Writes SpectraHeis data to FITS files: one table of per-channel power
    # columns (in dB) plus a frequency axis per call to putData().

    i=0

    def __init__(self, dataOut):

        # FITS helper used to build and write the output file
        self.wrObj = FITS()
        self.dataOut = dataOut

    def isNumber(str):
        """
        Check whether the given string can be converted to a number.

        NOTE(review): declared without ``self`` — calling it on an instance
        would pass the instance itself as ``str``. Looks like dead code
        duplicated from the module-level isNumber(); confirm before using.

        Input:
            str, string to analyze

        Return:
            True : the string is numeric
            False : the string is not numeric
        """
        try:
            float( str )
            return True
        except:
            return False

    def setup(self, wrpath,):
        # Create the output directory if needed and reset the file counter.

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Write the current spectra to a new FITS file named
        D<year><doy><set>.fits under wrpath/D<year><doy>/.

        Return: 1 on completion.
        """
#         self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
        #name = self.dataOut.utctime
        name= time.localtime( self.dataOut.utctime)
        ext=".fits"
        #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
        subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)

        fullpath = os.path.join( self.wrpath, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
        self.setFile += 1
        file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,subfolder, file)

        # frequency axis centered on zero
#         print self.dataOut.ippSeconds
        freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        # one column per channel, power converted to dB
        col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
        #n=numpy.arange((100))
        n=self.dataOut.data_spc[6,:]
        a=self.wrObj.cFImage(n)
        b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
        self.wrObj.CFile(a,b)
        self.wrObj.wFile(filename)
        return 1
2509 2509
class FITS:
    """Thin wrapper around pyfits for assembling and writing FITS files."""

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        pass

    def setColF(self, name, format, array):
        """Build and return a float32 pyfits Column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeHeader(self):
        """Placeholder; header writing is not implemented."""
        pass

    def writeData(self, name, format, data):
        """Build and return a float32 pyfits Column holding *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, n):
        """Create and return the primary HDU from image data *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Create and return a binary-table HDU from the nine given columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list: primary image followed by the table."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
@@ -1,1151 +1,1153
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
class ProcessingUnit:

    """
    Base class for data processing units.

    Provides the "call" method to execute operations, which can be:
        - internal methods (callMethod)
        - Operation-type objects (callObject); these must be registered
          beforehand with addOperation().
    """
    # input data object (Voltage, Spectra or Correlation)
    dataIn = None

    # output data object (Voltage, Spectra or Correlation)
    dataOut = None

    # registered Operation objects, keyed by their id
    objectDict = None

    def __init__(self):

        self.objectDict = {}

    def init(self):
        # Must be overridden by subclasses.
        raise ValueError("Not implemented")

    def addOperation(self, object, objId):

        """
        Register *object* in self.objectDict and return the identifier
        associated with it.

        (The parameter name "object" shadows the builtin; it is kept for
        backward compatibility with keyword callers.)

        Input:

            object : an instance of the "Operation" class

        Return:

            objId : the object's identifier, needed to execute the operation
        """

        self.objectDict[objId] = object

        return objId

    def operation(self, **kwargs):

        """
        Direct operation on the data (dataOut.data). Implementations must
        update the attributes of the dataOut object.

        Input:

            **kwargs : keyword arguments of the function to execute
        """

        raise ValueError("ImplementedError")

    def callMethod(self, name, **kwargs):

        """
        Execute the method named *name* of this class with arguments **kwargs.

        Input:
            name : name of the method to execute

            **kwargs : keyword arguments of the function to execute

        Return:
            False when there is no data to process, True otherwise.
        """
        if name != 'run':

            if name == 'init' and self.dataIn.isEmpty():
                self.dataOut.flagNoData = True
                return False

            if name != 'init' and self.dataOut.isEmpty():
                return False

        methodToCall = getattr(self, name)

        methodToCall(**kwargs)

        if name != 'run':
            return True

        if self.dataOut.isEmpty():
            return False

        return True

    def callObject(self, objId, **kwargs):

        """
        Execute the operation associated with the object identifier *objId*
        on self.dataOut.

        Input:

            objId : identifier of the object to execute

            **kwargs : keyword arguments of the function to execute

        Return:

            False when dataOut is empty, True otherwise.
        """

        if self.dataOut.isEmpty():
            return False

        object = self.objectDict[objId]

        object.run(self.dataOut, **kwargs)

        return True

    def call(self, operationConf, **kwargs):

        """
        Execute the operation "operationConf.name" with arguments **kwargs
        and return its status. The operation can be of two types:

        1. A method of this class:

            operation.type = "self"

        2. The "run" method of an Operation-derived object, previously
           registered with addOperation() and identified by operation.id:

            operation.type = "other"

        Input:

            operationConf : operation object with attributes: name, type and id.

        """

        if operationConf.type == 'self':
            sts = self.callMethod(operationConf.name, **kwargs)

        elif operationConf.type == 'other':
            sts = self.callObject(operationConf.id, **kwargs)

        else:
            # Previously any other type fell through and raised
            # UnboundLocalError on "sts"; fail with a clear message instead.
            raise ValueError("Unknown operation type: %s" % str(operationConf.type))

        return sts

    def setInput(self, dataIn):

        self.dataIn = dataIn

    def getOutput(self):

        return self.dataOut
173 173
class Operation():

    """
    Base class for defining extra operations that can be attached to a
    ProcessingUnit and that need to accumulate information from previously
    processed data. Subclasses should preferably keep an accumulation buffer.

    Example: coherent integration, which needs the previous n profiles (buffer).
    """

    __buffer = None
    __isConfig = False

    def __init__(self):

        pass

    def run(self, dataIn, **kwargs):

        """
        Perform the required operations on dataIn.data and update the
        attributes of dataIn. Must be implemented by subclasses.

        Input:

            dataIn : a JROData-type object

        Return:

            None

        Affected:
            __buffer : data reception buffer.

        """

        raise ValueError("ImplementedError")
211 211
class VoltageProc(ProcessingUnit):
    """Processing unit for Voltage data: channel/height selection and decimation."""

    def __init__(self):

        self.objectDict = {}
        self.dataOut = Voltage()

    def init(self):

        self.dataOut.copy(self.dataIn)
        # No need to copy dataIn's attributes on every init();
        # the copy should happen once per new data block.

    def selectChannels(self, channelList):
        """Select channels by channel number (delegates to selectChannelsByIndex)."""

        channelIndexList = []

        for channel in channelList:
            index = self.dataOut.channelList.index(channel)
            channelIndexList.append(index)

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Select a block of data by channel indexes, e.g. [2,3,7].

        Input:
            channelIndexList : plain list of channel indexes to select

        Affected:
            self.dataOut.data
            self.dataOut.channelList

        Return:
            1 on success; raises ValueError for an invalid index.
        """

        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                print(channelIndexList)
                raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)

        data = self.dataOut.data[channelIndexList,:]

        self.dataOut.data = data
        self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
#        self.dataOut.nChannels = nChannels

        return 1

    def selectHeights(self, minHei, maxHei):
        """
        Select a block of data by height values in the range
        minHei <= height <= maxHei.

        Input:
            minHei : minimum height to consider
            maxHei : maximum height to consider

        Affected:
            Several values are changed indirectly via selectHeightsByIndex.

        Return:
            1 when the method runs successfully, 0 otherwise
        """
        if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
            raise ValueError("some value in (%d,%d) is not valid" % (minHei, maxHei))

        if (maxHei > self.dataOut.heightList[-1]):
            # clamp instead of failing when maxHei exceeds the last height
            maxHei = self.dataOut.heightList[-1]

        minIndex = 0
        maxIndex = 0
        heights = self.dataOut.heightList

        # first index whose height is >= minHei
        for i, val in enumerate(heights):
            if val < minHei:
                continue
            minIndex = i
            break

        # last index whose height is <= maxHei
        for i, val in enumerate(heights):
            if val <= maxHei:
                maxIndex = i
            else:
                break

        self.selectHeightsByIndex(minIndex, maxIndex)

        return 1


    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Select a block of data by height indexes in the range
        minIndex <= index <= maxIndex.

        Input:
            minIndex : minimum height index to consider
            maxIndex : maximum height index to consider

        Affected:
            self.dataOut.data
            self.dataOut.heightList

        Return:
            1 when the method runs successfully, 0 otherwise
        """

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))

        if (maxIndex >= self.dataOut.nHeights):
            # clamp to the last available height index
            maxIndex = self.dataOut.nHeights-1

        #voltage
        data = self.dataOut.data[:,minIndex:maxIndex+1]

        self.dataOut.data = data
        self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]

        return 1


    def filterByHeights(self, window):
        """
        Decimate in height by averaging groups of *window* consecutive samples.

        Input:
            window : number of height samples to average per output sample;
                     when None it is derived from the transmitted pulse width
                     (txA / deltaHeight).

        Affected:
            self.dataOut.data
            self.dataOut.heightList
        """
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]

        if window is None:
            window = self.dataOut.radarControllerHeaderObj.txA / deltaHeight

        newdelta = deltaHeight * window
        # drop trailing samples that do not fill a whole window
        r = self.dataOut.data.shape[1] % window
        averaged = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
        averaged = averaged.reshape(self.dataOut.data.shape[0], self.dataOut.data.shape[1]//window, window)
        averaged = numpy.average(averaged, 2)
        self.dataOut.data = averaged
        # stop one step early so the height axis matches the decimated data
        self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0], newdelta*self.dataOut.nHeights/window-newdelta, newdelta)
366 366
class CohInt(Operation):
    """Coherent integration of voltage profiles, by count (n) or by time."""

    __isConfig = False

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None


    def __init__(self):

        self.__isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration (minutes). Ignored when "n" is given.
            overlapping : keep a sliding window of the last n profiles

        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False


        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0   # accumulator starts at scalar zero

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase the profile index by one.

        """

        if not self.__withOverapping:
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        #Overlapping data
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty then it takes the data value.
        # "is None" instead of "== None": the buffer may be an ndarray,
        # for which "==" compares elementwise.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        #If the buffer length is lower than n then stacking the data value
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        #If the buffer length is equal to n then replacing the last buffer value with the data value
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the sum of the buffered profiles and the number of profiles
        used in the sum.

        Affected:

            self.__profileIndex

        """

        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        #Integration with Overlapping
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        # Accumulate by profile count; return the sum when n profiles are in.

        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:

            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        # Accumulate by elapsed time; return the sum when the integration
        # time has passed.

        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """
        Accumulate *data*; return (avgdata, avgdatatime) when an integration
        is complete, otherwise (None, None).
        """

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)


        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): __lastdatatime was just set to datatime, so deltatime
        # is always 0 here and the overlapping branch never advances
        # __initime. Looks like this was meant to use the previous timestamp;
        # behavior kept as-is — confirm intent before changing.
        deltatime = datatime -self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, **kwargs):

        if not self.__isConfig:
            self.setup(**kwargs)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

#        dataOut.timeInterval *= n
        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
            dataOut.flagNoData = False
564 564 class Decoder(Operation):
565 565
566 566 __isConfig = False
567 567 __profIndex = 0
568 568
569 569 code = None
570 570
571 571 nCode = None
572 572 nBaud = None
573 573
574 574 def __init__(self):
575 575
576 576 self.__isConfig = False
577 577
578 578 def setup(self, code):
579 579
580 580 self.__profIndex = 0
581 581
582 582 self.code = code
583 583
584 584 self.nCode = len(code)
585 585 self.nBaud = len(code[0])
586 586
587 587 def convolutionInFreq(self, data):
588 588
589 ndata = data.shape[1]
589 nchannel, ndata = data.shape
590 590 newcode = numpy.zeros(ndata)
591 591 newcode[0:self.nBaud] = self.code[self.__profIndex]
592 592
593 593 fft_data = numpy.fft.fft(data, axis=1)
594 594 fft_code = numpy.conj(numpy.fft.fft(newcode))
595 595 fft_code = fft_code.reshape(1,len(fft_code))
596 596
597 597 # conv = fft_data.copy()
598 598 # conv.fill(0)
599 599
600 600 conv = fft_data*fft_code
601 601
602 602 data = numpy.fft.ifft(conv,axis=1)
603 603
604 604 datadec = data[:,:-self.nBaud+1]
605 605 ndatadec = ndata - self.nBaud + 1
606 606
607 if self.__profIndex == self.nCode:
607 if self.__profIndex == self.nCode-1:
608 608 self.__profIndex = 0
609 return ndatadec, datadec
609 610
610 611 self.__profIndex += 1
611 612
612 613 return ndatadec, datadec
613 614
614 615
615 616 def convolutionInTime(self, data):
616 617
617 nchannel = data.shape[1]
618 nchannel, ndata = data.shape
618 619 newcode = self.code[self.__profIndex]
620 ndatadec = ndata - self.nBaud + 1
619 621
620 datadec = data.copy()
622 datadec = numpy.zeros((nchannel, ndatadec))
621 623
622 624 for i in range(nchannel):
623 625 datadec[i,:] = numpy.correlate(data[i,:], newcode)
624 626
625 ndatadec = ndata - self.nBaud + 1
626
627 if self.__profIndex == self.nCode:
628 self.__profIndex = 0
627 if self.__profIndex == self.nCode-1:
628 self.__profIndex = 0
629 return ndatadec, datadec
629 630
630 631 self.__profIndex += 1
631 632
632 633 return ndatadec, datadec
633 634
634 635 def run(self, dataOut, code=None, mode = 0):
635 636
636 637 if not self.__isConfig:
637 638
638 639 if code == None:
639 640 code = dataOut.code
640 641
641 642 self.setup(code)
642 643 self.__isConfig = True
643 644
644 645 if mode == 0:
645 ndatadec, datadec = self.convolutionInFreq(data)
646 ndatadec, datadec = self.convolutionInFreq(dataOut.data)
646 647
647 648 if mode == 1:
648 ndatadec, datadec = self.convolutionInTime(data)
649 print "This function is not implemented"
650 # ndatadec, datadec = self.convolutionInTime(dataOut.data)
649 651
650 652 dataOut.data = datadec
651 653
652 dataOut.heightList = dataOut.heightList[0:ndatadec+1]
654 dataOut.heightList = dataOut.heightList[0:ndatadec]
653 655
654 656 dataOut.flagDecodeData = True #asumo q la data no esta decodificada
655 657
656 658 # dataOut.flagDeflipData = True #asumo q la data no esta sin flip
657 659
658 660
659 661 class SpectraProc(ProcessingUnit):
660 662
661 663 def __init__(self):
662 664
663 665 self.objectDict = {}
664 666 self.buffer = None
665 667 self.firstdatatime = None
666 668 self.profIndex = 0
667 669 self.dataOut = Spectra()
668 670
669 671 def __updateObjFromInput(self):
670 672
671 673 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
672 674 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
673 675 self.dataOut.channelList = self.dataIn.channelList
674 676 self.dataOut.heightList = self.dataIn.heightList
675 677 self.dataOut.dtype = self.dataIn.dtype
676 678 # self.dataOut.nHeights = self.dataIn.nHeights
677 679 # self.dataOut.nChannels = self.dataIn.nChannels
678 680 self.dataOut.nBaud = self.dataIn.nBaud
679 681 self.dataOut.nCode = self.dataIn.nCode
680 682 self.dataOut.code = self.dataIn.code
681 683 self.dataOut.nProfiles = self.dataOut.nFFTPoints
682 684 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
683 685 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
684 686 self.dataOut.utctime = self.firstdatatime
685 687 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
686 688 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
687 689 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
688 690 self.dataOut.nCohInt = self.dataIn.nCohInt
689 691 self.dataOut.nIncohInt = 1
690 692 self.dataOut.ippSeconds = self.dataIn.ippSeconds
691 693
692 694 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
693 695
694 696 def __getFft(self):
695 697 """
696 698 Convierte valores de Voltaje a Spectra
697 699
698 700 Affected:
699 701 self.dataOut.data_spc
700 702 self.dataOut.data_cspc
701 703 self.dataOut.data_dc
702 704 self.dataOut.heightList
703 705 self.profIndex
704 706 self.buffer
705 707 self.dataOut.flagNoData
706 708 """
707 709 fft_volt = numpy.fft.fft(self.buffer,axis=1)
708 710 dc = fft_volt[:,0,:]
709 711
710 712 #calculo de self-spectra
711 713 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
712 714 spc = fft_volt * numpy.conjugate(fft_volt)
713 715 spc = spc.real
714 716
715 717 blocksize = 0
716 718 blocksize += dc.size
717 719 blocksize += spc.size
718 720
719 721 cspc = None
720 722 pairIndex = 0
721 723 if self.dataOut.pairsList != None:
722 724 #calculo de cross-spectra
723 725 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
724 726 for pair in self.dataOut.pairsList:
725 727 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
726 728 pairIndex += 1
727 729 blocksize += cspc.size
728 730
729 731 self.dataOut.data_spc = spc
730 732 self.dataOut.data_cspc = cspc
731 733 self.dataOut.data_dc = dc
732 734 self.dataOut.blockSize = blocksize
733 735
734 736 def init(self, nFFTPoints=None, pairsList=None):
735 737
736 738 self.dataOut.flagNoData = True
737 739
738 740 if self.dataIn.type == "Spectra":
739 741 self.dataOut.copy(self.dataIn)
740 742 return
741 743
742 744 if self.dataIn.type == "Voltage":
743 745
744 746 if nFFTPoints == None:
745 747 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
746 748
747 749 if pairsList == None:
748 750 nPairs = 0
749 751 else:
750 752 nPairs = len(pairsList)
751 753
752 754 self.dataOut.nFFTPoints = nFFTPoints
753 755 self.dataOut.pairsList = pairsList
754 756 self.dataOut.nPairs = nPairs
755 757
756 758 if self.buffer == None:
757 759 self.buffer = numpy.zeros((self.dataIn.nChannels,
758 760 self.dataOut.nFFTPoints,
759 761 self.dataIn.nHeights),
760 762 dtype='complex')
761 763
762 764
763 765 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
764 766 self.profIndex += 1
765 767
766 768 if self.firstdatatime == None:
767 769 self.firstdatatime = self.dataIn.utctime
768 770
769 771 if self.profIndex == self.dataOut.nFFTPoints:
770 772 self.__updateObjFromInput()
771 773 self.__getFft()
772 774
773 775 self.dataOut.flagNoData = False
774 776
775 777 self.buffer = None
776 778 self.firstdatatime = None
777 779 self.profIndex = 0
778 780
779 781 return
780 782
781 783 raise ValuError, "The type object %s is not valid"%(self.dataIn.type)
782 784
783 785 def selectChannels(self, channelList):
784 786
785 787 channelIndexList = []
786 788
787 789 for channel in channelList:
788 790 index = self.dataOut.channelList.index(channel)
789 791 channelIndexList.append(index)
790 792
791 793 self.selectChannelsByIndex(channelIndexList)
792 794
793 795 def selectChannelsByIndex(self, channelIndexList):
794 796 """
795 797 Selecciona un bloque de datos en base a canales segun el channelIndexList
796 798
797 799 Input:
798 800 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
799 801
800 802 Affected:
801 803 self.dataOut.data_spc
802 804 self.dataOut.channelIndexList
803 805 self.dataOut.nChannels
804 806
805 807 Return:
806 808 None
807 809 """
808 810
809 811 for channelIndex in channelIndexList:
810 812 if channelIndex not in self.dataOut.channelIndexList:
811 813 print channelIndexList
812 814 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
813 815
814 816 nChannels = len(channelIndexList)
815 817
816 818 data_spc = self.dataOut.data_spc[channelIndexList,:]
817 819
818 820 self.dataOut.data_spc = data_spc
819 821 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
820 822 # self.dataOut.nChannels = nChannels
821 823
822 824 return 1
823 825
824 826
825 827 class IncohInt(Operation):
826 828
827 829
828 830 __profIndex = 0
829 831 __withOverapping = False
830 832
831 833 __byTime = False
832 834 __initime = None
833 835 __lastdatatime = None
834 836 __integrationtime = None
835 837
836 838 __buffer_spc = None
837 839 __buffer_cspc = None
838 840 __buffer_dc = None
839 841
840 842 __dataReady = False
841 843
842 844 n = None
843 845
844 846
845 847 def __init__(self):
846 848
847 849 self.__isConfig = False
848 850
849 851 def setup(self, n=None, timeInterval=None, overlapping=False):
850 852 """
851 853 Set the parameters of the integration class.
852 854
853 855 Inputs:
854 856
855 857 n : Number of coherent integrations
856 858 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
857 859 overlapping :
858 860
859 861 """
860 862
861 863 self.__initime = None
862 864 self.__lastdatatime = 0
863 865 self.__buffer_spc = None
864 866 self.__buffer_cspc = None
865 867 self.__buffer_dc = None
866 868 self.__dataReady = False
867 869
868 870
869 871 if n == None and timeInterval == None:
870 872 raise ValueError, "n or timeInterval should be specified ..."
871 873
872 874 if n != None:
873 875 self.n = n
874 876 self.__byTime = False
875 877 else:
876 878 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
877 879 self.n = 9999
878 880 self.__byTime = True
879 881
880 882 if overlapping:
881 883 self.__withOverapping = True
882 884 else:
883 885 self.__withOverapping = False
884 886 self.__buffer_spc = 0
885 887 self.__buffer_cspc = 0
886 888 self.__buffer_dc = 0
887 889
888 890 self.__profIndex = 0
889 891
890 892 def putData(self, data_spc, data_cspc, data_dc):
891 893
892 894 """
893 895 Add a profile to the __buffer_spc and increase in one the __profileIndex
894 896
895 897 """
896 898
897 899 if not self.__withOverapping:
898 900 self.__buffer_spc += data_spc
899 901
900 902 if data_cspc == None:
901 903 self.__buffer_cspc = None
902 904 else:
903 905 self.__buffer_cspc += data_cspc
904 906
905 907 if data_dc == None:
906 908 self.__buffer_dc = None
907 909 else:
908 910 self.__buffer_dc += data_dc
909 911
910 912 self.__profIndex += 1
911 913 return
912 914
913 915 #Overlapping data
914 916 nChannels, nFFTPoints, nHeis = data_spc.shape
915 917 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
916 918 if data_cspc != None:
917 919 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
918 920 if data_dc != None:
919 921 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
920 922
921 923 #If the buffer is empty then it takes the data value
922 924 if self.__buffer_spc == None:
923 925 self.__buffer_spc = data_spc
924 926
925 927 if data_cspc == None:
926 928 self.__buffer_cspc = None
927 929 else:
928 930 self.__buffer_cspc += data_cspc
929 931
930 932 if data_dc == None:
931 933 self.__buffer_dc = None
932 934 else:
933 935 self.__buffer_dc += data_dc
934 936
935 937 self.__profIndex += 1
936 938 return
937 939
938 940 #If the buffer length is lower than n then stakcing the data value
939 941 if self.__profIndex < self.n:
940 942 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
941 943
942 944 if data_cspc != None:
943 945 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
944 946
945 947 if data_dc != None:
946 948 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
947 949
948 950 self.__profIndex += 1
949 951 return
950 952
951 953 #If the buffer length is equal to n then replacing the last buffer value with the data value
952 954 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
953 955 self.__buffer_spc[self.n-1] = data_spc
954 956
955 957 if data_cspc != None:
956 958 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
957 959 self.__buffer_cspc[self.n-1] = data_cspc
958 960
959 961 if data_dc != None:
960 962 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
961 963 self.__buffer_dc[self.n-1] = data_dc
962 964
963 965 self.__profIndex = self.n
964 966 return
965 967
966 968
967 969 def pushData(self):
968 970 """
969 971 Return the sum of the last profiles and the profiles used in the sum.
970 972
971 973 Affected:
972 974
973 975 self.__profileIndex
974 976
975 977 """
976 978 data_spc = None
977 979 data_cspc = None
978 980 data_dc = None
979 981
980 982 if not self.__withOverapping:
981 983 data_spc = self.__buffer_spc
982 984 data_cspc = self.__buffer_cspc
983 985 data_dc = self.__buffer_dc
984 986
985 987 n = self.__profIndex
986 988
987 989 self.__buffer_spc = 0
988 990 self.__buffer_cspc = 0
989 991 self.__buffer_dc = 0
990 992 self.__profIndex = 0
991 993
992 994 return data_spc, data_cspc, data_dc, n
993 995
994 996 #Integration with Overlapping
995 997 data_spc = numpy.sum(self.__buffer_spc, axis=0)
996 998
997 999 if self.__buffer_cspc != None:
998 1000 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
999 1001
1000 1002 if self.__buffer_dc != None:
1001 1003 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1002 1004
1003 1005 n = self.__profIndex
1004 1006
1005 1007 return data_spc, data_cspc, data_dc, n
1006 1008
1007 1009 def byProfiles(self, *args):
1008 1010
1009 1011 self.__dataReady = False
1010 1012 avgdata_spc = None
1011 1013 avgdata_cspc = None
1012 1014 avgdata_dc = None
1013 1015 n = None
1014 1016
1015 1017 self.putData(*args)
1016 1018
1017 1019 if self.__profIndex == self.n:
1018 1020
1019 1021 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1020 1022 self.__dataReady = True
1021 1023
1022 1024 return avgdata_spc, avgdata_cspc, avgdata_dc
1023 1025
1024 1026 def byTime(self, datatime, *args):
1025 1027
1026 1028 self.__dataReady = False
1027 1029 avgdata_spc = None
1028 1030 avgdata_cspc = None
1029 1031 avgdata_dc = None
1030 1032 n = None
1031 1033
1032 1034 self.putData(*args)
1033 1035
1034 1036 if (datatime - self.__initime) >= self.__integrationtime:
1035 1037 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1036 1038 self.n = n
1037 1039 self.__dataReady = True
1038 1040
1039 1041 return avgdata_spc, avgdata_cspc, avgdata_dc
1040 1042
1041 1043 def integrate(self, datatime, *args):
1042 1044
1043 1045 if self.__initime == None:
1044 1046 self.__initime = datatime
1045 1047
1046 1048 if self.__byTime:
1047 1049 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1048 1050 else:
1049 1051 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1050 1052
1051 1053 self.__lastdatatime = datatime
1052 1054
1053 1055 if avgdata_spc == None:
1054 1056 return None, None, None, None
1055 1057
1056 1058 avgdatatime = self.__initime
1057 1059
1058 1060 deltatime = datatime -self.__lastdatatime
1059 1061
1060 1062 if not self.__withOverapping:
1061 1063 self.__initime = datatime
1062 1064 else:
1063 1065 self.__initime += deltatime
1064 1066
1065 1067 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1066 1068
1067 1069 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1068 1070
1069 1071 if not self.__isConfig:
1070 1072 self.setup(n, timeInterval, overlapping)
1071 1073 self.__isConfig = True
1072 1074
1073 1075 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1074 1076 dataOut.data_spc,
1075 1077 dataOut.data_cspc,
1076 1078 dataOut.data_dc)
1077 1079
1078 1080 # dataOut.timeInterval *= n
1079 1081 dataOut.flagNoData = True
1080 1082
1081 1083 if self.__dataReady:
1082 1084
1083 1085 dataOut.data_spc = avgdata_spc / self.n
1084 1086 dataOut.data_cspc = avgdata_cspc / self.n
1085 1087 dataOut.data_dc = avgdata_dc / self.n
1086 1088
1087 1089 dataOut.nIncohInt *= self.n
1088 1090 dataOut.utctime = avgdatatime
1089 1091 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1090 1092 dataOut.flagNoData = False
1091 1093
1092 1094 class ProfileSelector(Operation):
1093 1095
1094 1096 profileIndex = None
1095 1097 # Tamanho total de los perfiles
1096 1098 nProfiles = None
1097 1099
1098 1100 def __init__(self):
1099 1101
1100 1102 self.profileIndex = 0
1101 1103
1102 1104 def incIndex(self):
1103 1105 self.profileIndex += 1
1104 1106
1105 1107 if self.profileIndex >= self.nProfiles:
1106 1108 self.profileIndex = 0
1107 1109
1108 1110 def isProfileInRange(self, minIndex, maxIndex):
1109 1111
1110 1112 if self.profileIndex < minIndex:
1111 1113 return False
1112 1114
1113 1115 if self.profileIndex > maxIndex:
1114 1116 return False
1115 1117
1116 1118 return True
1117 1119
1118 1120 def isProfileInList(self, profileList):
1119 1121
1120 1122 if self.profileIndex not in profileList:
1121 1123 return False
1122 1124
1123 1125 return True
1124 1126
1125 1127 def run(self, dataOut, profileList=None, profileRangeList=None):
1126 1128
1127 1129 dataOut.flagNoData = True
1128 1130 self.nProfiles = dataOut.nProfiles
1129 1131
1130 1132 if profileList != None:
1131 1133 if self.isProfileInList(profileList):
1132 1134 dataOut.flagNoData = False
1133 1135
1134 1136 self.incIndex()
1135 1137 return 1
1136 1138
1137 1139
1138 1140 elif profileRangeList != None:
1139 1141 minIndex = profileRangeList[0]
1140 1142 maxIndex = profileRangeList[1]
1141 1143 if self.isProfileInRange(minIndex, maxIndex):
1142 1144 dataOut.flagNoData = False
1143 1145
1144 1146 self.incIndex()
1145 1147 return 1
1146 1148
1147 1149 else:
1148 1150 raise ValueError, "ProfileSelector needs profileList or profileRangeList"
1149 1151
1150 1152 return 0
1151 1153
General Comments 0
You need to be logged in to leave comments. Login now