##// END OF EJS Templates
Modificaciones para escribir datos en formato FITS
Daniel Valdez -
r340:967f28151148
parent child
Show More
@@ -1,2660 +1,2716
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 import pyfits
13 14
14 15 from jrodata import *
15 16 from jroheaderIO import *
16 17 from jroprocessing import *
17 18
18 19 LOCALTIME = -18000
19 20
def isNumber(str):
    """
    Check whether a string can be converted to a number.

    Note: the parameter name shadows the builtin ``str``; kept unchanged
    for backward compatibility with existing callers.

    Input:
        str : the string to analyze for numeric convertibility

    Return:
        True  : the string is numeric
        False : the string is not numeric
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures mean
        # "not a number"; unexpected errors should propagate.
        return False
38 39
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a data file holds data inside the given date range.

    Inputs:
        filename       : full path of the data file, Jicamarca format (.r)

        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970.
        endUTSeconds   : end of the selected range, given in seconds
                         counted from 01/01/1970.

    Return:
        1 when the file's first basic-header timestamp lies inside
        [startUTSeconds, endUTSeconds), 0 otherwise (also 0 when the
        header cannot be read).

    Exceptions:
        IOError when the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # only the timestamp of the first block is inspected
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 79
79 80 def isFileinThisTime(filename, startTime, endTime):
80 81 """
81 82 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 83
83 84 Inputs:
84 85 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 86
86 87 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 88
88 89 endTime : tiempo final del rango seleccionado en formato datetime.time
89 90
90 91 Return:
91 92 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 93 fecha especificado, de lo contrario retorna False.
93 94
94 95 Excepciones:
95 96 Si el archivo no existe o no puede ser abierto
96 97 Si la cabecera no puede ser leida.
97 98
98 99 """
99 100
100 101
101 102 try:
102 103 fp = open(filename,'rb')
103 104 except:
104 105 raise IOError, "The file %s can't be opened" %(filename)
105 106
106 107 basicHeaderObj = BasicHeader(LOCALTIME)
107 108 sts = basicHeaderObj.read(fp)
108 109 fp.close()
109 110
110 111 thisDatetime = basicHeaderObj.datatime
111 112 thisTime = basicHeaderObj.datatime.time()
112 113
113 114 if not(sts):
114 115 print "Skipping the file %s because it has not a valid header" %(filename)
115 116 return None
116 117
117 118 if not ((startTime <= thisTime) and (endTime > thisTime)):
118 119 return None
119 120
120 121 return thisDatetime
121 122
def getlastFileFromPath(path, ext):
    """
    Keep only the files of *path* that follow the "PYYYYDDDSSS.ext"
    naming convention and return the last one of the surviving list.

    Input:
        path : folder holding the data files
        ext  : extension of the files to keep

    Return:
        The last valid file name (without path) in case-insensitive
        order, or None when the folder holds no valid file.
    """
    wantedExt = ext.lower()
    candidates = []

    # Filename layout reminder:
    #   0 1234 567 89A BCDE
    #   H YYYY DDD SSS .ext
    for entry in os.listdir(path):
        try:
            int(entry[1:5])   # the year field must be numeric
            int(entry[5:8])   # the day-of-year field must be numeric
        except (ValueError, TypeError):
            continue

        if os.path.splitext(entry)[-1].lower() != wantedExt:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
159 160
def checkForRealPath(path, year, doy, set, ext):
    """
    Resolve the real (case-sensitive) location of a data file on Linux.

    The file may sit directly under *path* or inside a dYYYYDDD/DYYYYDDD
    subfolder, with a lower- or upper-case file prefix; every combination
    is probed until one exists on disk.

    Example:
        for .../D2009307/P2009307367.ext the probes are
        .../y2009307367.ext          .../Y2009307367.ext
        .../x2009307/y2009307367.ext .../x2009307/Y2009307367.ext
        .../X2009307/y2009307367.ext .../X2009307/Y2009307367.ext

    Return:
        (fullpath, filename) when one combination exists;
        (None, filename) otherwise, where filename is the last
        combination tried (None when the extension is not recognized).
    """
    loweredExt = ext.lower()
    if loweredExt == ".r":        # voltage data
        filePrefixes = ['d', 'D']
    elif loweredExt == ".pdata":  # spectra data
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        # Candidate folder: *path* itself, or path/xYYYYDDD (x = d or D).
        if dirPrefix is None:
            searchPath = path
        else:
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # candidate file name: xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
216 217
def isDoyFolder(folder):
    """
    Return 1 when *folder* looks like a doy folder name (xYYYYDDD),
    i.e. characters 1-4 and 5-7 are both numeric; 0 otherwise.
    """
    try:
        int(folder[1:5])  # year field
        int(folder[5:8])  # day-of-year field
    except (ValueError, TypeError):
        return 0
    return 1
229 230
class JRODataIO:
    """
    Base class shared by the JRO reader and writer classes: holds the
    four Jicamarca header objects, the file bookkeeping state and the
    common constants.

    NOTE(review): the list attributes below are class-level mutable
    objects shared between instances until a subclass reassigns them --
    confirm subclasses always rebind rather than mutate in place.
    """

    # speed of light (m/s); used to derive ippSeconds from the IPP value
    c = 3E8

    # True once setup() has configured the object
    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading files while they are still being written
    online = 0

    # numpy dtype of the samples, derived from the processing header
    dtype = None

    pathList = []

    filenameList = []

    # full path of the file currently open
    filename = None

    ext = None

    # 1 right after a new file has been opened
    flagIsNewFile = 1

    # 1 when the gap between consecutive blocks exceeded maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # file object currently being read/written
    fp = None

    firstHeaderSize = 0

    # size in bytes of the per-block basic header
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    # expected total file size computed from the headers
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed gap (seconds) between consecutive blocks
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract: subclasses must implement their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: subclasses must implement their own run().
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the output data object produced by this unit.
        return self.dataOut
305 306
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files (.r / .pdata): searches files in
    offline or online mode, walks them block by block and delivers the
    decoded data through self.dataOut.
    """

    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0

    datetimeList = []

    # 1 until the first online read has jumped to the last block
    __isFirstTimeOnline = 1

    # headers are printed only once; set to False by printInfo()
    __printInfo = True
323 324
    def __init__(self):
        """
        Abstract: concrete readers (voltage/spectra) define their own
        constructor.
        """
        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: build the default output object for this reader.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: compute the shape of a data block from the headers.
        raise ValueError, "No implemented"
342 343
    def __searchFilesOffLine(self,
                             path,
                             startDate,
                             endDate,
                             startTime=datetime.time(0,0,0),
                             endTime=datetime.time(23,59,59),
                             set=None,
                             expLabel='',
                             ext='.r',
                             walk=True):
        """
        Build the list of data files to read in offline mode.

        When *walk* is True, *path* is scanned for doy folders (xYYYYDDD)
        matching [startDate, endDate]; otherwise *path* itself is used.
        Every file with extension *ext* whose first header time falls in
        [startTime, endTime) is kept.

        NOTE(review): the *set* parameter is accepted but never used here
        -- confirm whether set filtering was intended.

        Affected:
            self.filenameList, self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing
            matches the requested range.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # keep only the subdirectories that look like doy folders
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # collect one folder per day of the requested date range
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                # '?' matches the single-letter folder prefix (d/D/...)
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # keep only files whose first block lies in the time range
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
428 429
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Look for the newest data file inside the newest doy folder under
        *path* and return it together with its decoded name fields.

        Input:
            path     : folder holding the data files

            expLabel : name of the sub-experiment (subfolder)

            ext      : extension of the data files

            walk     : when True search inside doy subfolders (xYYYYDDD);
                       when False use *path* directly

        Return:
            directory : the folder where the file was found
            filename  : the last file of that folder
            year      : the year decoded from the file name
            doy       : the day-of-year decoded from the file name
            set       : the set number decoded from the file name
            (all five are None when nothing usable was found)
        """
        dirList = []

        if not walk:
            fullpath = path

        else:
            # keep only the doy-style subdirectories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # the newest folder is the last one in case-insensitive order
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        # reject files that are empty or lack a complete first block
        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set
494 495
    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Affected:
            self.flagIsNewFile, self.fileIndex, self.filename,
            self.fileSize, self.fp, self.flagNoMoreFiles

        Return:
            1 when a file was opened, 0 when the list is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files that are empty or lack a complete data block
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1
524 525
    def __setNextFileOnline(self):
        """
        Find the next file with enough data to be read inside the current
        folder; when no valid file shows up, wait and probe the next
        candidate files, rolling over the doy when the candidates run out.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            when a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # probe the first candidate file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file yet: wait and retry over the next candidates
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): #probe the next self.nFiles+1 candidate files

                if firstTime_flag: #on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 #afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this
                        # class -- presumably set by subclasses; verify.
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): #candidates exhausted: move to the next folder (next doy)
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
609 610
610 611
611 612 def setNextFile(self):
612 613 if self.fp != None:
613 614 self.fp.close()
614 615
615 616 if self.online:
616 617 newFile = self.__setNextFileOnline()
617 618 else:
618 619 newFile = self.__setNextFileOffline()
619 620
620 621 if not(newFile):
621 622 return 0
622 623
623 624 self.__readFirstHeader()
624 625 self.nReadBlocks = 0
625 626 return 1
626 627
    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        In offline mode this always returns 0.  In online mode the file
        is re-opened up to self.nTries times, sleeping self.delay seconds
        between tries, until it has grown by at least one block plus a
        basic header.
        """
        if not self.online:
            return 0

        # every block of the current file has already been read
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # re-open the file to refresh its size as seen on disk
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
661 662
    def __jumpToLastBlock(self):
        """
        On the very first online read, skip ahead to the last complete
        block of the file so reading starts with the freshest data.
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()

        # skip the first data block (its basic header was already read)
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
        else:
            return

        # jump over as many whole (basic header + block) chunks as remain
        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        factor = int(csize/neededsize)
        if factor > 0:
            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
684 685
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, moving on to
        the next file when the current one is exhausted.

        Return:
            1 when a block is ready to be read, 0 otherwise.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        # a freshly opened file is already positioned on its first block
        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # enough bytes left in this file: read the next basic header
        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online mode: the file may still be growing
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # flag a discontinuity when the gap between blocks is too large
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
718 719
719 720
720 721 def readNextBlock(self):
721 722 if not(self.__setNewBlock()):
722 723 return 0
723 724
724 725 if not(self.readBlock()):
725 726 return 0
726 727
727 728 return 1
728 729
    def __rdProcessingHeader(self, fp=None):
        # Read the processing header from *fp* (defaults to the open file).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar controller header from *fp*.
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header from *fp*.
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read the basic (per-block) header from *fp*.
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)
753 754
    def __readFirstHeader(self):
        """
        Read the four headers at the start of a file and derive the
        sample dtype, the IPP in seconds and the expected file size.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the complex sample type from the processing flags bitmask
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # assumes ipp is in km: 2*1000*ipp/c gives the round-trip time in
        # seconds -- TODO confirm the unit against RadarControllerHeader
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # first header + N blocks + (N-1) per-block basic headers
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
        self.getBlockDimension()
784 785
785 786
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and already holds at least
        one full data block.

        When the block size is not known yet (first file), the headers
        are read from the file itself to compute the needed size.

        Input:
            filename : full path of the file to check
            msgFlag  : when True print diagnostic messages

        Return:
            True when the file has enough data, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # headers not read yet: learn the sizes from this very file
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
831 832
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=0,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True):
        """
        Configure the reader and open the first data file.

        In online mode the newest file under *path* is located, retrying
        self.nTries times; in offline mode the file list for the given
        date/time range is built first.

        NOTE(review): in the online branch the local names pathList and
        filenameList are never bound before being assigned to self below
        -- this looks like a latent NameError; confirm against the
        un-mangled source.  Also note that failure paths call
        sys.exit(-1) instead of raising, which is hostile to library use.

        Return:
            self.dataOut on success.
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # start one set earlier so the first read picks up this file
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                datetime.datetime.combine(startDate,startTime).ctime(),
                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

        return self.dataOut
909 910
910 911 def getData():
911 912
912 913 raise ValueError, "This method has not been implemented"
913 914
914 915 def hasNotDataInBuffer():
915 916
916 917 raise ValueError, "This method has not been implemented"
917 918
918 919 def readBlock():
919 920
920 921 raise ValueError, "This method has not been implemented"
921 922
    def isEndProcess(self):
        # 1 once every available file has been consumed, 0 otherwise.
        return self.flagNoMoreFiles

    def printReadBlocks(self):
        # Report how many blocks of the current file have been read.
        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):
        # Report how many blocks have been read overall.
        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Announce the block just read (only when a new block was read).
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())

    def printInfo(self):
        """
        Print the four file headers, but only the first time this is
        called (self.__printInfo gates subsequent calls).
        """
        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        self.__printInfo = False
950 951
951 952
    def run(self, **kwargs):
        """
        Processing-unit entry point: configure the reader on the first
        call (kwargs are forwarded to setup()), then deliver the next
        unit of data via getData().
        """
        if not(self.isConfig):

            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
961 962
class JRODataWriter(JRODataIO, Operation):

    """
    Base writer for processed data files (.r or .pdata).  Data is always
    written to disk one block at a time.
    """

    # index of the block being written inside the current file
    blockIndex = 0

    path = None

    # set counter (the SSS field) of the current output file
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract: subclasses must implement their own constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: True when a whole block is buffered and can be written.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: define the shape of the output data block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one data block to the current file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: feed one unit of data into the writer.
        raise ValueError, "No implemented"
999 1000
    def getDataHeader(self):
        """
        Get a copy of the First Header for the output file.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        # Abstract: implemented by the concrete writer subclasses.
        raise ValueError, "No implemented"
1016 1017
1017 1018 def getBasicHeader(self):
1018 1019
1019 1020 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1020 1021 self.basicHeaderObj.version = self.versionFile
1021 1022 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1022 1023
1023 1024 utc = numpy.floor(self.dataOut.utctime)
1024 1025 milisecond = (self.dataOut.utctime - utc)* 1000.0
1025 1026
1026 1027 self.basicHeaderObj.utc = utc
1027 1028 self.basicHeaderObj.miliSecond = milisecond
1028 1029 self.basicHeaderObj.timeZone = 0
1029 1030 self.basicHeaderObj.dstFlag = 0
1030 1031 self.basicHeaderObj.errorCount = 0
1031 1032
    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header followed by
        the Long header (SystemHeader, RadarControllerHeader,
        ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # the basic-header size field covers itself plus the long header
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1054 1055
    def __setNewBlock(self):
        """
        Write the First Header when starting a new file, or only the
        Basic Header when appending another block to the current file.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        # still room in this file: emit only the per-block basic header
        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # current file is full: roll over to the next one
        if not( self.setNextFile() ):
            return 0

        return 1
1077 1078
1078 1079
    def writeNextBlock(self):
        """
        Write the headers for the next block and then the block itself.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
1093 1094
1094 1095 def setNextFile(self):
1095 1096 """
1096 1097 Determina el siguiente file que sera escrito
1097 1098
1098 1099 Affected:
1099 1100 self.filename
1100 1101 self.subfolder
1101 1102 self.fp
1102 1103 self.setFile
1103 1104 self.flagIsNewFile
1104 1105
1105 1106 Return:
1106 1107 0 : Si el archivo no puede ser escrito
1107 1108 1 : Si el archivo esta listo para ser escrito
1108 1109 """
1109 1110 ext = self.ext
1110 1111 path = self.path
1111 1112
1112 1113 if self.fp != None:
1113 1114 self.fp.close()
1114 1115
1115 1116 timeTuple = time.localtime( self.dataOut.utctime)
1116 1117 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1117 1118
1118 1119 fullpath = os.path.join( path, subfolder )
1119 1120 if not( os.path.exists(fullpath) ):
1120 1121 os.mkdir(fullpath)
1121 1122 self.setFile = -1 #inicializo mi contador de seteo
1122 1123 else:
1123 1124 filesList = os.listdir( fullpath )
1124 1125 if len( filesList ) > 0:
1125 1126 filesList = sorted( filesList, key=str.lower )
1126 1127 filen = filesList[-1]
1127 1128 # el filename debera tener el siguiente formato
1128 1129 # 0 1234 567 89A BCDE (hex)
1129 1130 # x YYYY DDD SSS .ext
1130 1131 if isNumber( filen[8:11] ):
1131 1132 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1132 1133 else:
1133 1134 self.setFile = -1
1134 1135 else:
1135 1136 self.setFile = -1 #inicializo mi contador de seteo
1136 1137
1137 1138 setFile = self.setFile
1138 1139 setFile += 1
1139 1140
1140 1141 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1141 1142 timeTuple.tm_year,
1142 1143 timeTuple.tm_yday,
1143 1144 setFile,
1144 1145 ext )
1145 1146
1146 1147 filename = os.path.join( path, subfolder, file )
1147 1148
1148 1149 fp = open( filename,'wb' )
1149 1150
1150 1151 self.blockIndex = 0
1151 1152
1152 1153 #guardando atributos
1153 1154 self.filename = filename
1154 1155 self.subfolder = subfolder
1155 1156 self.fp = fp
1156 1157 self.setFile = setFile
1157 1158 self.flagIsNewFile = 1
1158 1159
1159 1160 self.getDataHeader()
1160 1161
1161 1162 print 'Writing the file: %s'%self.filename
1162 1163
1163 1164 self.__writeFirstHeader()
1164 1165
1165 1166 return 1
1166 1167
1167 1168 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1168 1169 """
1169 1170 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1170 1171
1171 1172 Inputs:
1172 1173 path : el path destino en el cual se escribiran los files a crear
1173 1174 format : formato en el cual sera salvado un file
1174 1175 set : el setebo del file
1175 1176
1176 1177 Return:
1177 1178 0 : Si no realizo un buen seteo
1178 1179 1 : Si realizo un buen seteo
1179 1180 """
1180 1181
1181 1182 if ext == None:
1182 1183 ext = self.ext
1183 1184
1184 1185 ext = ext.lower()
1185 1186
1186 1187 self.ext = ext
1187 1188
1188 1189 self.path = path
1189 1190
1190 1191 self.setFile = set - 1
1191 1192
1192 1193 self.blocksPerFile = blocksPerFile
1193 1194
1194 1195 self.profilesPerBlock = profilesPerBlock
1195 1196
1196 1197 self.dataOut = dataOut
1197 1198
1198 1199 if not(self.setNextFile()):
1199 1200 print "There isn't a next file"
1200 1201 return 0
1201 1202
1202 1203 self.setBlockDimension()
1203 1204
1204 1205 return 1
1205 1206
1206 1207 def run(self, dataOut, **kwargs):
1207 1208
1208 1209 if not(self.isConfig):
1209 1210
1210 1211 self.setup(dataOut, **kwargs)
1211 1212 self.isConfig = True
1212 1213
1213 1214 self.putData()
1214 1215
class VoltageReader(JRODataReader):
    """
    Reads voltage data from raw-data files (".r"). Reading is always done in
    whole blocks; each block (a 3D array: channels * profiles * heights) is
    kept in the internal "datablock" buffer.

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage. The first three store the metadata
    (header information) read from the files; the Voltage object receives
    one profile of data from the buffer every time getData() is called.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    # Default extension and type character of voltage raw-data filenames.
    ext = ".r"

    optchar = "D"
    dataOut = None
1260 1261
1261 1262
1262 1263 def __init__(self):
1263 1264 """
1264 1265 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1265 1266
1266 1267 Input:
1267 1268 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1268 1269 almacenar un perfil de datos cada vez que se haga un requerimiento
1269 1270 (getData). El perfil sera obtenido a partir del buffer de datos,
1270 1271 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1271 1272 bloque de datos.
1272 1273 Si este parametro no es pasado se creara uno internamente.
1273 1274
1274 1275 Variables afectadas:
1275 1276 self.dataOut
1276 1277
1277 1278 Return:
1278 1279 None
1279 1280 """
1280 1281
1281 1282 self.isConfig = False
1282 1283
1283 1284 self.datablock = None
1284 1285
1285 1286 self.utc = 0
1286 1287
1287 1288 self.ext = ".r"
1288 1289
1289 1290 self.optchar = "D"
1290 1291
1291 1292 self.basicHeaderObj = BasicHeader(LOCALTIME)
1292 1293
1293 1294 self.systemHeaderObj = SystemHeader()
1294 1295
1295 1296 self.radarControllerHeaderObj = RadarControllerHeader()
1296 1297
1297 1298 self.processingHeaderObj = ProcessingHeader()
1298 1299
1299 1300 self.online = 0
1300 1301
1301 1302 self.fp = None
1302 1303
1303 1304 self.idFile = None
1304 1305
1305 1306 self.dtype = None
1306 1307
1307 1308 self.fileSizeByHeader = None
1308 1309
1309 1310 self.filenameList = []
1310 1311
1311 1312 self.filename = None
1312 1313
1313 1314 self.fileSize = None
1314 1315
1315 1316 self.firstHeaderSize = 0
1316 1317
1317 1318 self.basicHeaderSize = 24
1318 1319
1319 1320 self.pathList = []
1320 1321
1321 1322 self.filenameList = []
1322 1323
1323 1324 self.lastUTTime = 0
1324 1325
1325 1326 self.maxTimeStep = 30
1326 1327
1327 1328 self.flagNoMoreFiles = 0
1328 1329
1329 1330 self.set = 0
1330 1331
1331 1332 self.path = None
1332 1333
1333 1334 self.profileIndex = 2**32-1
1334 1335
1335 1336 self.delay = 3 #seconds
1336 1337
1337 1338 self.nTries = 3 #quantity tries
1338 1339
1339 1340 self.nFiles = 3 #number of files for searching
1340 1341
1341 1342 self.nReadBlocks = 0
1342 1343
1343 1344 self.flagIsNewFile = 1
1344 1345
1345 1346 self.__isFirstTimeOnline = 1
1346 1347
1347 1348 self.ippSeconds = 0
1348 1349
1349 1350 self.flagTimeBlock = 0
1350 1351
1351 1352 self.flagIsNewBlock = 0
1352 1353
1353 1354 self.nTotalBlocks = 0
1354 1355
1355 1356 self.blocksize = 0
1356 1357
1357 1358 self.dataOut = self.createObjByDefault()
1358 1359
1359 1360 def createObjByDefault(self):
1360 1361
1361 1362 dataObj = Voltage()
1362 1363
1363 1364 return dataObj
1364 1365
1365 1366 def __hasNotDataInBuffer(self):
1366 1367 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1367 1368 return 1
1368 1369 return 0
1369 1370
1370 1371
1371 1372 def getBlockDimension(self):
1372 1373 """
1373 1374 Obtiene la cantidad de puntos a leer por cada bloque de datos
1374 1375
1375 1376 Affected:
1376 1377 self.blocksize
1377 1378
1378 1379 Return:
1379 1380 None
1380 1381 """
1381 1382 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1382 1383 self.blocksize = pts2read
1383 1384
1384 1385
1385 1386 def readBlock(self):
1386 1387 """
1387 1388 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1388 1389 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1389 1390 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1390 1391 es seteado a 0
1391 1392
1392 1393 Inputs:
1393 1394 None
1394 1395
1395 1396 Return:
1396 1397 None
1397 1398
1398 1399 Affected:
1399 1400 self.profileIndex
1400 1401 self.datablock
1401 1402 self.flagIsNewFile
1402 1403 self.flagIsNewBlock
1403 1404 self.nTotalBlocks
1404 1405
1405 1406 Exceptions:
1406 1407 Si un bloque leido no es un bloque valido
1407 1408 """
1408 1409
1409 1410 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1410 1411
1411 1412 try:
1412 1413 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1413 1414 except:
1414 1415 print "The read block (%3d) has not enough data" %self.nReadBlocks
1415 1416 return 0
1416 1417
1417 1418 junk = numpy.transpose(junk, (2,0,1))
1418 1419 self.datablock = junk['real'] + junk['imag']*1j
1419 1420
1420 1421 self.profileIndex = 0
1421 1422
1422 1423 self.flagIsNewFile = 0
1423 1424 self.flagIsNewBlock = 1
1424 1425
1425 1426 self.nTotalBlocks += 1
1426 1427 self.nReadBlocks += 1
1427 1428
1428 1429 return 1
1429 1430
1430 1431
1431 1432 def getData(self):
1432 1433 """
1433 1434 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1434 1435 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1435 1436 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1436 1437
1437 1438 Ademas incrementa el contador del buffer en 1.
1438 1439
1439 1440 Return:
1440 1441 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1441 1442 buffer. Si no hay mas archivos a leer retorna None.
1442 1443
1443 1444 Variables afectadas:
1444 1445 self.dataOut
1445 1446 self.profileIndex
1446 1447
1447 1448 Affected:
1448 1449 self.dataOut
1449 1450 self.profileIndex
1450 1451 self.flagTimeBlock
1451 1452 self.flagIsNewBlock
1452 1453 """
1453 1454
1454 1455 if self.flagNoMoreFiles:
1455 1456 self.dataOut.flagNoData = True
1456 1457 print 'Process finished'
1457 1458 return 0
1458 1459
1459 1460 self.flagTimeBlock = 0
1460 1461 self.flagIsNewBlock = 0
1461 1462
1462 1463 if self.__hasNotDataInBuffer():
1463 1464
1464 1465 if not( self.readNextBlock() ):
1465 1466 return 0
1466 1467
1467 1468 self.dataOut.dtype = self.dtype
1468 1469
1469 1470 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1470 1471
1471 1472 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1472 1473
1473 1474 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1474 1475
1475 1476 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1476 1477
1477 1478 self.dataOut.flagTimeBlock = self.flagTimeBlock
1478 1479
1479 1480 self.dataOut.ippSeconds = self.ippSeconds
1480 1481
1481 1482 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1482 1483
1483 1484 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1484 1485
1485 1486 self.dataOut.flagShiftFFT = False
1486 1487
1487 1488 if self.radarControllerHeaderObj.code != None:
1488 1489
1489 1490 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1490 1491
1491 1492 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1492 1493
1493 1494 self.dataOut.code = self.radarControllerHeaderObj.code
1494 1495
1495 1496 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1496 1497
1497 1498 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1498 1499
1499 1500 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1500 1501
1501 1502 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1502 1503
1503 1504 self.dataOut.flagShiftFFT = False
1504 1505
1505 1506
1506 1507 # self.updateDataHeader()
1507 1508
1508 1509 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1509 1510
1510 1511 if self.datablock == None:
1511 1512 self.dataOut.flagNoData = True
1512 1513 return 0
1513 1514
1514 1515 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1515 1516
1516 1517 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1517 1518
1518 1519 self.profileIndex += 1
1519 1520
1520 1521 self.dataOut.flagNoData = False
1521 1522
1522 1523 # print self.profileIndex, self.dataOut.utctime
1523 1524 # if self.profileIndex == 800:
1524 1525 # a=1
1525 1526
1526 1527
1527 1528 return self.dataOut.data
1528 1529
1529 1530
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (".r"). Writing is always done in
    whole blocks.
    """

    ext = ".r"

    optchar = "D"

    # Shape of the on-disk block: (profiles, heights, channels).
    shapeBuffer = None
1541 1542
1542 1543
1543 1544 def __init__(self):
1544 1545 """
1545 1546 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
1546 1547
1547 1548 Affected:
1548 1549 self.dataOut
1549 1550
1550 1551 Return: None
1551 1552 """
1552 1553
1553 1554 self.nTotalBlocks = 0
1554 1555
1555 1556 self.profileIndex = 0
1556 1557
1557 1558 self.isConfig = False
1558 1559
1559 1560 self.fp = None
1560 1561
1561 1562 self.flagIsNewFile = 1
1562 1563
1563 1564 self.nTotalBlocks = 0
1564 1565
1565 1566 self.flagIsNewBlock = 0
1566 1567
1567 1568 self.setFile = None
1568 1569
1569 1570 self.dtype = None
1570 1571
1571 1572 self.path = None
1572 1573
1573 1574 self.filename = None
1574 1575
1575 1576 self.basicHeaderObj = BasicHeader(LOCALTIME)
1576 1577
1577 1578 self.systemHeaderObj = SystemHeader()
1578 1579
1579 1580 self.radarControllerHeaderObj = RadarControllerHeader()
1580 1581
1581 1582 self.processingHeaderObj = ProcessingHeader()
1582 1583
1583 1584 def hasAllDataInBuffer(self):
1584 1585 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1585 1586 return 1
1586 1587 return 0
1587 1588
1588 1589
1589 1590 def setBlockDimension(self):
1590 1591 """
1591 1592 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1592 1593
1593 1594 Affected:
1594 1595 self.shape_spc_Buffer
1595 1596 self.shape_cspc_Buffer
1596 1597 self.shape_dc_Buffer
1597 1598
1598 1599 Return: None
1599 1600 """
1600 1601 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1601 1602 self.processingHeaderObj.nHeights,
1602 1603 self.systemHeaderObj.nChannels)
1603 1604
1604 1605 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1605 1606 self.processingHeaderObj.profilesPerBlock,
1606 1607 self.processingHeaderObj.nHeights),
1607 1608 dtype=numpy.dtype('complex64'))
1608 1609
1609 1610
    def writeBlock(self):
        """
        Writes the buffered data block to the current output file and resets
        the buffer.

        Affected:
            self.datablock
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        # Output record with separate real/imag fields, in on-disk layout.
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # Reorder (channels, profiles, heights) -> (profiles, heights, channels).
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        # Flatten before dumping the raw bytes to disk.
        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1
1642 1643
    def putData(self):
        """
        Buffers one profile of data and, when the buffer holds a complete
        block, writes the block to the output file.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : the profile was buffered (and possibly a block was written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A time discontinuity starts a fresh file with an empty buffer.
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        # First profile of a block: refresh the Basic header timestamp.
        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1
1679 1680
1680 1681 def __getProcessFlags(self):
1681 1682
1682 1683 processFlags = 0
1683 1684
1684 1685 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1685 1686 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1686 1687 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1687 1688 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1688 1689 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1689 1690 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1690 1691
1691 1692 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1692 1693
1693 1694
1694 1695
1695 1696 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1696 1697 PROCFLAG.DATATYPE_SHORT,
1697 1698 PROCFLAG.DATATYPE_LONG,
1698 1699 PROCFLAG.DATATYPE_INT64,
1699 1700 PROCFLAG.DATATYPE_FLOAT,
1700 1701 PROCFLAG.DATATYPE_DOUBLE]
1701 1702
1702 1703
1703 1704 for index in range(len(dtypeList)):
1704 1705 if self.dataOut.dtype == dtypeList[index]:
1705 1706 dtypeValue = datatypeValueList[index]
1706 1707 break
1707 1708
1708 1709 processFlags += dtypeValue
1709 1710
1710 1711 if self.dataOut.flagDecodeData:
1711 1712 processFlags += PROCFLAG.DECODE_DATA
1712 1713
1713 1714 if self.dataOut.flagDeflipData:
1714 1715 processFlags += PROCFLAG.DEFLIP_DATA
1715 1716
1716 1717 if self.dataOut.code != None:
1717 1718 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1718 1719
1719 1720 if self.dataOut.nCohInt > 1:
1720 1721 processFlags += PROCFLAG.COHERENT_INTEGRATION
1721 1722
1722 1723 return processFlags
1723 1724
1724 1725
1725 1726 def __getBlockSize(self):
1726 1727 '''
1727 1728 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1728 1729 '''
1729 1730
1730 1731 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1731 1732 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1732 1733 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1733 1734 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1734 1735 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1735 1736 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1736 1737
1737 1738 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1738 1739 datatypeValueList = [1,2,4,8,4,8]
1739 1740 for index in range(len(dtypeList)):
1740 1741 if self.dataOut.dtype == dtypeList[index]:
1741 1742 datatypeValue = datatypeValueList[index]
1742 1743 break
1743 1744
1744 1745 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1745 1746
1746 1747 return blocksize
1747 1748
1748 1749 def getDataHeader(self):
1749 1750
1750 1751 """
1751 1752 Obtiene una copia del First Header
1752 1753
1753 1754 Affected:
1754 1755 self.systemHeaderObj
1755 1756 self.radarControllerHeaderObj
1756 1757 self.dtype
1757 1758
1758 1759 Return:
1759 1760 None
1760 1761 """
1761 1762
1762 1763 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1763 1764 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1764 1765 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1765 1766
1766 1767 self.getBasicHeader()
1767 1768
1768 1769 processingHeaderSize = 40 # bytes
1769 1770 self.processingHeaderObj.dtype = 0 # Voltage
1770 1771 self.processingHeaderObj.blockSize = self.__getBlockSize()
1771 1772 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1772 1773 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1773 1774 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1774 1775 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1775 1776 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1776 1777 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1777 1778 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1778 1779
1779 1780 if self.dataOut.code != None:
1780 1781 self.processingHeaderObj.code = self.dataOut.code
1781 1782 self.processingHeaderObj.nCode = self.dataOut.nCode
1782 1783 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1783 1784 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1784 1785 processingHeaderSize += codesize
1785 1786
1786 1787 if self.processingHeaderObj.nWindows != 0:
1787 1788 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1788 1789 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1789 1790 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1790 1791 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1791 1792 processingHeaderSize += 12
1792 1793
1793 1794 self.processingHeaderObj.size = processingHeaderSize
1794 1795
class SpectraReader(JRODataReader):
    """
    Reads spectra data from processed files (".pdata"). Reading is always
    done in whole blocks; the data (3D arrays) is stored in three buffers:
    Self Spectra, Cross Spectra and DC channels.

    equal-channel pairs     * heights * profiles (Self Spectra)
    different-channel pairs * heights * profiles (Cross Spectra)
    channels * heights                           (DC channels)

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra. The first three store the metadata
    (header information) read from the files; the Spectra object receives a
    block of data from the buffers every time getData() is called.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    # Points to read per block for each section of the file.
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    # Channel/pair bookkeeping derived from the processing header.
    nRdChannels = None

    nRdPairs = None

    rdPairList = []
1853 1854
1854 1855
    def __init__(self):
        """
        Initializes the SpectraReader.

        A Spectra object (self.dataOut) is created internally; it receives a
        block of data every time getData() is called, reading a new block
        from disk when needed.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()
1952 1953
1953 1954
1954 1955 def createObjByDefault(self):
1955 1956
1956 1957 dataObj = Spectra()
1957 1958
1958 1959 return dataObj
1959 1960
    def __hasNotDataInBuffer(self):
        # Spectra files are consumed one whole block per getData() call, so
        # the buffer never holds leftover data: always request a new block.
        return 1
1962 1963
1963 1964
1964 1965 def getBlockDimension(self):
1965 1966 """
1966 1967 Obtiene la cantidad de puntos a leer por cada bloque de datos
1967 1968
1968 1969 Affected:
1969 1970 self.nRdChannels
1970 1971 self.nRdPairs
1971 1972 self.pts2read_SelfSpectra
1972 1973 self.pts2read_CrossSpectra
1973 1974 self.pts2read_DCchannels
1974 1975 self.blocksize
1975 1976 self.dataOut.nChannels
1976 1977 self.dataOut.nPairs
1977 1978
1978 1979 Return:
1979 1980 None
1980 1981 """
1981 1982 self.nRdChannels = 0
1982 1983 self.nRdPairs = 0
1983 1984 self.rdPairList = []
1984 1985
1985 1986 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1986 1987 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1987 1988 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1988 1989 else:
1989 1990 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1990 1991 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1991 1992
1992 1993 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1993 1994
1994 1995 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1995 1996 self.blocksize = self.pts2read_SelfSpectra
1996 1997
1997 1998 if self.processingHeaderObj.flag_cspc:
1998 1999 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1999 2000 self.blocksize += self.pts2read_CrossSpectra
2000 2001
2001 2002 if self.processingHeaderObj.flag_dc:
2002 2003 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2003 2004 self.blocksize += self.pts2read_DCchannels
2004 2005
2005 2006 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2006 2007
2007 2008
2008 2009 def readBlock(self):
2009 2010 """
2010 2011 Lee el bloque de datos desde la posicion actual del puntero del archivo
2011 2012 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
2012 2013 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
2013 2014 es seteado a 0
2014 2015
2015 2016 Return: None
2016 2017
2017 2018 Variables afectadas:
2018 2019
2019 2020 self.flagIsNewFile
2020 2021 self.flagIsNewBlock
2021 2022 self.nTotalBlocks
2022 2023 self.data_spc
2023 2024 self.data_cspc
2024 2025 self.data_dc
2025 2026
2026 2027 Exceptions:
2027 2028 Si un bloque leido no es un bloque valido
2028 2029 """
2029 2030 blockOk_flag = False
2030 2031 fpointer = self.fp.tell()
2031 2032
2032 2033 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2033 2034 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2034 2035
2035 2036 if self.processingHeaderObj.flag_cspc:
2036 2037 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2037 2038 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2038 2039
2039 2040 if self.processingHeaderObj.flag_dc:
2040 2041 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2041 2042 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
2042 2043
2043 2044
2044 2045 if not(self.processingHeaderObj.shif_fft):
2045 2046 #desplaza a la derecha en el eje 2 determinadas posiciones
2046 2047 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2047 2048 spc = numpy.roll( spc, shift , axis=2 )
2048 2049
2049 2050 if self.processingHeaderObj.flag_cspc:
2050 2051 #desplaza a la derecha en el eje 2 determinadas posiciones
2051 2052 cspc = numpy.roll( cspc, shift, axis=2 )
2052 2053
2053 2054 # self.processingHeaderObj.shif_fft = True
2054 2055
2055 2056 spc = numpy.transpose( spc, (0,2,1) )
2056 2057 self.data_spc = spc
2057 2058
2058 2059 if self.processingHeaderObj.flag_cspc:
2059 2060 cspc = numpy.transpose( cspc, (0,2,1) )
2060 2061 self.data_cspc = cspc['real'] + cspc['imag']*1j
2061 2062 else:
2062 2063 self.data_cspc = None
2063 2064
2064 2065 if self.processingHeaderObj.flag_dc:
2065 2066 self.data_dc = dc['real'] + dc['imag']*1j
2066 2067 else:
2067 2068 self.data_dc = None
2068 2069
2069 2070 self.flagIsNewFile = 0
2070 2071 self.flagIsNewBlock = 1
2071 2072
2072 2073 self.nTotalBlocks += 1
2073 2074 self.nReadBlocks += 1
2074 2075
2075 2076 return 1
2076 2077
2077 2078
2078 2079 def getData(self):
2079 2080 """
2080 2081 Copia el buffer de lectura a la clase "Spectra",
2081 2082 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2082 2083 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2083 2084
2084 2085 Return:
2085 2086 0 : Si no hay mas archivos disponibles
2086 2087 1 : Si hizo una buena copia del buffer
2087 2088
2088 2089 Affected:
2089 2090 self.dataOut
2090 2091
2091 2092 self.flagTimeBlock
2092 2093 self.flagIsNewBlock
2093 2094 """
2094 2095
2095 2096 if self.flagNoMoreFiles:
2096 2097 self.dataOut.flagNoData = True
2097 2098 print 'Process finished'
2098 2099 return 0
2099 2100
2100 2101 self.flagTimeBlock = 0
2101 2102 self.flagIsNewBlock = 0
2102 2103
2103 2104 if self.__hasNotDataInBuffer():
2104 2105
2105 2106 if not( self.readNextBlock() ):
2106 2107 self.dataOut.flagNoData = True
2107 2108 return 0
2108 2109
2109 2110 # self.updateDataHeader()
2110 2111
2111 2112 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2112 2113
2113 2114 if self.data_dc == None:
2114 2115 self.dataOut.flagNoData = True
2115 2116 return 0
2116 2117
2117 2118 self.dataOut.data_spc = self.data_spc
2118 2119
2119 2120 self.dataOut.data_cspc = self.data_cspc
2120 2121
2121 2122 self.dataOut.data_dc = self.data_dc
2122 2123
2123 2124 self.dataOut.flagTimeBlock = self.flagTimeBlock
2124 2125
2125 2126 self.dataOut.flagNoData = False
2126 2127
2127 2128 self.dataOut.dtype = self.dtype
2128 2129
2129 2130 # self.dataOut.nChannels = self.nRdChannels
2130 2131
2131 2132 self.dataOut.nPairs = self.nRdPairs
2132 2133
2133 2134 self.dataOut.pairsList = self.rdPairList
2134 2135
2135 2136 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2136 2137
2137 2138 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2138 2139
2139 2140 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2140 2141
2141 2142 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2142 2143
2143 2144 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2144 2145
2145 2146 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2146 2147
2147 2148 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2148 2149
2149 2150 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2150 2151
2151 2152 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2152 2153
2153 2154 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2154 2155
2155 2156 self.dataOut.ippSeconds = self.ippSeconds
2156 2157
2157 2158 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2158 2159
2159 2160 # self.profileIndex += 1
2160 2161
2161 2162 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2162 2163
2163 2164 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2164 2165
2165 2166 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2166 2167
2167 2168 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2168 2169
2169 2170 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2170 2171
2171 2172 if self.processingHeaderObj.code != None:
2172 2173
2173 2174 self.dataOut.nCode = self.processingHeaderObj.nCode
2174 2175
2175 2176 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2176 2177
2177 2178 self.dataOut.code = self.processingHeaderObj.code
2178 2179
2179 2180 self.dataOut.flagDecodeData = True
2180 2181
2181 2182 return self.dataOut.data_spc
2182 2183
2183 2184
2184 2185 class SpectraWriter(JRODataWriter):
2185 2186
2186 2187 """
2187 2188 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
2188 2189 de los datos siempre se realiza por bloques.
2189 2190 """
2190 2191
2191 2192 ext = ".pdata"
2192 2193
2193 2194 optchar = "P"
2194 2195
2195 2196 shape_spc_Buffer = None
2196 2197
2197 2198 shape_cspc_Buffer = None
2198 2199
2199 2200 shape_dc_Buffer = None
2200 2201
2201 2202 data_spc = None
2202 2203
2203 2204 data_cspc = None
2204 2205
2205 2206 data_dc = None
2206 2207
2207 2208 # dataOut = None
2208 2209
2209 2210 def __init__(self):
2210 2211 """
2211 2212 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
2212 2213
2213 2214 Affected:
2214 2215 self.dataOut
2215 2216 self.basicHeaderObj
2216 2217 self.systemHeaderObj
2217 2218 self.radarControllerHeaderObj
2218 2219 self.processingHeaderObj
2219 2220
2220 2221 Return: None
2221 2222 """
2222 2223
2223 2224 self.isConfig = False
2224 2225
2225 2226 self.nTotalBlocks = 0
2226 2227
2227 2228 self.data_spc = None
2228 2229
2229 2230 self.data_cspc = None
2230 2231
2231 2232 self.data_dc = None
2232 2233
2233 2234 self.fp = None
2234 2235
2235 2236 self.flagIsNewFile = 1
2236 2237
2237 2238 self.nTotalBlocks = 0
2238 2239
2239 2240 self.flagIsNewBlock = 0
2240 2241
2241 2242 self.setFile = None
2242 2243
2243 2244 self.dtype = None
2244 2245
2245 2246 self.path = None
2246 2247
2247 2248 self.noMoreFiles = 0
2248 2249
2249 2250 self.filename = None
2250 2251
2251 2252 self.basicHeaderObj = BasicHeader(LOCALTIME)
2252 2253
2253 2254 self.systemHeaderObj = SystemHeader()
2254 2255
2255 2256 self.radarControllerHeaderObj = RadarControllerHeader()
2256 2257
2257 2258 self.processingHeaderObj = ProcessingHeader()
2258 2259
2259 2260
2260 2261 def hasAllDataInBuffer(self):
2261 2262 return 1
2262 2263
2263 2264
2264 2265 def setBlockDimension(self):
2265 2266 """
2266 2267 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
2267 2268
2268 2269 Affected:
2269 2270 self.shape_spc_Buffer
2270 2271 self.shape_cspc_Buffer
2271 2272 self.shape_dc_Buffer
2272 2273
2273 2274 Return: None
2274 2275 """
2275 2276 self.shape_spc_Buffer = (self.dataOut.nChannels,
2276 2277 self.processingHeaderObj.nHeights,
2277 2278 self.processingHeaderObj.profilesPerBlock)
2278 2279
2279 2280 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2280 2281 self.processingHeaderObj.nHeights,
2281 2282 self.processingHeaderObj.profilesPerBlock)
2282 2283
2283 2284 self.shape_dc_Buffer = (self.dataOut.nChannels,
2284 2285 self.processingHeaderObj.nHeights)
2285 2286
2286 2287
2287 2288 def writeBlock(self):
2288 2289 """
2289 2290 Escribe el buffer en el file designado
2290 2291
2291 2292 Affected:
2292 2293 self.data_spc
2293 2294 self.data_cspc
2294 2295 self.data_dc
2295 2296 self.flagIsNewFile
2296 2297 self.flagIsNewBlock
2297 2298 self.nTotalBlocks
2298 2299 self.nWriteBlocks
2299 2300
2300 2301 Return: None
2301 2302 """
2302 2303
2303 2304 spc = numpy.transpose( self.data_spc, (0,2,1) )
2304 2305 if not( self.processingHeaderObj.shif_fft ):
2305 2306 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2306 2307 data = spc.reshape((-1))
2307 2308 data = data.astype(self.dtype[0])
2308 2309 data.tofile(self.fp)
2309 2310
2310 2311 if self.data_cspc != None:
2311 2312 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2312 2313 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2313 2314 if not( self.processingHeaderObj.shif_fft ):
2314 2315 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2315 2316 data['real'] = cspc.real
2316 2317 data['imag'] = cspc.imag
2317 2318 data = data.reshape((-1))
2318 2319 data.tofile(self.fp)
2319 2320
2320 2321 if self.data_dc != None:
2321 2322 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2322 2323 dc = self.data_dc
2323 2324 data['real'] = dc.real
2324 2325 data['imag'] = dc.imag
2325 2326 data = data.reshape((-1))
2326 2327 data.tofile(self.fp)
2327 2328
2328 2329 self.data_spc.fill(0)
2329 2330 self.data_dc.fill(0)
2330 2331 if self.data_cspc != None:
2331 2332 self.data_cspc.fill(0)
2332 2333
2333 2334 self.flagIsNewFile = 0
2334 2335 self.flagIsNewBlock = 1
2335 2336 self.nTotalBlocks += 1
2336 2337 self.nWriteBlocks += 1
2337 2338 self.blockIndex += 1
2338 2339
2339 2340
2340 2341 def putData(self):
2341 2342 """
2342 2343 Setea un bloque de datos y luego los escribe en un file
2343 2344
2344 2345 Affected:
2345 2346 self.data_spc
2346 2347 self.data_cspc
2347 2348 self.data_dc
2348 2349
2349 2350 Return:
2350 2351 0 : Si no hay data o no hay mas files que puedan escribirse
2351 2352 1 : Si se escribio la data de un bloque en un file
2352 2353 """
2353 2354
2354 2355 if self.dataOut.flagNoData:
2355 2356 return 0
2356 2357
2357 2358 self.flagIsNewBlock = 0
2358 2359
2359 2360 if self.dataOut.flagTimeBlock:
2360 2361 self.data_spc.fill(0)
2361 2362 self.data_cspc.fill(0)
2362 2363 self.data_dc.fill(0)
2363 2364 self.setNextFile()
2364 2365
2365 2366 if self.flagIsNewFile == 0:
2366 2367 self.getBasicHeader()
2367 2368
2368 2369 self.data_spc = self.dataOut.data_spc.copy()
2369 2370 self.data_cspc = self.dataOut.data_cspc.copy()
2370 2371 self.data_dc = self.dataOut.data_dc.copy()
2371 2372
2372 2373 # #self.processingHeaderObj.dataBlocksPerFile)
2373 2374 if self.hasAllDataInBuffer():
2374 2375 # self.getDataHeader()
2375 2376 self.writeNextBlock()
2376 2377
2377 2378 return 1
2378 2379
2379 2380
2380 2381 def __getProcessFlags(self):
2381 2382
2382 2383 processFlags = 0
2383 2384
2384 2385 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2385 2386 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2386 2387 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2387 2388 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2388 2389 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2389 2390 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2390 2391
2391 2392 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2392 2393
2393 2394
2394 2395
2395 2396 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2396 2397 PROCFLAG.DATATYPE_SHORT,
2397 2398 PROCFLAG.DATATYPE_LONG,
2398 2399 PROCFLAG.DATATYPE_INT64,
2399 2400 PROCFLAG.DATATYPE_FLOAT,
2400 2401 PROCFLAG.DATATYPE_DOUBLE]
2401 2402
2402 2403
2403 2404 for index in range(len(dtypeList)):
2404 2405 if self.dataOut.dtype == dtypeList[index]:
2405 2406 dtypeValue = datatypeValueList[index]
2406 2407 break
2407 2408
2408 2409 processFlags += dtypeValue
2409 2410
2410 2411 if self.dataOut.flagDecodeData:
2411 2412 processFlags += PROCFLAG.DECODE_DATA
2412 2413
2413 2414 if self.dataOut.flagDeflipData:
2414 2415 processFlags += PROCFLAG.DEFLIP_DATA
2415 2416
2416 2417 if self.dataOut.code != None:
2417 2418 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2418 2419
2419 2420 if self.dataOut.nIncohInt > 1:
2420 2421 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2421 2422
2422 2423 if self.dataOut.data_dc != None:
2423 2424 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2424 2425
2425 2426 return processFlags
2426 2427
2427 2428
2428 2429 def __getBlockSize(self):
2429 2430 '''
2430 2431 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
2431 2432 '''
2432 2433
2433 2434 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2434 2435 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2435 2436 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2436 2437 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2437 2438 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2438 2439 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2439 2440
2440 2441 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2441 2442 datatypeValueList = [1,2,4,8,4,8]
2442 2443 for index in range(len(dtypeList)):
2443 2444 if self.dataOut.dtype == dtypeList[index]:
2444 2445 datatypeValue = datatypeValueList[index]
2445 2446 break
2446 2447
2447 2448
2448 2449 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2449 2450
2450 2451 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2451 2452 blocksize = (pts2write_SelfSpectra*datatypeValue)
2452 2453
2453 2454 if self.dataOut.data_cspc != None:
2454 2455 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2455 2456 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2456 2457
2457 2458 if self.dataOut.data_dc != None:
2458 2459 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2459 2460 blocksize += (pts2write_DCchannels*datatypeValue*2)
2460 2461
2461 2462 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
2462 2463
2463 2464 return blocksize
2464 2465
2465 2466 def getDataHeader(self):
2466 2467
2467 2468 """
2468 2469 Obtiene una copia del First Header
2469 2470
2470 2471 Affected:
2471 2472 self.systemHeaderObj
2472 2473 self.radarControllerHeaderObj
2473 2474 self.dtype
2474 2475
2475 2476 Return:
2476 2477 None
2477 2478 """
2478 2479
2479 2480 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2480 2481 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2481 2482 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2482 2483
2483 2484 self.getBasicHeader()
2484 2485
2485 2486 processingHeaderSize = 40 # bytes
2486 2487 self.processingHeaderObj.dtype = 1 # Spectra
2487 2488 self.processingHeaderObj.blockSize = self.__getBlockSize()
2488 2489 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2489 2490 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2490 2491 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
2491 2492 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2492 2493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
2493 2494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2494 2495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2495 2496
2496 2497 if self.processingHeaderObj.totalSpectra > 0:
2497 2498 channelList = []
2498 2499 for channel in range(self.dataOut.nChannels):
2499 2500 channelList.append(channel)
2500 2501 channelList.append(channel)
2501 2502
2502 2503 pairsList = []
2503 2504 for pair in self.dataOut.pairsList:
2504 2505 pairsList.append(pair[0])
2505 2506 pairsList.append(pair[1])
2506 2507 spectraComb = channelList + pairsList
2507 2508 spectraComb = numpy.array(spectraComb,dtype="u1")
2508 2509 self.processingHeaderObj.spectraComb = spectraComb
2509 2510 sizeOfSpcComb = len(spectraComb)
2510 2511 processingHeaderSize += sizeOfSpcComb
2511 2512
2512 2513 # The processing header should not have information about code
2513 2514 # if self.dataOut.code != None:
2514 2515 # self.processingHeaderObj.code = self.dataOut.code
2515 2516 # self.processingHeaderObj.nCode = self.dataOut.nCode
2516 2517 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2517 2518 # nCodeSize = 4 # bytes
2518 2519 # nBaudSize = 4 # bytes
2519 2520 # codeSize = 4 # bytes
2520 2521 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2521 2522 # processingHeaderSize += sizeOfCode
2522 2523
2523 2524 if self.processingHeaderObj.nWindows != 0:
2524 2525 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2525 2526 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2526 2527 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2527 2528 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2528 2529 sizeOfFirstHeight = 4
2529 2530 sizeOfdeltaHeight = 4
2530 2531 sizeOfnHeights = 4
2531 2532 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2532 2533 processingHeaderSize += sizeOfWindows
2533 2534
2534 2535 self.processingHeaderObj.size = processingHeaderSize
2535 2536
class SpectraHeisWriter(Operation):

    """
    Writes spectra data to FITS files: one file per data block, grouped in a
    per-run subfolder. Each file holds a primary HDU carrying the block id and
    a UTC timestamp, plus a binary table with a frequency column and one power
    column (in dB) per channel.
    """

    setFile = None    # file index within the current subfolder
    idblock = None    # sequential block counter written into the primary HDU
    doypath = None    # full path of the current output subfolder
    subfolder = None  # subfolder name: F<year><doy>_<timestamp>

    def __init__(self):

        self.wrObj = FITS()
        self.nTotalBlocks = 0
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(str):
        """
        Checks whether a string can be converted to a number.

        Input:
            str : the string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # NOTE: declared as a staticmethod — the original definition lacked
        # "self" and therefore could not be called on an instance.
        try:
            float( str )
            return True
        except:
            return False

    def setup(self, dataOut, wrpath):
        """
        Stores the data object and the output path, creating it if needed.
        """

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.dataOut = dataOut

    def putData(self):
        """
        Builds a FITS file for the current block and writes it to disk.

        Return:
            1 : the block was written
        """
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"

        # "is None" instead of "== None" for identity checks
        if self.doypath is None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            if not(os.path.exists(self.doypath)):
                os.mkdir(self.doypath)

        if self.setFile is None:
            self.setFile = 0

        #make the filename (avoid shadowing the builtin "file")
        basename = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,self.subfolder, basename)

        idblock = numpy.array([self.idblock],dtype="int64")
        # compute the UTC timestamp once instead of six gmtime() calls
        utc = time.gmtime(self.dataOut.utctime)
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=utc.tm_year,
                                    month=utc.tm_mon,
                                    day=utc.tm_mday,
                                    hour=utc.tm_hour,
                                    minute=utc.tm_min,
                                    second=utc.tm_sec)

        c = 3E8   # speed of light [m/s]
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        # one power column per channel, converted to dB
        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header,data)

        self.wrObj.wFile(filename)

        #update the file index and the global block counter
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):

        # configure once on the first call, then write every block
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
2607 2654
class FITS:

    """
    Thin wrapper around pyfits used to assemble and write a FITS file:
    a primary HDU plus one binary table HDU built from columns.
    """

    name = None
    format = None
    array = None
    data = None
    thdulist = None
    prihdr = None
    hdu = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        """Build and return a float32 table column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeData(self, name, format, data):
        """Build and return a float32 table column from *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, idblock, year, month, day, hour, minute, second):
        """Create the primary HDU holding the block id and its UTC timestamp."""
        self.hdu = pyfits.PrimaryHDU(idblock)
        cards = (("Year", year),
                 ("Month", month),
                 ("Day", day),
                 ("Hour", hour),
                 ("Minute", minute),
                 ("Second", second))
        for keyword, value in cards:
            self.hdu.header.set(keyword, value)
        return self.hdu


    def Ctable(self, colList):
        """Create the binary table HDU from a list of columns."""
        self.cols = pyfits.ColDefs(colList)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu


    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (primary HDU + table HDU)."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the HDU list to disk, replacing any pre-existing file."""
        if os.path.isfile(filename):
            os.remove(filename)
        self.thdulist.writeto(filename)
@@ -1,1616 +1,1621
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 try:
16 16 import cfunctions
17 17 except:
18 18 pass
19 19
20 20 class ProcessingUnit:
21 21
22 22 """
23 23 Esta es la clase base para el procesamiento de datos.
24 24
25 25 Contiene el metodo "call" para llamar operaciones. Las operaciones pueden ser:
26 26 - Metodos internos (callMethod)
27 27 - Objetos del tipo Operation (callObject). Antes de ser llamados, estos objetos
28 28 tienen que ser agreagados con el metodo "add".
29 29
30 30 """
31 31 # objeto de datos de entrada (Voltage, Spectra o Correlation)
32 32 dataIn = None
33 33
34 34 # objeto de datos de entrada (Voltage, Spectra o Correlation)
35 35 dataOut = None
36 36
37 37
38 38 objectDict = None
39 39
40 40 def __init__(self):
41 41
42 42 self.objectDict = {}
43 43
44 44 def init(self):
45 45
46 46 raise ValueError, "Not implemented"
47 47
48 48 def addOperation(self, object, objId):
49 49
50 50 """
51 51 Agrega el objeto "object" a la lista de objetos "self.objectList" y retorna el
52 52 identificador asociado a este objeto.
53 53
54 54 Input:
55 55
56 56 object : objeto de la clase "Operation"
57 57
58 58 Return:
59 59
60 60 objId : identificador del objeto, necesario para ejecutar la operacion
61 61 """
62 62
63 63 self.objectDict[objId] = object
64 64
65 65 return objId
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
70 70 Operacion directa sobre la data (dataOut.data). Es necesario actualizar los valores de los
71 71 atributos del objeto dataOut
72 72
73 73 Input:
74 74
75 75 **kwargs : Diccionario de argumentos de la funcion a ejecutar
76 76 """
77 77
78 78 raise ValueError, "ImplementedError"
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
83 83 Ejecuta el metodo con el nombre "name" y con argumentos **kwargs de la propia clase.
84 84
85 85 Input:
86 86 name : nombre del metodo a ejecutar
87 87
88 88 **kwargs : diccionario con los nombres y valores de la funcion a ejecutar.
89 89
90 90 """
91 91 if name != 'run':
92 92
93 93 if name == 'init' and self.dataIn.isEmpty():
94 94 self.dataOut.flagNoData = True
95 95 return False
96 96
97 97 if name != 'init' and self.dataOut.isEmpty():
98 98 return False
99 99
100 100 methodToCall = getattr(self, name)
101 101
102 102 methodToCall(**kwargs)
103 103
104 104 if name != 'run':
105 105 return True
106 106
107 107 if self.dataOut.isEmpty():
108 108 return False
109 109
110 110 return True
111 111
112 112 def callObject(self, objId, **kwargs):
113 113
114 114 """
115 115 Ejecuta la operacion asociada al identificador del objeto "objId"
116 116
117 117 Input:
118 118
119 119 objId : identificador del objeto a ejecutar
120 120
121 121 **kwargs : diccionario con los nombres y valores de la funcion a ejecutar.
122 122
123 123 Return:
124 124
125 125 None
126 126 """
127 127
128 128 if self.dataOut.isEmpty():
129 129 return False
130 130
131 131 object = self.objectDict[objId]
132 132
133 133 object.run(self.dataOut, **kwargs)
134 134
135 135 return True
136 136
137 137 def call(self, operationConf, **kwargs):
138 138
139 139 """
140 140 Return True si ejecuta la operacion "operationConf.name" con los
141 141 argumentos "**kwargs". False si la operacion no se ha ejecutado.
142 142 La operacion puede ser de dos tipos:
143 143
144 144 1. Un metodo propio de esta clase:
145 145
146 146 operation.type = "self"
147 147
148 148 2. El metodo "run" de un objeto del tipo Operation o de un derivado de ella:
149 149 operation.type = "other".
150 150
151 151 Este objeto de tipo Operation debe de haber sido agregado antes con el metodo:
152 152 "addOperation" e identificado con el operation.id
153 153
154 154
155 155 con el id de la operacion.
156 156
157 157 Input:
158 158
159 159 Operation : Objeto del tipo operacion con los atributos: name, type y id.
160 160
161 161 """
162 162
163 163 if operationConf.type == 'self':
164 164 sts = self.callMethod(operationConf.name, **kwargs)
165 165
166 166 if operationConf.type == 'other':
167 167 sts = self.callObject(operationConf.id, **kwargs)
168 168
169 169 return sts
170 170
171 171 def setInput(self, dataIn):
172 172
173 173 self.dataIn = dataIn
174 174
175 175 def getOutput(self):
176 176
177 177 return self.dataOut
178 178
179 179 class Operation():
180 180
181 181 """
182 182 Clase base para definir las operaciones adicionales que se pueden agregar a la clase ProcessingUnit
183 183 y necesiten acumular informacion previa de los datos a procesar. De preferencia usar un buffer de
184 184 acumulacion dentro de esta clase
185 185
186 186 Ejemplo: Integraciones coherentes, necesita la informacion previa de los n perfiles anteriores (bufffer)
187 187
188 188 """
189 189
190 190 __buffer = None
191 191 __isConfig = False
192 192
193 193 def __init__(self):
194 194
195 195 pass
196 196
197 197 def run(self, dataIn, **kwargs):
198 198
199 199 """
200 200 Realiza las operaciones necesarias sobre la dataIn.data y actualiza los atributos del objeto dataIn.
201 201
202 202 Input:
203 203
204 204 dataIn : objeto del tipo JROData
205 205
206 206 Return:
207 207
208 208 None
209 209
210 210 Affected:
211 211 __buffer : buffer de recepcion de datos.
212 212
213 213 """
214 214
215 215 raise ValueError, "ImplementedError"
216 216
217 217 class VoltageProc(ProcessingUnit):
218 218
219 219
220 220 def __init__(self):
221 221
222 222 self.objectDict = {}
223 223 self.dataOut = Voltage()
224 224 self.flip = 1
225 225
226 226 def init(self):
227 227
228 228 self.dataOut.copy(self.dataIn)
229 229 # No necesita copiar en cada init() los atributos de dataIn
230 230 # la copia deberia hacerse por cada nuevo bloque de datos
231 231
232 232 def selectChannels(self, channelList):
233 233
234 234 channelIndexList = []
235 235
236 236 for channel in channelList:
237 237 index = self.dataOut.channelList.index(channel)
238 238 channelIndexList.append(index)
239 239
240 240 self.selectChannelsByIndex(channelIndexList)
241 241
242 242 def selectChannelsByIndex(self, channelIndexList):
243 243 """
244 244 Selecciona un bloque de datos en base a canales segun el channelIndexList
245 245
246 246 Input:
247 247 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
248 248
249 249 Affected:
250 250 self.dataOut.data
251 251 self.dataOut.channelIndexList
252 252 self.dataOut.nChannels
253 253 self.dataOut.m_ProcessingHeader.totalSpectra
254 254 self.dataOut.systemHeaderObj.numChannels
255 255 self.dataOut.m_ProcessingHeader.blockSize
256 256
257 257 Return:
258 258 None
259 259 """
260 260
261 261 for channelIndex in channelIndexList:
262 262 if channelIndex not in self.dataOut.channelIndexList:
263 263 print channelIndexList
264 264 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
265 265
266 266 nChannels = len(channelIndexList)
267 267
268 268 data = self.dataOut.data[channelIndexList,:]
269 269
270 270 self.dataOut.data = data
271 271 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
272 272 # self.dataOut.nChannels = nChannels
273 273
274 274 return 1
275 275
276 276 def selectHeights(self, minHei, maxHei):
277 277 """
278 278 Selecciona un bloque de datos en base a un grupo de valores de alturas segun el rango
279 279 minHei <= height <= maxHei
280 280
281 281 Input:
282 282 minHei : valor minimo de altura a considerar
283 283 maxHei : valor maximo de altura a considerar
284 284
285 285 Affected:
286 286 Indirectamente son cambiados varios valores a travez del metodo selectHeightsByIndex
287 287
288 288 Return:
289 289 1 si el metodo se ejecuto con exito caso contrario devuelve 0
290 290 """
291 291 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
292 292 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
293 293
294 294 if (maxHei > self.dataOut.heightList[-1]):
295 295 maxHei = self.dataOut.heightList[-1]
296 296 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
297 297
298 298 minIndex = 0
299 299 maxIndex = 0
300 300 heights = self.dataOut.heightList
301 301
302 302 inda = numpy.where(heights >= minHei)
303 303 indb = numpy.where(heights <= maxHei)
304 304
305 305 try:
306 306 minIndex = inda[0][0]
307 307 except:
308 308 minIndex = 0
309 309
310 310 try:
311 311 maxIndex = indb[0][-1]
312 312 except:
313 313 maxIndex = len(heights)
314 314
315 315 self.selectHeightsByIndex(minIndex, maxIndex)
316 316
317 317 return 1
318 318
319 319
320 320 def selectHeightsByIndex(self, minIndex, maxIndex):
321 321 """
322 322 Selecciona un bloque de datos en base a un grupo indices de alturas segun el rango
323 323 minIndex <= index <= maxIndex
324 324
325 325 Input:
326 326 minIndex : valor de indice minimo de altura a considerar
327 327 maxIndex : valor de indice maximo de altura a considerar
328 328
329 329 Affected:
330 330 self.dataOut.data
331 331 self.dataOut.heightList
332 332
333 333 Return:
334 334 1 si el metodo se ejecuto con exito caso contrario devuelve 0
335 335 """
336 336
337 337 if (minIndex < 0) or (minIndex > maxIndex):
338 338 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
339 339
340 340 if (maxIndex >= self.dataOut.nHeights):
341 341 maxIndex = self.dataOut.nHeights-1
342 342 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
343 343
344 344 nHeights = maxIndex - minIndex + 1
345 345
346 346 #voltage
347 347 data = self.dataOut.data[:,minIndex:maxIndex+1]
348 348
349 349 firstHeight = self.dataOut.heightList[minIndex]
350 350
351 351 self.dataOut.data = data
352 352 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
353 353
354 354 return 1
355 355
356 356
    def filterByHeights(self, window):
        """Reduce the height resolution by summing groups of "window" bins.

        Input:
            window : number of consecutive height bins merged into one;
                     when None it is derived from the pulse width
                     (txA / deltaHeight).

        Affected:
            self.dataOut.data
            self.dataOut.heightList
            self.dataOut.windowOfFilter
        """
        # Height step of the current sampling (assumes uniform spacing).
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]

        if window == None:
            window = self.dataOut.radarControllerHeaderObj.txA / deltaHeight

        newdelta = deltaHeight * window
        # Drop the trailing bins that do not fill a complete window.
        r = self.dataOut.data.shape[1] % window
        buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
        # Group the height axis into (nGroups, window) and sum each group.
        buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
        buffer = numpy.sum(buffer,2)
        self.dataOut.data = buffer
        # NOTE(review): the arange stop value is based on nHeights/window - 1
        # steps; confirm its length matches the reduced data when r != 0.
        self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*self.dataOut.nHeights/window-newdelta,newdelta)
        self.dataOut.windowOfFilter = window
371 371
    def deFlip(self):
        # Undo the alternating-sign (flip) modulation: multiply the current
        # profile by the running factor, then toggle the factor (+1/-1)
        # for the next profile.
        self.dataOut.data *= self.flip
        self.flip *= -1.
375 375
376 376
class CohInt(Operation):
    """Coherent integration of voltage profiles.

    Sums "n" consecutive profiles, or every profile received within a
    given time interval.  An overlapping mode keeps the last "n"
    profiles in a rolling buffer so every call can deliver a fresh sum.
    """

    __isConfig = False

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None


    def __init__(self):

        self.__isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration (minutes). Ignored when "n" is given
            overlapping : keep the last "n" profiles in a rolling buffer

        Raises:
            ValueError : when neither "n" nor "timeInterval" is specified.
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False


        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase __profIndex by one.
        """

        if not self.__withOverapping:
            # Plain accumulation: __buffer holds the running sum.
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping mode: __buffer stacks up to "n" individual profiles.
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty it takes the data value directly.
        # ("is None" instead of "== None": __buffer holds a numpy array
        # afterwards and "==" would compare elementwise.)
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If fewer than n profiles are buffered, keep stacking.
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Buffer full: drop the oldest profile and append the new one.
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the sum of the buffered profiles and how many were summed.

        Affected:

            self.__profIndex
        """

        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Integration with overlapping: the buffer is kept.
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        # Integrate a fixed number of profiles; ready on every n-th call.
        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if self.__profIndex == self.n:

            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        # Integrate until __integrationtime seconds have elapsed.
        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)


        self.__lastdatatime = datatime

        # "is None": avgdata is a numpy array when an integration is ready.
        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): deltatime is always 0 here because __lastdatatime was
        # just set to datatime above, so the overlapping branch below never
        # advances __initime.  Confirm whether the previous __lastdatatime
        # was intended before changing.
        deltatime = datatime -self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, **kwargs):

        if not self.__isConfig:
            self.setup(**kwargs)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

        # Data is flagged invalid until a full integration is ready.
        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
            dataOut.flagNoData = False
572 572
573 573
class Decoder(Operation):
    """Pulse decoding (matched filtering) of voltage profiles.

    Correlates each profile with the corresponding transmitted code,
    either in the time domain or in the frequency domain.
    """

    __isConfig = False
    __profIndex = 0

    # Code matrix (nCode x nBaud) currently in use.
    code = None

    nCode = None
    nBaud = None

    def __init__(self):

        self.__isConfig = False

    def setup(self, code, shape):
        """Precompute the conjugated FFT of the codes and allocate buffers.

        Inputs:
            code : sequence of codes, one per profile (nCode x nBaud)
            shape : (nChannels, nHeights) of the incoming data
        """
        self.__profIndex = 0

        self.code = code

        self.nCode = len(code)
        self.nBaud = len(code[0])

        self.__nChannels, self.__nHeis = shape

        # Zero-padded codes, conjugated in the frequency domain for the
        # correlation.  ("complex" replaces the deprecated "numpy.complex"
        # alias; both mean complex128.)
        __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)

        __codeBuffer[:,0:self.nBaud] = self.code

        self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))

        # Number of decoded range gates (full code overlaps only).
        self.ndatadec = self.__nHeis - self.nBaud + 1

        self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)

    def convolutionInFreq(self, data):
        # Multiplying in the frequency domain == correlating in time.
        fft_code = self.fft_code[self.__profIndex].reshape(1,-1)

        fft_data = numpy.fft.fft(data, axis=1)

        conv = fft_data*fft_code

        data = numpy.fft.ifft(conv,axis=1)

        # Keep only the fully-overlapped gates.
        datadec = data[:,:-self.nBaud+1]

        return datadec

    def convolutionInFreqOpt(self, data):
        # Optimized variant delegating to the external C decoder.
        fft_code = self.fft_code[self.__profIndex].reshape(1,-1)

        data = cfunctions.decoder(fft_code, data)

        datadec = data[:,:-self.nBaud+1]

        return datadec

    def convolutionInTime(self, data):
        # Direct cross-correlation with the code of the current profile.
        code = self.code[self.__profIndex]

        for i in range(self.__nChannels):
            self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='valid')

        return self.datadecTime

    def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0):
        """Decode one profile of dataOut.data in place.

        Inputs:
            code : optional flat code sequence; defaults to dataOut.code
            nCode, nBaud : shape used to reshape an explicit "code"
            mode : 0 time domain, 1 frequency domain, 2 optimized freq. domain
        """
        if not self.__isConfig:

            # "is None" instead of "== None": dataOut.code is a numpy array
            # and "==" would compare it elementwise.
            if code is None:
                code = dataOut.code
            else:
                code = numpy.array(code).reshape(nCode,nBaud)
                dataOut.code = code
                dataOut.nCode = nCode
                dataOut.nBaud = nBaud

            if code is None:
                return 1

            self.setup(code, dataOut.data.shape)
            self.__isConfig = True

        # NOTE(review): an unsupported "mode" leaves datadec unbound and
        # raises NameError below; confirm the accepted modes are 0, 1, 2.
        if mode == 0:
            datadec = self.convolutionInTime(dataOut.data)

        if mode == 1:
            datadec = self.convolutionInFreq(dataOut.data)

        if mode == 2:
            datadec = self.convolutionInFreqOpt(dataOut.data)

        dataOut.data = datadec

        dataOut.heightList = dataOut.heightList[0:self.ndatadec]

        dataOut.flagDecodeData = True # data has now been decoded

        # Cycle through the codes, one per profile.
        if self.__profIndex == self.nCode-1:
            self.__profIndex = 0
            return 1

        self.__profIndex += 1

        return 1
683 683
684 684
685 685
class SpectraProc(ProcessingUnit):
    """Processing unit that turns Voltage blocks into Spectra.

    Buffers nFFTPoints profiles, then computes self-spectra,
    cross-spectra and the DC component.  Also offers channel/height
    selection and DC removal on the resulting Spectra object.
    """

    def __init__(self):

        self.objectDict = {}
        self.buffer = None          # profile accumulator (nChannels x nFFTPoints x nHeights)
        self.firstdatatime = None   # timestamp of the first buffered profile
        self.profIndex = 0          # number of profiles currently buffered
        self.dataOut = Spectra()

    def __updateObjFromInput(self):
        # Copy the metadata of the incoming Voltage object into dataOut.
        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
        self.dataOut.nBaud = self.dataIn.nBaud
        self.dataOut.nCode = self.dataIn.nCode
        self.dataOut.code = self.dataIn.code
        self.dataOut.nProfiles = self.dataOut.nFFTPoints
        self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
        self.dataOut.utctime = self.firstdatatime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # whether upstream decoded the data
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # whether upstream removed the flip
        self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = 1
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.windowOfFilter = self.dataIn.windowOfFilter

        self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt

    def __getFft(self):
        """
        Convert the buffered voltages into spectra.

        Affected:
            self.dataOut.data_spc
            self.dataOut.data_cspc
            self.dataOut.data_dc
            self.dataOut.blockSize
        """
        fft_volt = numpy.fft.fft(self.buffer,axis=1)
        fft_volt = fft_volt.astype(numpy.dtype('complex'))
        # DC component: the zero-frequency bin, taken before the shift.
        dc = fft_volt[:,0,:]

        # Self-spectra: |FFT|^2 with zero frequency centered.
        fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
        spc = fft_volt * numpy.conjugate(fft_volt)
        spc = spc.real

        blocksize = 0
        blocksize += dc.size
        blocksize += spc.size

        cspc = None
        pairIndex = 0
        if self.dataOut.pairsList is not None:
            # Cross-spectra for every requested channel pair.
            cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
            for pair in self.dataOut.pairsList:
                cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
                pairIndex += 1
            blocksize += cspc.size

        self.dataOut.data_spc = spc
        self.dataOut.data_cspc = cspc
        self.dataOut.data_dc = dc
        self.dataOut.blockSize = blocksize

    def init(self, nFFTPoints=None, pairsList=None):
        """Accumulate one input object; emit spectra when the buffer is full.

        Inputs:
            nFFTPoints : FFT length (required for Voltage input)
            pairsList : optional list of channel pairs for cross-spectra

        Raises:
            ValueError : missing nFFTPoints, or unsupported input type.
        """
        self.dataOut.flagNoData = True

        if self.dataIn.type == "Spectra":
            self.dataOut.copy(self.dataIn)
            return

        if self.dataIn.type == "Voltage":

            if nFFTPoints is None:
                raise ValueError("This SpectraProc.init() need nFFTPoints input variable")

            if pairsList is None:
                nPairs = 0
            else:
                nPairs = len(pairsList)

            self.dataOut.nFFTPoints = nFFTPoints
            self.dataOut.pairsList = pairsList
            self.dataOut.nPairs = nPairs

            if self.buffer is None:
                self.buffer = numpy.zeros((self.dataIn.nChannels,
                                           self.dataOut.nFFTPoints,
                                           self.dataIn.nHeights),
                                          dtype='complex')


            self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
            self.profIndex += 1

            if self.firstdatatime is None:
                self.firstdatatime = self.dataIn.utctime

            if self.profIndex == self.dataOut.nFFTPoints:
                self.__updateObjFromInput()
                self.__getFft()

                self.dataOut.flagNoData = False

                self.buffer = None
                self.firstdatatime = None
                self.profIndex = 0

            return

        # Fixed typo: the original raised the undefined name "ValuError",
        # which itself produced a NameError at runtime.
        raise ValueError("The type object %s is not valid"%(self.dataIn.type))

    def selectChannels(self, channelList):
        # Translate channel numbers into indices and delegate.
        channelIndexList = []

        for channel in channelList:
            index = self.dataOut.channelList.index(channel)
            channelIndexList.append(index)

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Keep only the channels whose indices appear in channelIndexList.

        Input:
            channelIndexList : plain list of channel indices, e.g. [2,3,7]

        Affected:
            self.dataOut.data_spc
            self.dataOut.channelList

        Return:
            1 on success.

        Raises:
            ValueError : when an index is not a valid channel index.
        """

        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                print(channelIndexList)
                raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)

        data_spc = self.dataOut.data_spc[channelIndexList,:]

        self.dataOut.data_spc = data_spc
        self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]

        return 1

    def selectHeights(self, minHei, maxHei):
        """
        Keep only the data whose heights fall in the range
        minHei <= height <= maxHei.

        Input:
            minHei : lowest height to keep
            maxHei : highest height to keep (clipped to the last height)

        Affected:
            Indirectly, everything changed by selectHeightsByIndex.

        Return:
            1 on success.
        """
        if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
            raise ValueError("some value in (%d,%d) is not valid" % (minHei, maxHei))

        if (maxHei > self.dataOut.heightList[-1]):
            maxHei = self.dataOut.heightList[-1]

        minIndex = 0
        maxIndex = 0
        heights = self.dataOut.heightList

        inda = numpy.where(heights >= minHei)
        indb = numpy.where(heights <= maxHei)

        try:
            minIndex = inda[0][0]
        except:
            minIndex = 0

        try:
            maxIndex = indb[0][-1]
        except:
            maxIndex = len(heights)

        self.selectHeightsByIndex(minIndex, maxIndex)

        return 1


    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Keep only the height bins with indices in the range
        minIndex <= index <= maxIndex.

        Input:
            minIndex : first height index to keep (must be >= 0)
            maxIndex : last height index to keep (clipped to nHeights-1)

        Affected:
            self.dataOut.data_spc
            self.dataOut.data_cspc
            self.dataOut.data_dc
            self.dataOut.heightList

        Return:
            1 on success.
        """

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))

        if (maxIndex >= self.dataOut.nHeights):
            maxIndex = self.dataOut.nHeights-1

        # Spectra: slice the height axis of every product.
        data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]

        # "is not None": these attributes hold numpy arrays and "!="
        # would compare elementwise.
        data_cspc = None
        if self.dataOut.data_cspc is not None:
            data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]

        data_dc = None
        if self.dataOut.data_dc is not None:
            data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]

        self.dataOut.data_spc = data_spc
        self.dataOut.data_cspc = data_cspc
        self.dataOut.data_dc = data_dc

        self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]

        return 1

    def removeDC(self, mode = 1):
        """Replace the DC bin of the spectra with neighbouring values.

        Input:
            mode : 1 = average of the two adjacent frequency bins;
                   modes 2 and 3 are placeholders and fall through to
                   the final raise.

        Raises:
            ValueError : when mode 1 was not taken.
        """
        dc_index = 0
        freq_index = numpy.array([-2,-1,1,2])
        data_spc = self.dataOut.data_spc
        data_cspc = self.dataOut.data_cspc
        data_dc = self.dataOut.data_dc

        if self.dataOut.flagShiftFFT:
            # Shifted spectra keep DC in the center bin.
            dc_index += self.dataOut.nFFTPoints/2
            freq_index += self.dataOut.nFFTPoints/2

        if mode == 1:
            # NOTE(review): data_spc[dc_index] indexes the channel axis while
            # the right-hand side averages along the frequency axis --
            # data_spc[:,dc_index,:] looks intended; confirm before changing.
            data_spc[dc_index] = (data_spc[:,freq_index[1],:] + data_spc[:,freq_index[2],:])/2
            if data_cspc is not None:
                data_cspc[dc_index] = (data_cspc[:,freq_index[1],:] + data_cspc[:,freq_index[2],:])/2
            return 1

        if mode == 2:
            pass

        if mode == 3:
            pass

        raise ValueError("mode parameter has to be 1, 2 or 3")

    def removeInterference(self):
        # Not implemented yet.
        pass
973 973
974 974
class IncohInt(Operation):
    """Incoherent integration of spectra.

    Sums "n" consecutive spectra (self-spectra, cross-spectra and DC),
    or every spectrum received within a time interval.  An overlapping
    mode keeps the last "n" spectra in rolling buffers.
    """

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer_spc = None
    __buffer_cspc = None
    __buffer_dc = None

    __dataReady = False

    __timeInterval = None

    n = None



    def __init__(self):

        self.__isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of incoherent integrations
            timeInterval : Time of integration (minutes). Ignored when "n" is given
            overlapping : keep the last "n" spectra in rolling buffers

        Raises:
            ValueError : when neither "n" nor "timeInterval" is specified.
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer_spc = None
        self.__buffer_cspc = None
        self.__buffer_dc = None
        self.__dataReady = False


        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
        else:
            self.__withOverapping = False
            self.__buffer_spc = 0
            self.__buffer_cspc = 0
            self.__buffer_dc = 0

        self.__profIndex = 0

    def putData(self, data_spc, data_cspc, data_dc):

        """
        Add a spectrum to the buffers and increase __profIndex by one.
        """

        if not self.__withOverapping:
            # Plain accumulation: the buffers hold running sums.
            # ("is None" throughout this method: the arguments and buffers
            # are numpy arrays and "==" would compare elementwise.)
            self.__buffer_spc += data_spc

            if data_cspc is None:
                self.__buffer_cspc = None
            else:
                self.__buffer_cspc += data_cspc

            if data_dc is None:
                self.__buffer_dc = None
            else:
                self.__buffer_dc += data_dc

            self.__profIndex += 1
            return

        # Overlapping mode: buffers stack up to "n" individual spectra.
        nChannels, nFFTPoints, nHeis = data_spc.shape
        data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
        if data_cspc is not None:
            data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
        if data_dc is not None:
            data_dc = numpy.reshape(data_dc, (1, -1, nHeis))

        # If the buffers are empty they take the data value directly.
        if self.__buffer_spc is None:
            self.__buffer_spc = data_spc

            if data_cspc is None:
                self.__buffer_cspc = None
            else:
                # Bug fix: the original did "+=" here, which raises a
                # TypeError because __buffer_cspc is still None on the
                # first call in overlapping mode.
                self.__buffer_cspc = data_cspc

            if data_dc is None:
                self.__buffer_dc = None
            else:
                # Same fix as above for the DC buffer.
                self.__buffer_dc = data_dc

            self.__profIndex += 1
            return

        # If fewer than n spectra are buffered, keep stacking.
        if self.__profIndex < self.n:
            self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))

            if data_cspc is not None:
                self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))

            if data_dc is not None:
                self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))

            self.__profIndex += 1
            return

        # Buffers full: drop the oldest spectrum and append the new one.
        self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
        self.__buffer_spc[self.n-1] = data_spc

        if data_cspc is not None:
            self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
            self.__buffer_cspc[self.n-1] = data_cspc

        if data_dc is not None:
            self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
            self.__buffer_dc[self.n-1] = data_dc

        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the summed spectra and how many spectra were summed.

        Affected:

            self.__profIndex
        """
        data_spc = None
        data_cspc = None
        data_dc = None

        if not self.__withOverapping:
            data_spc = self.__buffer_spc
            data_cspc = self.__buffer_cspc
            data_dc = self.__buffer_dc

            n = self.__profIndex

            self.__buffer_spc = 0
            self.__buffer_cspc = 0
            self.__buffer_dc = 0
            self.__profIndex = 0

            return data_spc, data_cspc, data_dc, n

        # Integration with overlapping: the buffers are kept.
        data_spc = numpy.sum(self.__buffer_spc, axis=0)

        if self.__buffer_cspc is not None:
            data_cspc = numpy.sum(self.__buffer_cspc, axis=0)

        if self.__buffer_dc is not None:
            data_dc = numpy.sum(self.__buffer_dc, axis=0)

        n = self.__profIndex

        return data_spc, data_cspc, data_dc, n

    def byProfiles(self, *args):
        # Integrate a fixed number of spectra; ready on every n-th call.
        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None
        n = None

        self.putData(*args)

        if self.__profIndex == self.n:

            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def byTime(self, datatime, *args):
        # Integrate until __integrationtime seconds have elapsed.
        self.__dataReady = False
        avgdata_spc = None
        avgdata_cspc = None
        avgdata_dc = None
        n = None

        self.putData(*args)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata_spc, avgdata_cspc, avgdata_dc

    def integrate(self, datatime, *args):

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
        else:
            avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)

        self.__lastdatatime = datatime

        # "is None": avgdata_spc is a numpy array when ready.
        if avgdata_spc is None:
            return None, None, None, None

        avgdatatime = self.__initime
        self.__timeInterval = (self.__lastdatatime - self.__initime)/(self.n - 1)

        # NOTE(review): deltatime is always 0 because __lastdatatime was just
        # set to datatime above; the overlapping branch below never advances
        # __initime.  Confirm intent before changing.
        deltatime = datatime -self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc

    def run(self, dataOut, n=None, timeInterval=None, overlapping=False):

        # n == 1 means no integration at all: pass the data straight through.
        if n==1:
            dataOut.flagNoData = False
            return

        if not self.__isConfig:
            self.setup(n, timeInterval, overlapping)
            self.__isConfig = True

        avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
                                                                            dataOut.data_spc,
                                                                            dataOut.data_cspc,
                                                                            dataOut.data_dc)

        # Data is flagged invalid until a full integration is ready.
        dataOut.flagNoData = True

        if self.__dataReady:

            dataOut.data_spc = avgdata_spc
            dataOut.data_cspc = avgdata_cspc
            dataOut.data_dc = avgdata_dc

            dataOut.nIncohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.timeInterval = self.__timeInterval*self.n
            dataOut.flagNoData = False
1250 1250
class ProfileSelector(Operation):
    """Pass through only the profiles selected by an index list or range."""

    # Current profile counter; wraps back to zero at nProfiles.
    profileIndex = None
    # Total number of profiles per block.
    nProfiles = None

    def __init__(self):

        self.profileIndex = 0

    def incIndex(self):
        """Advance the profile counter, wrapping at nProfiles."""
        self.profileIndex += 1

        if self.profileIndex >= self.nProfiles:
            self.profileIndex = 0

    def isProfileInRange(self, minIndex, maxIndex):
        """True when the current profile lies inside [minIndex, maxIndex]."""
        return minIndex <= self.profileIndex <= maxIndex

    def isProfileInList(self, profileList):
        """True when the current profile index appears in profileList."""
        return self.profileIndex in profileList

    def run(self, dataOut, profileList=None, profileRangeList=None):
        """Flag dataOut valid only for the selected profiles.

        Inputs:
            profileList : explicit profile indices to keep
            profileRangeList : [first, last] inclusive range to keep

        Raises:
            ValueError : when neither selection argument is given.
        """
        dataOut.flagNoData = True
        self.nProfiles = dataOut.nProfiles

        if profileList is not None:
            if self.isProfileInList(profileList):
                dataOut.flagNoData = False

            self.incIndex()
            return 1

        if profileRangeList is not None:
            lowIndex = profileRangeList[0]
            highIndex = profileRangeList[1]
            if self.isProfileInRange(lowIndex, highIndex):
                dataOut.flagNoData = False

            self.incIndex()
            return 1

        raise ValueError("ProfileSelector needs profileList or profileRangeList")
1310 1310
1311 1311 class SpectraHeisProc(ProcessingUnit):
1312 1312 def __init__(self):
1313 1313 self.objectDict = {}
1314 1314 # self.buffer = None
1315 1315 # self.firstdatatime = None
1316 1316 # self.profIndex = 0
1317 1317 self.dataOut = SpectraHeis()
1318 1318
    def __updateObjFromInput(self):
        """Copy the metadata of the incoming object into self.dataOut."""
        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
        self.dataOut.nBaud = self.dataIn.nBaud
        self.dataOut.nCode = self.dataIn.nCode
        self.dataOut.code = self.dataIn.code
        # One FFT point per input height bin.
        self.dataOut.nFFTPoints = self.dataIn.nHeights
        self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
        self.dataOut.utctime = self.dataIn.utctime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # whether upstream decoded the data
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # whether upstream removed the flip
        self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = 1
        self.dataOut.ippSeconds= self.dataIn.ippSeconds
        self.dataOut.windowOfFilter = self.dataIn.windowOfFilter

        self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
1350 1350
1351 1351
1352 1352 def __getFft(self):
1353 1353
1354 1354 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
1355 1355 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
1356 1356 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
1357 1357 self.dataOut.data_spc = spc
1358 1358
1359 1359 def init(self):
1360 1360
1361 1361 self.dataOut.flagNoData = True
1362 1362
1363 1363 if self.dataIn.type == "SpectraHeis":
1364 1364 self.dataOut.copy(self.dataIn)
1365 1365 return
1366 1366
1367 1367 if self.dataIn.type == "Voltage":
1368 1368 self.__updateObjFromInput()
1369 1369 self.__getFft()
1370 1370 self.dataOut.flagNoData = False
1371 1371
1372 1372 return
1373 1373
1374 1374 raise ValuError, "The type object %s is not valid"%(self.dataIn.type)
1375 1375
1376 1376
1377 1377 def selectChannels(self, channelList):
1378 1378
1379 1379 channelIndexList = []
1380 1380
1381 1381 for channel in channelList:
1382 1382 index = self.dataOut.channelList.index(channel)
1383 1383 channelIndexList.append(index)
1384 1384
1385 1385 self.selectChannelsByIndex(channelIndexList)
1386 1386
1387 1387 def selectChannelsByIndex(self, channelIndexList):
1388 1388 """
1389 1389 Selecciona un bloque de datos en base a canales segun el channelIndexList
1390 1390
1391 1391 Input:
1392 1392 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
1393 1393
1394 1394 Affected:
1395 1395 self.dataOut.data
1396 1396 self.dataOut.channelIndexList
1397 1397 self.dataOut.nChannels
1398 1398 self.dataOut.m_ProcessingHeader.totalSpectra
1399 1399 self.dataOut.systemHeaderObj.numChannels
1400 1400 self.dataOut.m_ProcessingHeader.blockSize
1401 1401
1402 1402 Return:
1403 1403 None
1404 1404 """
1405 1405
1406 1406 for channelIndex in channelIndexList:
1407 1407 if channelIndex not in self.dataOut.channelIndexList:
1408 1408 print channelIndexList
1409 1409 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
1410 1410
1411 1411 nChannels = len(channelIndexList)
1412 1412
1413 1413 data_spc = self.dataOut.data_spc[channelIndexList,:]
1414 1414
1415 1415 self.dataOut.data_spc = data_spc
1416 1416 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
1417 1417
1418 1418 return 1
1419 1419
class IncohInt4SpectraHeis(Operation):
    """
    Incoherent integration of SpectraHeis data.

    Accumulates power spectra either over a fixed number of profiles ("n") or
    over a time interval ("timeInterval"), with optional overlapping
    (sliding-window) integration.
    """

    __isConfig = False

    __profIndex = 0
    __withOverlapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None


    def __init__(self):

        self.__isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n            : Number of coherent integrations
            timeInterval : Time of integration. If the parameter "n" is selected this one does not work
            overlapping  : if True, keep a sliding window of the last "n" profiles

        Raises:
            ValueError: if neither n nor timeInterval is given.
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False


        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverlapping = True
            self.__buffer = None
        else:
            self.__withOverlapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase in one the __profIndex

        """

        if not self.__withOverlapping:
            # Non-overlapping mode: __buffer is a running sum.
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping mode: keep the last "n" profiles stacked on axis 0.
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # Bug fix: was "self.__buffer == None" — on a numpy array that is an
        # elementwise comparison (ambiguous truth value); identity is required.
        # If the buffer is empty then it takes the data value.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If the buffer length is lower than n then keep stacking the data.
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Buffer full: drop the oldest profile and append the newest one.
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the sum of the buffered profiles and the number of profiles used.

        Affected:

            self.__profIndex (reset to 0 in non-overlapping mode)

        """

        if not self.__withOverlapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Integration with overlapping: sum the stack, keep the buffer intact.
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        """
        Integrate by profile count; returns the averaged data once "n"
        profiles have been accumulated, otherwise None.
        """

        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if self.__profIndex == self.n:

            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """
        Integrate by elapsed time; returns the averaged data once
        __integrationtime seconds have passed since __initime, otherwise None.
        """

        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """
        Feed one block of data into the integrator.

        Return:
            (avgdata, avgdatatime) once an integration completes,
            (None, None) while it is still accumulating.
        """

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)


        self.__lastdatatime = datatime

        # Bug fix: was "avgdata == None" — elementwise on numpy arrays, which
        # raises "truth value is ambiguous" exactly when data IS ready.
        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): __lastdatatime was just set to datatime above, so
        # deltatime is always 0 here; preserved as-is to keep behavior.
        deltatime = datatime -self.__lastdatatime

        if not self.__withOverlapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, **kwargs):
        """
        Operation entry point: integrate dataOut.data_spc and update the
        output object in place when an integration period completes.
        """

        if not self.__isConfig:
            self.setup(**kwargs)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)

#        dataOut.timeInterval *= n
        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data_spc = avgdata
            dataOut.nIncohInt *= self.n
#            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
#            dataOut.timeInterval = self.__timeInterval*self.n
            dataOut.flagNoData = False
1617
1618
1619
1620
1621 No newline at end of file
@@ -1,72 +1,76
# Experiment configuration script: builds a processing chain for the
# "Sun Experiment", writes it to an XML project file, reads it back and runs it.

import os, sys

# Make the package root (parent of the current working directory) importable.
path = os.path.split(os.getcwd())[0]
sys.path.append(path)

from controller import *

desc = "Sun Experiment Test"
filename = "sunexp.xml"

controllerObj = Project()

controllerObj.setup(id = '191', name='test01', description=desc)
#/Users/dsuarez/Documents/RadarData/SunExperiment
#/Volumes/data_e/PaseDelSol/Raw/100KHZ
# Offline reader over raw Voltage data for the given date/time window.
readUnitConfObj = controllerObj.addReadUnit(datatype='Voltage',
                                            path='/Users/dsuarez/Documents/RadarData/SunExperiment',
                                            startDate='2013/02/06',
                                            endDate='2013/12/31',
                                            startTime='00:30:00',
                                            endTime='17:40:59',
                                            online=0,
                                            delay=3,
                                            walk=1)

# Voltage processing unit fed by the reader.
procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage', inputId=readUnitConfObj.getId())

# SpectraHeis unit: computes spectra from the voltage stream.
procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraHeis', inputId=procUnitConfObj0.getId())

# Incoherent integration over a 5-second window.
opObj11 = procUnitConfObj1.addOperation(name='IncohInt4SpectraHeis', optype='other')
opObj11.addParameter(name='timeInterval', value='5', format='float')

# Spectra scope plot, saved to disk and pushed via FTP every 10th figure.
opObj11 = procUnitConfObj1.addOperation(name='SpectraHeisScope', optype='other')
opObj11.addParameter(name='idfigure', value='10', format='int')
opObj11.addParameter(name='wintitle', value='SpectraHeisPlot', format='str')
#opObj11.addParameter(name='ymin', value='125', format='int')
#opObj11.addParameter(name='ymax', value='140', format='int')
#opObj11.addParameter(name='channelList', value='0,1,2', format='intlist')
#opObj11.addParameter(name='showprofile', value='1', format='int')
opObj11.addParameter(name='save', value='1', format='bool')
opObj11.addParameter(name='figfile', value='spc-noise.png', format='str')
opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
opObj11.addParameter(name='ftp', value='1', format='int')
opObj11.addParameter(name='ftpratio', value='10', format='int')

# Range-time-intensity plot from the SpectraHeis stream.
opObj11 = procUnitConfObj1.addOperation(name='RTIfromSpectraHeis', optype='other')
opObj11.addParameter(name='idfigure', value='6', format='int')
opObj11.addParameter(name='wintitle', value='RTIPLot', format='str')
#opObj11.addParameter(name='zmin', value='10', format='int')
#opObj11.addParameter(name='zmax', value='40', format='int')
opObj11.addParameter(name='ymin', value='60', format='int')
opObj11.addParameter(name='ymax', value='85', format='int')
#opObj11.addParameter(name='channelList', value='0,1,2,3', format='intlist')
#opObj11.addParameter(name='timerange', value='600', format='int')
#opObj11.addParameter(name='showprofile', value='0', format='int')
opObj11.addParameter(name='save', value='1', format='bool')
opObj11.addParameter(name='figfile', value='rti-noise.png', format='str')
opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
opObj11.addParameter(name='ftp', value='1', format='int')
opObj11.addParameter(name='ftpratio', value='10', format='int')

# Writer that dumps the SpectraHeis data (FITS output per this changeset).
opObj11 = procUnitConfObj1.addOperation(name='SpectraHeisWriter', optype='other')
opObj11.addParameter(name='wrpath', value='/Users/dsuarez/Remote', format='str')
#opObj11.addParameter(name='blocksPerFile', value='200', format='int')


print "Escribiendo el archivo XML"
controllerObj.writeXml(filename)
print "Leyendo el archivo XML"
controllerObj.readXml(filename)

controllerObj.createObjects()
controllerObj.connectObjects()
controllerObj.run()
General Comments 0
You need to be logged in to leave comments. Login now