##// END OF EJS Templates
Se agrega el metodo __jumpToLastBlock para saltar al ultimo bloque de datos (solo lectura online). Falta hacer pruebas con el sistema de adquisicion operando en linea.
Daniel Valdez -
r321:dc367d7b08c2
parent child
Show More
@@ -1,2605 +1,2637
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(cad):
    """
    Check whether a value can be converted to a number.

    Input:
        cad : string (or any value) to test for numeric convertibility

    Return:
        True  : the value can be converted with float()
        False : it cannot
    """
    # NOTE: the parameter was renamed from `str`, which shadowed the builtin.
    try:
        float(cad)
        return True
    except (TypeError, ValueError):
        # narrow except: only conversion failures mean "not a number"
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside a UTC range.

    Inputs:
        filename       : full path of a Jicamarca data file (.r)
        startUTSeconds : range start, in seconds since 01/01/1970
        endUTSeconds   : range end, in seconds since 01/01/1970

    Return:
        1 if the file's basic-header time lies in [start, end), 0 otherwise.

    Raises:
        IOError when the file cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        raise IOError("The file %s can't be opened" % (filename))

    try:
        sts = basicHeaderObj.read(fp)
    finally:
        # always release the handle, even if the header read blows up
        fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # half-open interval: start inclusive, end exclusive
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
def isFileinThisTime(filename, startTime, endTime):
    """
    Determine whether a Jicamarca data file holds data inside a time-of-day range.

    Inputs:
        filename  : full path of a Jicamarca data file (.r)
        startTime : range start (datetime.time)
        endTime   : range end (datetime.time)

    Return:
        1 if the file's basic-header time-of-day lies in [startTime, endTime),
        0 otherwise.

    Raises:
        IOError when the file cannot be opened.
    """
    try:
        fp = open(filename, 'rb')
    except IOError:
        raise IOError("The file %s can't be opened" % (filename))

    basicHeaderObj = BasicHeader(LOCALTIME)
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    thisTime = basicHeaderObj.datatime.time()

    # half-open interval: startTime inclusive, endTime exclusive
    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return 0

    return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Return the last file (name only, no path) inside *path* whose name follows
    the Jicamarca convention "xYYYYDDDSSS.ext", or None when there is none.

    Inputs:
        path : folder to scan
        ext  : expected file extension (compared case-insensitively)

    Return:
        The lexicographically-last valid file name, or None.
    """
    validFilelist = []

    # name layout: 0 1234 567 89A BCDE
    #              x YYYY DDD SSS .ext
    for thisFile in os.listdir(path):
        try:
            int(thisFile[1:5])  # year field must be numeric
            int(thisFile[5:8])  # day-of-year field must be numeric
        except ValueError:
            continue

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        # case-insensitive sort; the last entry is the newest set
        return sorted(validFilelist, key=str.lower)[-1]

    return None
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Find the real on-disk path/name of a data file by trying upper/lower-case
    combinations (Linux is case sensitive).

    For a file like .../D2009307/P2009307367.ext the following are tried:
        .../y2009307367.ext            (no doy subfolder)
        .../x2009307/y2009307367.ext   (x in {d, D}, y depends on ext)
    with file prefix {d, D} for ".r" (voltage) and {p, P} for ".pdata" (spectra).

    Return:
        (fullfilename, filename) when one combination exists on disk;
        (None, filename) otherwise, filename being the last combination tried;
        (None, None) for an unsupported extension.
    """
    filename = None

    prefixDirList = [None, 'd', 'D']
    if ext.lower() == ".r":         # voltage
        prefixFileList = ['d', 'D']
    elif ext.lower() == ".pdata":   # spectra
        prefixFileList = ['p', 'P']
    else:
        return None, filename

    # scan every possible combination
    for prefixDir in prefixDirList:
        thispath = path
        if prefixDir is not None:
            # doy folder named xYYYYDDD (x = 'd' or 'D')
            thispath = os.path.join(path, "%s%04d%03d" % (prefixDir, year, doy))

        for prefixFile in prefixFileList:
            # file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)

            if os.path.exists(fullfilename):
                # early return replaces the original find_flag/break bookkeeping
                return fullfilename, filename

    return None, filename
215 215
def isDoyFolder(folder):
    """
    Return 1 when *folder* looks like a Jicamarca doy folder ("xYYYYDDD"),
    i.e. characters 1-4 and 5-7 are numeric; 0 otherwise.
    """
    try:
        int(folder[1:5])  # year
        int(folder[5:8])  # day of year
    except ValueError:
        return 0

    return 1
228 228
class JRODataIO:
    """
    Base class holding the state shared by the JRO data readers and writers
    (headers, current file handle, block counters, output object).
    """

    c = 3E8                  # speed of light [m/s]

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0               # 1 when reading files as they are acquired

    dtype = None             # numpy dtype derived from the processing header

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagTimeBlock = 0        # 1 when a time gap between blocks was detected

    flagIsNewBlock = 0

    fp = None                # current open file object

    firstHeaderSize = 0

    basicHeaderSize = 24     # bytes

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None  # expected size computed from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30         # seconds between blocks before flagging a gap

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # abstract: concrete readers/writers implement their own constructor
        raise ValueError("Not implemented")

    def run(self):
        # abstract: implemented by subclasses
        raise ValueError("Not implemented")

    def getOutput(self):
        """Return the output data object produced by this unit."""
        return self.dataOut
304 304
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files; searches files offline (date range)
    or online (newest file being acquired) and reads them block by block.
    """

    nReadBlocks = 0

    delay = 10               # seconds to wait for a new file (online)

    nTries = 3               # retries per wait loop

    nFiles = 3               # number of files to look ahead when searching

    flagNoMoreFiles = 0

    __isFirstTimeOnline = 1  # cleared after the first online read (used to jump to the last block)
def __init__(self):
    """
    Abstract constructor: concrete readers must implement their own.
    """
    raise ValueError("This method has not been implemented")
324 326
325 327
def createObjByDefault(self):
    """
    Abstract: build the default output data object. Implemented by subclasses.
    """
    raise ValueError("This method has not been implemented")
def getBlockDimension(self):
    # abstract: compute the data-block dimensions; implemented by subclasses
    # (message normalized from the original "No implemented")
    raise ValueError("Not implemented")
def __searchFilesOffLine(self,
                         path,
                         startDate,
                         endDate,
                         startTime=datetime.time(0, 0, 0),
                         endTime=datetime.time(23, 59, 59),
                         set=None,
                         expLabel='',
                         ext='.r',
                         walk=True):
    """
    Search for data files inside *path* whose date and time-of-day fall in
    the given ranges (offline mode).

    Inputs:
        path               : base data folder
        startDate, endDate : date range (datetime.date)
        startTime, endTime : time-of-day range (datetime.time)
        set                : unused; kept for interface compatibility
        expLabel           : sub-experiment folder name
        ext                : file extension
        walk               : when True, look inside doy subfolders (xYYYYDDD)

    Return:
        (pathList, filenameList), or (None, None) when nothing matches.
        Also stores filenameList on self.filenameList.
    """
    pathList = []

    if not walk:
        pathList.append(path)
    else:
        # keep only the doy-style directories
        dirList = []
        for thisPath in os.listdir(path):
            if not os.path.isdir(os.path.join(path, thisPath)):
                continue
            if not isDoyFolder(thisPath):
                continue
            dirList.append(thisPath)

        if not dirList:
            return None, None

        # collect one folder per day in the requested range
        thisDate = startDate
        while thisDate <= endDate:
            year = thisDate.timetuple().tm_year
            doy = thisDate.timetuple().tm_yday

            match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy))
            if match:
                pathList.append(os.path.join(path, match[0], expLabel))
            thisDate += datetime.timedelta(1)

        if pathList == []:
            print("Any folder was found for the date range: %s-%s" % (startDate, endDate))
            return None, None

        print("%d folder(s) was(were) found for the date range: %s-%s" % (len(pathList), startDate, endDate))

    filenameList = []
    for thisPath in pathList:

        fileList = glob.glob1(thisPath, "*%s" % ext)
        fileList.sort()

        for thisFile in fileList:
            filename = os.path.join(thisPath, thisFile)
            if isFileinThisTime(filename, startTime, endTime):
                filenameList.append(filename)

    if not filenameList:
        print("Any file was found for the time range %s - %s" % (startTime, endTime))
        return None, None

    print("%d file(s) was(were) found for the time range: %s - %s" % (len(filenameList), startTime, endTime))

    self.filenameList = filenameList

    return pathList, filenameList
def __searchFilesOnLine(self, path, expLabel="", ext=None, walk=True):
    """
    Look for the newest file of the newest doy folder (online mode).

    Inputs:
        path     : folder containing the data
        expLabel : sub-experiment folder name
        ext      : file extension
        walk     : when True, search inside doy subfolders (xYYYYDDD)

    Return:
        (fullpath, filename, year, doy, set) of the newest valid file, or
        (None, None, None, None, None) when none is found or it fails
        verification.
    """
    if walk:
        # keep only the doy-style directories
        dirList = []
        for thisPath in os.listdir(path):
            if not os.path.isdir(os.path.join(path, thisPath)):
                continue
            if not isDoyFolder(thisPath):
                continue
            dirList.append(thisPath)

        if not dirList:
            return None, None, None, None, None

        dirList = sorted(dirList, key=str.lower)
        doypath = dirList[-1]  # newest doy folder
        fullpath = os.path.join(path, doypath, expLabel)
    else:
        fullpath = path

    print("%s folder was found: " % (fullpath))

    filename = getlastFileFromPath(fullpath, ext)
    if not filename:
        return None, None, None, None, None

    print("%s file was found" % (filename))

    if not self.__verifyFile(os.path.join(fullpath, filename)):
        return None, None, None, None, None

    # name layout: xYYYYDDDSSS.ext
    year = int(filename[1:5])
    doy = int(filename[5:8])
    set = int(filename[8:11])

    return fullpath, filename, year, doy, set
474 476
475 477
def __setNextFileOffline(self):
    """
    Open the next valid file of self.filenameList (offline mode), skipping
    files that fail verification.

    Affected:
        self.flagIsNewFile, self.fileIndex, self.filename, self.fileSize,
        self.fp, self.flagNoMoreFiles

    Return:
        1 on success, 0 when there are no more files.
    """
    idFile = self.fileIndex

    while True:
        idFile += 1
        if idFile >= len(self.filenameList):
            self.flagNoMoreFiles = 1
            print("No more Files")
            return 0

        filename = self.filenameList[idFile]

        if not self.__verifyFile(filename):
            continue

        fileSize = os.path.getsize(filename)
        fp = open(filename, 'rb')
        break

    self.flagIsNewFile = 1
    self.fileIndex = idFile
    self.filename = filename
    self.fileSize = fileSize
    self.fp = fp

    print("Setting the file: %s" % self.filename)

    return 1
def __setNextFileOnline(self):
    """
    Look for the next file with enough data to be read (online mode). When no
    valid file is found it waits self.delay seconds and retries, scanning up
    to self.nFiles+1 candidate sets; when the whole folder is exhausted it
    moves on to the next doy folder.

    Affected:
        self.flagIsNewFile, self.filename, self.fileSize, self.fp, self.set,
        self.doy, self.flagNoMoreFiles

    Return:
        0 : no valid file could be found
        1 : the file was opened successfully and is ready to be read
    """
    nFiles = 0
    fileOk_flag = False
    firstTime_flag = True

    self.set += 1

    # look for the first candidate file
    fullfilename, filename = checkForRealPath(self.path, self.year, self.doy, self.set, self.ext)
    if fullfilename:
        if self.__verifyFile(fullfilename, False):
            fileOk_flag = True

    # nothing yet: wait and scan the next candidates
    if not fileOk_flag:
        for nFiles in range(self.nFiles + 1):

            if firstTime_flag:
                # on the first candidate retry self.nTries times
                tries = self.nTries
            else:
                # afterwards try each candidate only once
                tries = 1

            for nTries in range(tries):
                if firstTime_flag:
                    print("\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % (self.delay, filename, nTries + 1))
                    time.sleep(self.delay)
                else:
                    print("\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext))

                fullfilename, filename = checkForRealPath(self.path, self.year, self.doy, self.set, self.ext)
                if fullfilename:
                    if self.__verifyFile(fullfilename):
                        fileOk_flag = True
                        break

            if fileOk_flag:
                break

            firstTime_flag = False

            print("\tSkipping the file \"%s\" due to this file doesn't exist" % filename)
            self.set += 1

            if nFiles == (self.nFiles - 1):
                # nothing found in this folder: move on to the next doy
                self.set = 0
                self.doy += 1

    if fileOk_flag:
        self.fileSize = os.path.getsize(fullfilename)
        self.filename = fullfilename
        self.flagIsNewFile = 1
        if self.fp != None:
            self.fp.close()
        self.fp = open(fullfilename, 'rb')
        self.flagNoMoreFiles = 0
        print('Setting the file: %s' % fullfilename)
    else:
        self.fileSize = 0
        self.filename = None
        self.flagIsNewFile = 0
        self.fp = None
        self.flagNoMoreFiles = 1
        print('No more Files')

    return fileOk_flag
591 593
def setNextFile(self):
    """
    Close the current file, open the next one (online or offline mode) and
    read its first header.

    Return:
        1 on success, 0 when there are no more files.
    """
    if self.fp != None:
        self.fp.close()

    if self.online:
        newFile = self.__setNextFileOnline()
    else:
        newFile = self.__setNextFileOffline()

    if not newFile:
        return 0

    self.__readFirstHeader()
    self.nReadBlocks = 0
    return 1
607 609
def __waitNewBlock(self):
    """
    Wait (online mode only) until the current file has grown enough to hold a
    new data block, then read its basic header.

    Return:
        1 when a new block was found, 0 otherwise (always 0 offline or when
        the file already delivered all its blocks).
    """
    if not self.online:
        return 0

    if self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile:
        return 0

    currentPointer = self.fp.tell()
    neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

    for nTries in range(self.nTries):

        # reopen to refresh the file contents, keeping the read position
        self.fp.close()
        self.fp = open(self.filename, 'rb')
        self.fp.seek(currentPointer)

        self.fileSize = os.path.getsize(self.filename)
        currentSize = self.fileSize - currentPointer

        if currentSize >= neededSize:
            self.__rdBasicHeader()
            return 1

        print("\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
        time.sleep(self.delay)

    return 0
def __jumpToLastBlock(self):
    """
    On the first online read only, skip ahead to the last complete data block
    of the current file so reading starts with the freshest data.
    """
    if not self.__isFirstTimeOnline:
        return

    csize = self.fileSize - self.fp.tell()

    # skip the first data block (typo "sata" fixed: "salta")
    if csize > self.processingHeaderObj.blockSize:
        self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
    else:
        return

    # then skip as many whole (basic header + block) units as fit
    csize = self.fileSize - self.fp.tell()
    neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
    factor = int(csize / neededsize)
    if factor > 0:
        self.fp.seek(self.fp.tell() + factor * neededsize)

    self.flagIsNewFile = 0
    self.__isFirstTimeOnline = 0
def __setNewBlock(self):
    """
    Position the reader at the next data block: jump to the last block on the
    first online read, wait for new data (online) or switch to the next file
    when the current one is exhausted.

    Return:
        1 when a new block is ready, 0 otherwise.
    """
    if self.fp == None:
        return 0

    if self.online:
        self.__jumpToLastBlock()

    if self.flagIsNewFile:
        return 1

    self.lastUTTime = self.basicHeaderObj.utc
    currentSize = self.fileSize - self.fp.tell()
    neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

    if currentSize >= neededSize:
        self.__rdBasicHeader()
        return 1

    if self.__waitNewBlock():
        return 1

    if not self.setNextFile():
        return 0

    deltaTime = self.basicHeaderObj.utc - self.lastUTTime

    self.flagTimeBlock = 0
    if deltaTime > self.maxTimeStep:
        # a gap larger than maxTimeStep seconds means blocks are not contiguous
        self.flagTimeBlock = 1

    return 1
674 702
def readNextBlock(self):
    """
    Set and read the next data block.

    Return:
        1 on success, 0 when no more blocks are available.
    """
    if not self.__setNewBlock():
        return 0

    if not self.readBlock():
        return 0

    return 1
683 711
def __rdProcessingHeader(self, fp=None):
    # read the processing header from fp (defaults to the current file)
    if fp == None:
        fp = self.fp
    self.processingHeaderObj.read(fp)

def __rdRadarControllerHeader(self, fp=None):
    # read the radar-controller header from fp (defaults to the current file)
    if fp == None:
        fp = self.fp
    self.radarControllerHeaderObj.read(fp)

def __rdSystemHeader(self, fp=None):
    # read the system header from fp (defaults to the current file)
    if fp == None:
        fp = self.fp
    self.systemHeaderObj.read(fp)

def __rdBasicHeader(self, fp=None):
    # read the basic header from fp (defaults to the current file)
    if fp == None:
        fp = self.fp
    self.basicHeaderObj.read(fp)
def __readFirstHeader(self):
    """
    Read the four headers at the top of the file and derive the numpy data
    type, the IPP in seconds and the expected file size.

    Raises:
        ValueError when the data type encoded in the processing flags is
        unknown.
    """
    self.__rdBasicHeader()
    self.__rdSystemHeader()
    self.__rdRadarControllerHeader()
    self.__rdProcessingHeader()

    self.firstHeaderSize = self.basicHeaderObj.size

    # the data type is encoded as a power of two inside the processing flags
    datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))

    # table lookup replaces the original if/elif chain
    dtypeTable = {
        0: numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
        1: numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
        2: numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
        3: numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
        4: numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
        5: numpy.dtype([('real', '<f8'), ('imag', '<f8')]),
    }
    if datatype not in dtypeTable:
        raise ValueError('Data type was not defined')
    self.dtype = dtypeTable[datatype]

    # presumably ipp is in km: 2 * 1000 * ipp / c gives the two-way time in
    # seconds — TODO confirm against RadarControllerHeader
    self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
    self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize * (self.processingHeaderObj.dataBlocksPerFile - 1)
    self.getBlockDimension()
def __verifyFile(self, filename, msgFlag=True):
    """
    Check that *filename* can be opened and contains at least one complete
    data block. When the headers have not been read yet (neededSize == 0)
    they are read from this very file to compute the block size.

    Inputs:
        filename : full path of the file to check
        msgFlag  : when True print diagnostic messages

    Return:
        True when the file has enough data, False otherwise.
    """
    msg = None
    try:
        fp = open(filename, 'rb')
        currentPosition = fp.tell()
    except IOError:
        if msgFlag:
            print("The file %s can't be opened" % (filename))
        return False

    neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

    if neededSize == 0:
        # no headers known yet: read them from this file
        basicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        try:
            if not basicHeaderObj.read(fp): raise IOError
            if not systemHeaderObj.read(fp): raise IOError
            if not radarControllerHeaderObj.read(fp): raise IOError
            if not processingHeaderObj.read(fp): raise IOError
            data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))

            neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
        except Exception:
            if msgFlag:
                print("\tThe file %s is empty or it hasn't enough data" % filename)
            fp.close()
            return False
    else:
        msg = "\tSkipping the file %s due to it hasn't enough data" % filename

    fp.close()
    fileSize = os.path.getsize(filename)
    currentSize = fileSize - currentPosition
    if currentSize < neededSize:
        if msgFlag and (msg != None):
            print(msg)
        return False

    return True
786 814
def setup(self,
          path=None,
          startDate=None,
          endDate=None,
          startTime=datetime.time(0, 0, 0),
          endTime=datetime.time(23, 59, 59),
          set=0,
          expLabel="",
          ext=None,
          online=False,
          delay=60,
          walk=True):
    """
    Configure the reader and open the first data file.

    Inputs:
        path               : base data folder (mandatory)
        startDate, endDate : date range (offline mode)
        startTime, endTime : time-of-day range (offline mode)
        set                : initial file set number
        expLabel           : sub-experiment folder name
        ext                : file extension (defaults to self.ext)
        online             : when True read files as they are being acquired
        delay              : seconds between retries
        walk               : when True search inside doy subfolders

    Return:
        self.dataOut, or None when no valid online file was found.
    """
    if path == None:
        raise ValueError("The path is not valid")

    if ext == None:
        ext = self.ext

    # BUG FIX: in the original, online mode left pathList/filenameList
    # unbound and crashed with NameError at the assignments below.
    pathList = []
    filenameList = []

    if online:
        print("Searching files in online mode...")

        for nTries in range(self.nTries):
            fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

            if fullpath:
                break

            print('\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries + 1))
            time.sleep(self.delay)

        if not fullpath:
            print("There 'isn't valied files in %s" % path)
            return None

        self.year = year
        self.doy = doy
        self.set = set - 1
        self.path = path

    else:
        print("Searching files in offline mode ...")
        pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                           startTime=startTime, endTime=endTime,
                                                           set=set, expLabel=expLabel, ext=ext,
                                                           walk=walk)

        if not pathList:
            print("No *%s files into the folder %s \nfor the range: %s - %s" % (ext, path,
                  datetime.datetime.combine(startDate, startTime).ctime(),
                  datetime.datetime.combine(endDate, endTime).ctime()))
            sys.exit(-1)

    self.fileIndex = -1
    self.pathList = pathList
    self.filenameList = filenameList

    self.online = online
    self.delay = delay
    ext = ext.lower()
    self.ext = ext

    if not self.setNextFile():
        if (startDate != None) and (endDate != None):
            print("No files in range: %s - %s" % (datetime.datetime.combine(startDate, startTime).ctime(), datetime.datetime.combine(endDate, endTime).ctime()))
        elif startDate != None:
            print("No files in range: %s" % (datetime.datetime.combine(startDate, startTime).ctime()))
        else:
            print("No files")

        sys.exit(-1)

    return self.dataOut
864 892
def getData(self):
    # abstract: return the next processed data unit; implemented by subclasses.
    # BUG FIX: the original declared these three methods without *self*,
    # which would raise TypeError when called on an instance.
    raise ValueError("This method has not been implemented")

def hasNotDataInBuffer(self):
    # abstract: return True when the internal buffer is exhausted
    raise ValueError("This method has not been implemented")

def readBlock(self):
    # abstract: read one data block; implemented by subclasses
    raise ValueError("This method has not been implemented")
876 904
def isEndProcess(self):
    """Return self.flagNoMoreFiles: nonzero when every file has been read."""
    return self.flagNoMoreFiles
880 908
def printReadBlocks(self):
    # report how many blocks have been read from the current file
    print("Number of read blocks per file %04d" % self.nReadBlocks)

def printTotalBlocks(self):
    # report the total number of blocks read so far
    print("Number of read blocks %04d" % self.nTotalBlocks)

def printNumberOfBlock(self):
    # report the current block only right after a new block was read
    if self.flagIsNewBlock:
        print("Block No. %04d, Total blocks %04d" % (self.basicHeaderObj.dataBlock, self.nTotalBlocks))

def printInfo(self):
    # dump the four headers of the current file
    print(self.basicHeaderObj.printInfo())
    print(self.systemHeaderObj.printInfo())
    print(self.radarControllerHeaderObj.printInfo())
    print(self.processingHeaderObj.printInfo())
def run(self, **kwargs):
    """
    Entry point used by the processing chain: configure the reader on the
    first call (kwargs are forwarded to setup), then read the next data unit
    on every call.
    """
    if not self.isConfig:
        self.setup(**kwargs)
        self.isConfig = True

    self.getData()
911 939
class JRODataWriter(JRODataIO, Operation):
    """
    Write processed data to .r or .pdata files. Data is always written one
    block at a time.
    """

    blockIndex = 0           # index of the current block inside the file

    path = None

    setFile = None           # set number (SSS) of the current file

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
def __init__(self, dataOut=None):
    # abstract constructor: implemented by concrete writers
    raise ValueError("Not implemented")

def hasAllDataInBuffer(self):
    # abstract: True when a full block is buffered and ready to be written
    raise ValueError("Not implemented")

def setBlockDimension(self):
    # abstract: define the shape of the output data block
    raise ValueError("Not implemented")

def writeBlock(self):
    # abstract: write one data block to the current file
    # (message normalized from the original "No implemented")
    raise ValueError("Not implemented")

def putData(self):
    # abstract: push incoming data into the write buffer
    raise ValueError("Not implemented")
def getDataHeader(self):
    """
    Take a copy of the first header (basic, system, radar-controller and
    processing headers) from the current data object.

    Affected:
        self.basicHeaderObj, self.systemHeaderObj,
        self.radarControllerHeaderObj, self.processingHeaderObj

    Return:
        None
    """
    # abstract: implemented by concrete writers
    raise ValueError("Not implemented")
def getBasicHeader(self):
    """
    Fill self.basicHeaderObj from the current output data: the utc time is
    split into integer seconds and milliseconds.
    """
    self.basicHeaderObj.size = self.basicHeaderSize  # bytes
    self.basicHeaderObj.version = self.versionFile
    self.basicHeaderObj.dataBlock = self.nTotalBlocks

    utc = numpy.floor(self.dataOut.utctime)
    milisecond = (self.dataOut.utctime - utc) * 1000.0

    self.basicHeaderObj.utc = utc
    self.basicHeaderObj.miliSecond = milisecond
    self.basicHeaderObj.timeZone = 0
    self.basicHeaderObj.dstFlag = 0
    self.basicHeaderObj.errorCount = 0
def __writeFirstHeader(self):
    """
    Write the first header of the file: the basic header followed by the long
    header (SystemHeader, RadarControllerHeader, ProcessingHeader).

    Affected:
        self.dtype

    Return:
        None
    """
    # the basic-header size field must cover the long header too
    sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
    self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

    self.basicHeaderObj.write(self.fp)
    self.systemHeaderObj.write(self.fp)
    self.radarControllerHeaderObj.write(self.fp)
    self.processingHeaderObj.write(self.fp)

    self.dtype = self.dataOut.dtype
1004 1032
def __setNewBlock(self):
    """
    On a new file write the First Header; otherwise write only the basic
    header of the next block, switching files when the current one is full.

    Return:
        0 : nothing could be written
        1 : the basic or the first header was written
    """
    if self.fp == None:
        self.setNextFile()

    if self.flagIsNewFile:
        return 1

    if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
        self.basicHeaderObj.write(self.fp)
        return 1

    if not self.setNextFile():
        return 0

    return 1
1028 1056
def writeNextBlock(self):
    """
    Select the next data block and write it to the current file.

    Return:
        0 : the block could not be written
        1 : the block was written successfully
        (the original docstring described both outcomes as failure)
    """
    if not self.__setNewBlock():
        return 0

    self.writeBlock()

    return 1
1043 1071
def setNextFile(self):
    """
    Determine and open the next file to be written, following the Jicamarca
    layout dYYYYDDD/xYYYYDDDSSS.ext and continuing the set numbering from the
    files already present in the folder.

    Affected:
        self.filename, self.subfolder, self.fp, self.setFile,
        self.flagIsNewFile, self.blockIndex

    Return:
        0 : the file could not be opened for writing
        1 : the file is ready to be written
    """
    ext = self.ext
    path = self.path

    if self.fp != None:
        self.fp.close()

    timeTuple = time.localtime(self.dataOut.utctime)
    subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

    fullpath = os.path.join(path, subfolder)
    if not os.path.exists(fullpath):
        os.mkdir(fullpath)
        self.setFile = -1  # new folder: restart the set counter
    else:
        filesList = os.listdir(fullpath)
        if len(filesList) > 0:
            filesList = sorted(filesList, key=str.lower)
            filen = filesList[-1]
            # expected name layout:
            #   0 1234 567 89A BCDE (hex)
            #   x YYYY DDD SSS .ext
            if isNumber(filen[8:11]):
                # continue numbering from the last file's set
                self.setFile = int(filen[8:11])
            else:
                self.setFile = -1
        else:
            self.setFile = -1  # empty folder: restart the set counter

    setFile = self.setFile
    setFile += 1

    # `thisFile` replaces the original `file`, which shadowed the builtin
    thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext)

    filename = os.path.join(path, subfolder, thisFile)

    fp = open(filename, 'wb')

    self.blockIndex = 0

    # save the new state
    self.filename = filename
    self.subfolder = subfolder
    self.fp = fp
    self.setFile = setFile
    self.flagIsNewFile = 1

    self.getDataHeader()

    print('Writing the file: %s' % self.filename)

    self.__writeFirstHeader()

    return 1
1116 1144
1117 1145 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1118 1146 """
1119 1147 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1120 1148
1121 1149 Inputs:
1122 1150 path : el path destino en el cual se escribiran los files a crear
1123 1151 format : formato en el cual sera salvado un file
1124 1152 set : el setebo del file
1125 1153
1126 1154 Return:
1127 1155 0 : Si no realizo un buen seteo
1128 1156 1 : Si realizo un buen seteo
1129 1157 """
1130 1158
1131 1159 if ext == None:
1132 1160 ext = self.ext
1133 1161
1134 1162 ext = ext.lower()
1135 1163
1136 1164 self.ext = ext
1137 1165
1138 1166 self.path = path
1139 1167
1140 1168 self.setFile = set - 1
1141 1169
1142 1170 self.blocksPerFile = blocksPerFile
1143 1171
1144 1172 self.profilesPerBlock = profilesPerBlock
1145 1173
1146 1174 self.dataOut = dataOut
1147 1175
1148 1176 if not(self.setNextFile()):
1149 1177 print "There isn't a next file"
1150 1178 return 0
1151 1179
1152 1180 self.setBlockDimension()
1153 1181
1154 1182 return 1
1155 1183
1156 1184 def run(self, dataOut, **kwargs):
1157 1185
1158 1186 if not(self.isConfig):
1159 1187
1160 1188 self.setup(dataOut, **kwargs)
1161 1189 self.isConfig = True
1162 1190
1163 1191 self.putData()
1164 1192
1165 1193 class VoltageReader(JRODataReader):
1166 1194 """
1167 1195 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1168 1196 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1169 1197 perfiles*alturas*canales) son almacenados en la variable "buffer".
1170 1198
1171 1199 perfiles * alturas * canales
1172 1200
1173 1201 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1174 1202 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1175 1203 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1176 1204 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1177 1205
1178 1206 Example:
1179 1207
1180 1208 dpath = "/home/myuser/data"
1181 1209
1182 1210 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1183 1211
1184 1212 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1185 1213
1186 1214 readerObj = VoltageReader()
1187 1215
1188 1216 readerObj.setup(dpath, startTime, endTime)
1189 1217
1190 1218 while(True):
1191 1219
1192 1220 #to get one profile
1193 1221 profile = readerObj.getData()
1194 1222
1195 1223 #print the profile
1196 1224 print profile
1197 1225
1198 1226 #If you want to see all datablock
1199 1227 print readerObj.datablock
1200 1228
1201 1229 if readerObj.flagNoMoreFiles:
1202 1230 break
1203 1231
1204 1232 """
1205 1233
1206 1234 ext = ".r"
1207 1235
1208 1236 optchar = "D"
1209 1237 dataOut = None
1210 1238
1211 1239
1212 1240 def __init__(self):
1213 1241 """
1214 1242 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1215 1243
1216 1244 Input:
1217 1245 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1218 1246 almacenar un perfil de datos cada vez que se haga un requerimiento
1219 1247 (getData). El perfil sera obtenido a partir del buffer de datos,
1220 1248 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1221 1249 bloque de datos.
1222 1250 Si este parametro no es pasado se creara uno internamente.
1223 1251
1224 1252 Variables afectadas:
1225 1253 self.dataOut
1226 1254
1227 1255 Return:
1228 1256 None
1229 1257 """
1230 1258
1231 1259 self.isConfig = False
1232 1260
1233 1261 self.datablock = None
1234 1262
1235 1263 self.utc = 0
1236 1264
1237 1265 self.ext = ".r"
1238 1266
1239 1267 self.optchar = "D"
1240 1268
1241 1269 self.basicHeaderObj = BasicHeader(LOCALTIME)
1242 1270
1243 1271 self.systemHeaderObj = SystemHeader()
1244 1272
1245 1273 self.radarControllerHeaderObj = RadarControllerHeader()
1246 1274
1247 1275 self.processingHeaderObj = ProcessingHeader()
1248 1276
1249 1277 self.online = 0
1250 1278
1251 1279 self.fp = None
1252 1280
1253 1281 self.idFile = None
1254 1282
1255 1283 self.dtype = None
1256 1284
1257 1285 self.fileSizeByHeader = None
1258 1286
1259 1287 self.filenameList = []
1260 1288
1261 1289 self.filename = None
1262 1290
1263 1291 self.fileSize = None
1264 1292
1265 1293 self.firstHeaderSize = 0
1266 1294
1267 1295 self.basicHeaderSize = 24
1268 1296
1269 1297 self.pathList = []
1270 1298
1271 1299 self.filenameList = []
1272 1300
1273 1301 self.lastUTTime = 0
1274 1302
1275 1303 self.maxTimeStep = 30
1276 1304
1277 1305 self.flagNoMoreFiles = 0
1278 1306
1279 1307 self.set = 0
1280 1308
1281 1309 self.path = None
1282 1310
1283 1311 self.profileIndex = 2**32-1
1284 1312
1285 1313 self.delay = 3 #seconds
1286 1314
1287 1315 self.nTries = 3 #quantity tries
1288 1316
1289 1317 self.nFiles = 3 #number of files for searching
1290 1318
1291 1319 self.nReadBlocks = 0
1292 1320
1293 1321 self.flagIsNewFile = 1
1322
1323 self.__isFirstTimeOnline = 1
1294 1324
1295 1325 self.ippSeconds = 0
1296 1326
1297 1327 self.flagTimeBlock = 0
1298 1328
1299 1329 self.flagIsNewBlock = 0
1300 1330
1301 1331 self.nTotalBlocks = 0
1302 1332
1303 1333 self.blocksize = 0
1304 1334
1305 1335 self.dataOut = self.createObjByDefault()
1306 1336
1307 1337 def createObjByDefault(self):
1308 1338
1309 1339 dataObj = Voltage()
1310 1340
1311 1341 return dataObj
1312 1342
1313 1343 def __hasNotDataInBuffer(self):
1314 1344 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1315 1345 return 1
1316 1346 return 0
1317 1347
1318 1348
1319 1349 def getBlockDimension(self):
1320 1350 """
1321 1351 Obtiene la cantidad de puntos a leer por cada bloque de datos
1322 1352
1323 1353 Affected:
1324 1354 self.blocksize
1325 1355
1326 1356 Return:
1327 1357 None
1328 1358 """
1329 1359 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1330 1360 self.blocksize = pts2read
1331 1361
1332 1362
1333 1363 def readBlock(self):
1334 1364 """
1335 1365 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1336 1366 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1337 1367 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1338 1368 es seteado a 0
1339 1369
1340 1370 Inputs:
1341 1371 None
1342 1372
1343 1373 Return:
1344 1374 None
1345 1375
1346 1376 Affected:
1347 1377 self.profileIndex
1348 1378 self.datablock
1349 1379 self.flagIsNewFile
1350 1380 self.flagIsNewBlock
1351 1381 self.nTotalBlocks
1352 1382
1353 1383 Exceptions:
1354 1384 Si un bloque leido no es un bloque valido
1355 1385 """
1356 1386
1357 1387 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1358 1388
1359 1389 try:
1360 1390 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1361 1391 except:
1362 1392 print "The read block (%3d) has not enough data" %self.nReadBlocks
1363 1393 return 0
1364 1394
1365 1395 junk = numpy.transpose(junk, (2,0,1))
1366 1396 self.datablock = junk['real'] + junk['imag']*1j
1367 1397
1368 1398 self.profileIndex = 0
1369 1399
1370 1400 self.flagIsNewFile = 0
1371 1401 self.flagIsNewBlock = 1
1372 1402
1373 1403 self.nTotalBlocks += 1
1374 1404 self.nReadBlocks += 1
1375 1405
1376 1406 return 1
1377 1407
1378 1408
1379 1409 def getData(self):
1380 1410 """
1381 1411 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1382 1412 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1383 1413 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1384 1414
1385 1415 Ademas incrementa el contador del buffer en 1.
1386 1416
1387 1417 Return:
1388 1418 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1389 1419 buffer. Si no hay mas archivos a leer retorna None.
1390 1420
1391 1421 Variables afectadas:
1392 1422 self.dataOut
1393 1423 self.profileIndex
1394 1424
1395 1425 Affected:
1396 1426 self.dataOut
1397 1427 self.profileIndex
1398 1428 self.flagTimeBlock
1399 1429 self.flagIsNewBlock
1400 1430 """
1401 1431
1402 1432 if self.flagNoMoreFiles:
1403 1433 self.dataOut.flagNoData = True
1404 1434 print 'Process finished'
1405 1435 return 0
1406 1436
1407 1437 self.flagTimeBlock = 0
1408 1438 self.flagIsNewBlock = 0
1409 1439
1410 1440 if self.__hasNotDataInBuffer():
1411 1441
1412 1442 if not( self.readNextBlock() ):
1413 1443 return 0
1414 1444
1415 1445 self.dataOut.dtype = self.dtype
1416 1446
1417 1447 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1418 1448
1419 1449 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1420 1450
1421 1451 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1422 1452
1423 1453 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1424 1454
1425 1455 self.dataOut.flagTimeBlock = self.flagTimeBlock
1426 1456
1427 1457 self.dataOut.ippSeconds = self.ippSeconds
1428 1458
1429 1459 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1430 1460
1431 1461 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1432 1462
1433 1463 self.dataOut.flagShiftFFT = False
1434 1464
1435 1465 if self.radarControllerHeaderObj.code != None:
1436 1466
1437 1467 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1438 1468
1439 1469 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1440 1470
1441 1471 self.dataOut.code = self.radarControllerHeaderObj.code
1442 1472
1443 1473 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1444 1474
1445 1475 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1446 1476
1447 1477 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1448 1478
1449 1479 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1450 1480
1451 1481 self.dataOut.flagShiftFFT = False
1452 1482
1453 1483
1454 1484 # self.updateDataHeader()
1455 1485
1456 1486 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1457 1487
1458 1488 if self.datablock == None:
1459 1489 self.dataOut.flagNoData = True
1460 1490 return 0
1461 1491
1462 1492 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1463 1493
1464 1494 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1465 1495
1466 1496 self.profileIndex += 1
1467 1497
1468 1498 self.dataOut.flagNoData = False
1469 1499
1470 1500 # print self.profileIndex, self.dataOut.utctime
1471 1501 # if self.profileIndex == 800:
1472 1502 # a=1
1473 1503
1474 1504
1475 1505 return self.dataOut.data
1476 1506
1477 1507
class VoltageWriter(JRODataWriter):
    """
    Writer for voltage data to Jicamarca rawdata files (.r).  Profiles
    are accumulated in a buffer and written to disk one block at a time.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None

    def __init__(self):
        """
        Initialize the writer state.  Header objects are created here;
        file and format attributes are filled in later by setup().
        """
        self.nTotalBlocks = 0   # (was assigned twice in the original)

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        """Return 1 when the buffer holds a complete block of profiles."""
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks composing a data block and
        allocate the complex write buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # on-disk layout of one block: (profiles, heights, channels)
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # in-memory buffer: (channels, profiles, heights)
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """
        Write the buffered block to the current file and reset the buffer.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # buffer is (channels, profiles, heights); disk layout is
        # (profiles, heights, channels)
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Buffer one profile from self.dataOut; when the buffer holds a full
        block, write it out.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data, or no more files can be written
            1 : the profile was buffered (and possibly a block written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # time discontinuity: discard the partial buffer and start a
            # new file so blocks stay contiguous in time
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask describing dtype and processing state.

        Raises:
            ValueError : if self.dataOut.dtype is not one of the supported
                         complex formats (was a silent NameError before).
        """
        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        dtypeValue = None
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        if dtypeValue is None:
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # 'code' is a numpy array: "!= None" compares elementwise, so use
        # an identity test instead
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags

    def __getBlockSize(self):
        '''
        Return the size in bytes of one Voltage data block.

        Raises:
            ValueError : if self.dataOut.dtype is not one of the supported
                         complex formats (was a silent NameError before).
        '''
        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]   # bytes per component

        datatypeValue = None
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        if datatypeValue is None:
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        # factor 2: each sample stores a real and an imaginary component
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):
        """
        Fill a copy of the First Header from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj
            self.dtype

        Return:
            None
        """
        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40   # bytes: fixed part of the processing header
        self.processingHeaderObj.dtype = 0   # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1   # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1   # source data is Voltage: no incoherent integration
        self.processingHeaderObj.totalSpectra = 0   # source data is Voltage: no spectra

        # identity test: 'code' is a numpy array when present
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12   # one height-window descriptor

        self.processingHeaderObj.size = processingHeaderSize
1742 1772
1743 1773 class SpectraReader(JRODataReader):
1744 1774 """
1745 1775 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1746 1776 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1747 1777 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1748 1778
1749 1779 paresCanalesIguales * alturas * perfiles (Self Spectra)
1750 1780 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1751 1781 canales * alturas (DC Channels)
1752 1782
1753 1783 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1754 1784 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1755 1785 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1756 1786 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1757 1787
1758 1788 Example:
1759 1789 dpath = "/home/myuser/data"
1760 1790
1761 1791 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1762 1792
1763 1793 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1764 1794
1765 1795 readerObj = SpectraReader()
1766 1796
1767 1797 readerObj.setup(dpath, startTime, endTime)
1768 1798
1769 1799 while(True):
1770 1800
1771 1801 readerObj.getData()
1772 1802
1773 1803 print readerObj.data_spc
1774 1804
1775 1805 print readerObj.data_cspc
1776 1806
1777 1807 print readerObj.data_dc
1778 1808
1779 1809 if readerObj.flagNoMoreFiles:
1780 1810 break
1781 1811
1782 1812 """
1783 1813
1784 1814 pts2read_SelfSpectra = 0
1785 1815
1786 1816 pts2read_CrossSpectra = 0
1787 1817
1788 1818 pts2read_DCchannels = 0
1789 1819
1790 1820 ext = ".pdata"
1791 1821
1792 1822 optchar = "P"
1793 1823
1794 1824 dataOut = None
1795 1825
1796 1826 nRdChannels = None
1797 1827
1798 1828 nRdPairs = None
1799 1829
1800 1830 rdPairList = []
1801 1831
1802 1832
1803 1833 def __init__(self):
1804 1834 """
1805 1835 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1806 1836
1807 1837 Inputs:
1808 1838 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1809 1839 almacenar un perfil de datos cada vez que se haga un requerimiento
1810 1840 (getData). El perfil sera obtenido a partir del buffer de datos,
1811 1841 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1812 1842 bloque de datos.
1813 1843 Si este parametro no es pasado se creara uno internamente.
1814 1844
1815 1845 Affected:
1816 1846 self.dataOut
1817 1847
1818 1848 Return : None
1819 1849 """
1820 1850
1821 1851 self.isConfig = False
1822 1852
1823 1853 self.pts2read_SelfSpectra = 0
1824 1854
1825 1855 self.pts2read_CrossSpectra = 0
1826 1856
1827 1857 self.pts2read_DCchannels = 0
1828 1858
1829 1859 self.datablock = None
1830 1860
1831 1861 self.utc = None
1832 1862
1833 1863 self.ext = ".pdata"
1834 1864
1835 1865 self.optchar = "P"
1836 1866
1837 1867 self.basicHeaderObj = BasicHeader(LOCALTIME)
1838 1868
1839 1869 self.systemHeaderObj = SystemHeader()
1840 1870
1841 1871 self.radarControllerHeaderObj = RadarControllerHeader()
1842 1872
1843 1873 self.processingHeaderObj = ProcessingHeader()
1844 1874
1845 1875 self.online = 0
1846 1876
1847 1877 self.fp = None
1848 1878
1849 1879 self.idFile = None
1850 1880
1851 1881 self.dtype = None
1852 1882
1853 1883 self.fileSizeByHeader = None
1854 1884
1855 1885 self.filenameList = []
1856 1886
1857 1887 self.filename = None
1858 1888
1859 1889 self.fileSize = None
1860 1890
1861 1891 self.firstHeaderSize = 0
1862 1892
1863 1893 self.basicHeaderSize = 24
1864 1894
1865 1895 self.pathList = []
1866 1896
1867 1897 self.lastUTTime = 0
1868 1898
1869 1899 self.maxTimeStep = 30
1870 1900
1871 1901 self.flagNoMoreFiles = 0
1872 1902
1873 1903 self.set = 0
1874 1904
1875 1905 self.path = None
1876 1906
1877 1907 self.delay = 60 #seconds
1878 1908
1879 1909 self.nTries = 3 #quantity tries
1880 1910
1881 1911 self.nFiles = 3 #number of files for searching
1882 1912
1883 1913 self.nReadBlocks = 0
1884 1914
1885 1915 self.flagIsNewFile = 1
1916
1917 self.__isFirstTimeOnline = 1
1886 1918
1887 1919 self.ippSeconds = 0
1888 1920
1889 1921 self.flagTimeBlock = 0
1890 1922
1891 1923 self.flagIsNewBlock = 0
1892 1924
1893 1925 self.nTotalBlocks = 0
1894 1926
1895 1927 self.blocksize = 0
1896 1928
1897 1929 self.dataOut = self.createObjByDefault()
1898 1930
1899 1931
1900 1932 def createObjByDefault(self):
1901 1933
1902 1934 dataObj = Spectra()
1903 1935
1904 1936 return dataObj
1905 1937
    def __hasNotDataInBuffer(self):
        # Spectra files are consumed one whole block per getData() call,
        # so there is never leftover buffered data: always report "empty"
        # to force a new block read.
        return 1
1908 1940
1909 1941
1910 1942 def getBlockDimension(self):
1911 1943 """
1912 1944 Obtiene la cantidad de puntos a leer por cada bloque de datos
1913 1945
1914 1946 Affected:
1915 1947 self.nRdChannels
1916 1948 self.nRdPairs
1917 1949 self.pts2read_SelfSpectra
1918 1950 self.pts2read_CrossSpectra
1919 1951 self.pts2read_DCchannels
1920 1952 self.blocksize
1921 1953 self.dataOut.nChannels
1922 1954 self.dataOut.nPairs
1923 1955
1924 1956 Return:
1925 1957 None
1926 1958 """
1927 1959 self.nRdChannels = 0
1928 1960 self.nRdPairs = 0
1929 1961 self.rdPairList = []
1930 1962
1931 1963 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1932 1964 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1933 1965 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1934 1966 else:
1935 1967 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1936 1968 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1937 1969
1938 1970 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1939 1971
1940 1972 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1941 1973 self.blocksize = self.pts2read_SelfSpectra
1942 1974
1943 1975 if self.processingHeaderObj.flag_cspc:
1944 1976 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1945 1977 self.blocksize += self.pts2read_CrossSpectra
1946 1978
1947 1979 if self.processingHeaderObj.flag_dc:
1948 1980 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1949 1981 self.blocksize += self.pts2read_DCchannels
1950 1982
1951 1983 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1952 1984
1953 1985
    def readBlock(self):
        """
        Read one data block (self-spectra, optional cross-spectra and DC
        channels) from the current file position (self.fp) and update all
        related block attributes (metadata + data).

        Return: 1

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If a read block is not a valid block.
        """
        blockOk_flag = False    # NOTE(review): never used in this method
        fpointer = self.fp.tell()    # NOTE(review): never used in this method

        # self-spectra are stored as plain reals (self.dtype[0])
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array

        if self.processingHeaderObj.flag_cspc:
            # cross-spectra are stored as (real, imag) structured values
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array


        if not(self.processingHeaderObj.shif_fft):
            #roll the spectra to the right along axis 2 (profiles) to center the FFT
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                #roll the cross-spectra the same way
                cspc = numpy.roll( cspc, shift, axis=2 )

#             self.processingHeaderObj.shif_fft = True

        # reorder to (channels/pairs, profiles, heights)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
2022 2054
2023 2055
2024 2056 def getData(self):
2025 2057 """
2026 2058 Copia el buffer de lectura a la clase "Spectra",
2027 2059 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2028 2060 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2029 2061
2030 2062 Return:
2031 2063 0 : Si no hay mas archivos disponibles
2032 2064 1 : Si hizo una buena copia del buffer
2033 2065
2034 2066 Affected:
2035 2067 self.dataOut
2036 2068
2037 2069 self.flagTimeBlock
2038 2070 self.flagIsNewBlock
2039 2071 """
2040 2072
2041 2073 if self.flagNoMoreFiles:
2042 2074 self.dataOut.flagNoData = True
2043 2075 print 'Process finished'
2044 2076 return 0
2045 2077
2046 2078 self.flagTimeBlock = 0
2047 2079 self.flagIsNewBlock = 0
2048 2080
2049 2081 if self.__hasNotDataInBuffer():
2050 2082
2051 2083 if not( self.readNextBlock() ):
2052 2084 self.dataOut.flagNoData = True
2053 2085 return 0
2054 2086
2055 2087 # self.updateDataHeader()
2056 2088
2057 2089 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2058 2090
2059 2091 if self.data_dc == None:
2060 2092 self.dataOut.flagNoData = True
2061 2093 return 0
2062 2094
2063 2095 self.dataOut.data_spc = self.data_spc
2064 2096
2065 2097 self.dataOut.data_cspc = self.data_cspc
2066 2098
2067 2099 self.dataOut.data_dc = self.data_dc
2068 2100
2069 2101 self.dataOut.flagTimeBlock = self.flagTimeBlock
2070 2102
2071 2103 self.dataOut.flagNoData = False
2072 2104
2073 2105 self.dataOut.dtype = self.dtype
2074 2106
2075 2107 # self.dataOut.nChannels = self.nRdChannels
2076 2108
2077 2109 self.dataOut.nPairs = self.nRdPairs
2078 2110
2079 2111 self.dataOut.pairsList = self.rdPairList
2080 2112
2081 2113 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2082 2114
2083 2115 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2084 2116
2085 2117 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2086 2118
2087 2119 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2088 2120
2089 2121 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2090 2122
2091 2123 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2092 2124
2093 2125 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2094 2126
2095 2127 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2096 2128
2097 2129 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2098 2130
2099 2131 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2100 2132
2101 2133 self.dataOut.ippSeconds = self.ippSeconds
2102 2134
2103 2135 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2104 2136
2105 2137 # self.profileIndex += 1
2106 2138
2107 2139 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2108 2140
2109 2141 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2110 2142
2111 2143 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2112 2144
2113 2145 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2114 2146
2115 2147 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2116 2148
2117 2149 if self.processingHeaderObj.code != None:
2118 2150
2119 2151 self.dataOut.nCode = self.processingHeaderObj.nCode
2120 2152
2121 2153 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2122 2154
2123 2155 self.dataOut.code = self.processingHeaderObj.code
2124 2156
2125 2157 self.dataOut.flagDecodeData = True
2126 2158
2127 2159 return self.dataOut.data_spc
2128 2160
2129 2161
class SpectraWriter(JRODataWriter):

    """
    This class writes spectra data to processed data files (.pdata). Data is
    always written to file one whole block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # dimensional shapes of the self-spectra, cross-spectra and DC buffers
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # data buffers copied from self.dataOut on every putData() call
    data_spc = None

    data_cspc = None

    data_dc = None

    def __init__(self):
        """
        Initialize the SpectraWriter for writing spectra data files.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Spectra arrive one whole block at a time, so the buffer is always
        # considered complete.
        return 1


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that compose a
        data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block (self spectra, then optional cross spectra,
        then optional DC channels) to the currently open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # shift the spectrum along axis 2 so DC ends up in the middle
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # "is not None" instead of "!= None": comparing a numpy array with
        # "!=" is elementwise and would raise on truth evaluation
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        # reset the buffers for the next block; cspc/dc may be absent
        self.data_spc.fill(0)
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Load one data block from self.dataOut and write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data, or no more files can be written
            1 : if one data block was written to file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # discontinuity in time: clear the buffers (when present) and
            # start a new output file
            if self.data_spc is not None:
                self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # cross spectra and DC channels are optional; keep None when absent
        self.data_cspc = self.dataOut.data_cspc.copy() if self.dataOut.data_cspc is not None else None
        self.data_dc = self.dataOut.data_dc.copy() if self.dataOut.data_dc is not None else None

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask describing the data type and the
        processing applied to the data (decoding, deflip, integration, DC).

        Return:
            processFlags : integer bitmask of PROCFLAG values
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if self.dataOut.dtype matches none of the entries,
        # dtypeValue is unbound and a NameError is raised (original behavior)
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "is not None": code/data_dc may be numpy arrays
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determine the number of bytes of one Spectra data block (self
        spectra, plus optional cross spectra and DC channels).
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]  # bytes per real/imag component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # self spectra are stored as real values only
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # cross spectra and DC channels store real and imaginary parts (x2)
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the "First Header" (system, radar controller and
        processing headers) from the metadata in self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        # NOTE(review): dtype 0 is tagged "Voltage" in the original source;
        # confirm the expected dtype code for spectra (.pdata) files
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # self spectra are encoded as (ch, ch) pairs, followed by the
            # cross-spectra channel pairs
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # "is not None": code may be a numpy array
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2480 2512
class SpectraHeisWriter():
    """
    Writes Heis spectra data (self.dataOut) to FITS files, creating one new
    file per putData() call inside a D<year><doy> subfolder of the output
    path.
    """

    i = 0

    def __init__(self, dataOut):

        self.wrObj = FITS()
        self.dataOut = dataOut

    def isNumber(self, str):
        """
        Check whether a string can be converted to a number.

        Input:
            str : the string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # Fix: the original definition was missing "self", so calling it as
        # an instance method raised a TypeError.
        try:
            float( str )
            return True
        except (TypeError, ValueError):
            return False

    def setup(self, wrpath):
        """Set the output path, creating the directory when missing."""

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0  # running file-set counter used in the filenames

    def putData(self):
        """
        Write the current spectra block to a new FITS file: one frequency
        column plus one power column (in dB) per channel, and a primary
        image HDU taken from channel 6.

        Return:
            1 : the file was written
        """
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)

        fullpath = os.path.join( self.wrpath, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
        self.setFile += 1
        # renamed from "file" to avoid shadowing the builtin
        fname = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,subfolder, fname)

        # frequency axis centered at zero, scaled by the sampling period
        freq = numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        # NOTE(review): exactly 8 channels are assumed below — confirm this
        # matches self.dataOut.nChannels for every acquisition setup
        col1 = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2 = self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3 = self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4 = self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5 = self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6 = self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7 = self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8 = self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9 = self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))

        n = self.dataOut.data_spc[6,:]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
        self.wrObj.CFile(a,b)
        self.wrObj.wFile(filename)
        return 1
2552 2584
class FITS:
    """
    Small convenience wrapper around pyfits for assembling and writing a
    FITS file made of a primary image HDU plus one binary table of spectra
    columns.
    """

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        """Build and return a float32 pyfits Column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        col_values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=col_values)
        return self.col1

    def writeHeader(self):
        """Placeholder: no extra header information is written."""
        pass

    def writeData(self, name, format, data):
        """Build and return a float32 pyfits Column holding *data*."""
        self.name = name
        self.format = format
        self.data = data
        col_values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=col_values)
        return self.col2

    def cFImage(self, n):
        """Create the primary image HDU from the array *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Create a binary-table HDU from the nine given columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4,
                                    col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (image + table) to be written to disk."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now