##// END OF EJS Templates
Miguel Valdez -
r295:097246e8113d
parent child
Show More
@@ -1,2601 +1,2603
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
# Local-clock offset in seconds applied when interpreting file timestamps.
# -18000 s = UTC-5 — presumably Peru local time (Jicamarca); confirm against
# how BasicHeader(LOCALTIME) uses it.
LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether the given string (or value) can be converted to a number.

    Input:
        str : value to test for convertibility to float

    Return:
        True  : the value is numeric
        False : the value is not numeric
    """
    # Narrowed from a bare "except:" so that only conversion failures are
    # swallowed (a bare except also caught KeyboardInterrupt/SystemExit).
    # float() raises ValueError for non-numeric strings and TypeError for
    # non-string/non-number inputs such as None.
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Return 1 when a Jicamarca data file holds data inside the selected
    UTC range, 0 otherwise.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : range start, in seconds counted from 01/01/1970
        endUTSeconds   : range end, in seconds counted from 01/01/1970

    Return:
        1 if basic-header utc lies in [startUTSeconds, endUTSeconds),
        0 otherwise (also when the header is invalid).

    Raises:
        IOError when the file cannot be opened.
    """
    headerObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError("The file %s can't be opened" %(filename))

    readOk = headerObj.read(fp)
    fp.close()

    if not readOk:
        print("Skipping the file %s because it has not a valid header" %(filename))
        return 0

    # half-open interval: start inclusive, end exclusive
    if startUTSeconds <= headerObj.utc < endUTSeconds:
        return 1

    return 0
78 78
def isFileinThisTime(filename, startTime, endTime):
    """
    Return 1 if the Jicamarca data file contains data whose time of day lies
    inside the selected range, 0 otherwise.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r)
        startTime : start of the selected range (datetime.time)
        endTime   : end of the selected range (datetime.time)

    Return:
        1 if the header time of day lies in [startTime, endTime),
        0 otherwise (also when the header is invalid).

    Raises:
        IOError when the file cannot be opened.
    """
    try:
        fp = open(filename,'rb')
    except:
        raise IOError("The file %s can't be opened" %(filename))

    basicHeaderObj = BasicHeader(LOCALTIME)
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" %(filename))
        return 0

    # Bug fix: only access basicHeaderObj.datatime AFTER the validity check.
    # The original computed datatime.time() before testing sts, so a file with
    # a broken header could raise instead of being skipped gracefully.
    thisTime = basicHeaderObj.datatime.time()

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return 0

    return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Filter the directory listing of *path*, keeping only names that follow
    the "xYYYYDDDSSS.ext" convention, and return the last one.

    Input:
        path : folder whose files are inspected
        ext  : extension of the files to keep (case-insensitive)

    Return:
        The last matching file name (case-insensitive lexicographic order,
        no path), or None when nothing matches.
    """
    candidates = []

    # name layout:  x YYYY DDD SSS .ext
    #               0 1234 567 89A BCDE
    for entry in os.listdir(path):
        try:
            int(entry[1:5])
            int(entry[5:8])


        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so probe every upper/lower-case combination of
    the doy folder name and the file prefix to locate a data file.

    Example:
        for .../.../D2009307/P2009307367.ext the probes are
        .../.../y2009307367.ext
        .../.../Y2009307367.ext
        .../.../x2009307/y2009307367.ext
        .../.../x2009307/Y2009307367.ext
        .../.../X2009307/y2009307367.ext
        .../.../X2009307/Y2009307367.ext

    Return:
        (fullfilename, filename) when a combination exists on disk;
        (None, last-tried filename) when none does;
        (None, None) for an unrecognized extension.
    """
    filename = None

    lowered = ext.lower()
    if lowered == ".r":          #voltage
        filePrefixes = ['d', 'D']
    elif lowered == ".pdata":    #spectra
        filePrefixes = ['p', 'P']
    else:
        return None, filename

    # sweep every possible combination
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            thispath = path
        else:
            # doy folder name xYYYYDDD (x = d or D)
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # candidate file name xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
215 215
def isDoyFolder(folder):
    """
    Return 1 when *folder* looks like a day-of-year data folder
    ("xYYYYDDD": one prefix character, 4-digit year, 3-digit doy),
    0 otherwise.
    """
    # Removed the leftover debug prints (folder/year/doy) that were emitted
    # for every directory scanned by the offline file search.
    try:
        int(folder[1:5])
    except:
        return 0

    try:
        int(folder[5:8])
    except:
        return 0
    return 1
228 230
class JRODataIO:
    """
    Base container for the state shared by Jicamarca data readers and
    writers: the four header objects, the current file handle and the
    block/profile bookkeeping counters. __init__ and run are abstract.
    """

    c = 3E8                     # speed of light [m/s]; used to derive ippSeconds

    isConfig = False            # set to True after setup() has run

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0                  # 1 = poll for growing files, 0 = fixed file list

    dtype = None                # numpy dtype of the samples (set from the headers)

    pathList = []

    filenameList = []

    filename = None

    ext = None                  # data file extension (".r" or ".pdata")

    flagIsNewFile = 1           # 1 right after a new file has been opened

    flagTimeBlock = 0           # 1 when a time discontinuity between blocks was seen

    flagIsNewBlock = 0

    fp = None                   # current open file object

    firstHeaderSize = 0

    basicHeaderSize = 24        # size in bytes of a basic header on disk

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None     # expected file size computed from the headers

    fileIndex = None            # index into filenameList (offline mode)

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30            # seconds; larger gaps set flagTimeBlock

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract: concrete subclasses must provide their own constructor.

        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: entry point used by the processing framework.

        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the current output data object.

        return self.dataOut
304 306
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Abstract reader for Jicamarca raw data files.

    Handles file searching (offline over a date/time range, online by
    polling the newest file of the newest doy folder), per-block sequencing
    and header parsing. Concrete subclasses implement createObjByDefault,
    getBlockDimension, getData, hasNotDataInBuffer and readBlock.
    """

    nReadBlocks = 0     # data blocks read so far from the current file

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0     # 1 once every available file has been consumed

    def __init__(self):

        """
        Abstract: concrete readers build their own state.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: create and return the default output object.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: derive the block/buffer shape from the headers.

        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files under *path* whose doy folder lies in
        [startDate, endDate] and whose header time of day lies in
        [startTime, endTime).

        Return:
            (pathList, filenameList), or (None, None) when nothing matches.
        """
        pathList = []

        if not walk:
            # flat layout: files live directly in *path*
            pathList.append(path)

        else:
            # walked layout: one "xYYYYDDD" folder per day of year
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                # '?' matches any folder prefix letter (d/D/...)
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))
                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s-%s" %(len(pathList), startDate, endDate)

        filenameList = []
        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                # keep only files whose basic-header time lies in the range
                if isFileinThisTime(filename, startTime, endTime):
                    filenameList.append(filename)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)

        self.filenameList = filenameList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Find the last file of the last doy folder (or of *path* itself when
        walk is False) and return the data needed to keep reading from it.

        Input:
            path : folder containing the data folders/files

            expLabel : sub-experiment name (subfolder)

            ext : file extension

            walk : when True, descend into the doy subfolders (doypath)

        Return:
            directory : folder where the file was found
            filename : last file of that folder
            year : year (YYYY field of the filename)
            doy : day of year (DDD field of the filename)
            set : set number (SSS field of the filename)


        """
        dirList = []

        if walk:

            # keep directories only
            for thisPath in os.listdir(path):
                if os.path.isdir(os.path.join(path, thisPath)):
                    dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # newest doy folder = last in case-insensitive order
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)

        else:
            fullpath = path

        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set



    def __setNextFileOffline(self):
        # Advance through self.filenameList until a verifiable file is found
        # and open it. Returns 1 on success, 0 when the list is exhausted.

        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Search for the next file holding enough data to be read inside the
        current folder; when no valid file is found, wait self.delay seconds
        and retry over the next possible candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # probe the first candidate file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file found yet: wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # try the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass, retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards, only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # still nothing: move to the next doy folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag


    def setNextFile(self):
        # Close the current file, open the next one (online or offline) and
        # read its first header. Returns 1 on success, 0 otherwise.
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 when a new data block became available in the current file,
        0 otherwise.

        In offline mode it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen the file so the OS refreshes its size, keep the position
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def __setNewBlock(self):
        # Position the reader on the next data block, rolling over to the
        # next file when the current one is exhausted.

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        # flag a time discontinuity between consecutive blocks
        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1


    def readNextBlock(self):
        # Advance to and read the next data block. Returns 1 on success.
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        # Read the processing header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar-controller header from fp (defaults to current file).
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read a basic header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)


    def __readFirstHeader(self):
        # Read the four headers that open every file, then derive the sample
        # dtype, the ipp in seconds and the expected file size.
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # DATATYPE flag -> complex sample width (0..5: i1,i2,i4,i8,f4,f8)
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # round-trip time: presumably ipp is in km (2*ipp*1000/c) — TODO confirm
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()


    def __verifyFile(self, filename, msgFlag=True):
        # Check that *filename* can be opened and already contains at least
        # one complete data block (headers + block). Returns True/False.
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # no headers parsed yet: read them from this very file
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def setup(self,
                path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                set=0,
                expLabel = "",
                ext = None,
                online = False,
                delay = 60,
                walk = True):
        """
        Configure the reader: search the files (online or offline), open the
        first one and read its first header.

        Return:
            self.dataOut on success; None when no valid online file shows up.
            Calls sys.exit(-1) when the offline search finds nothing.
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                              startTime=startTime, endTime=endTime,
                                                              set=set, expLabel=expLabel, ext=ext,
                                                              walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                datetime.datetime.combine(startDate,startTime).ctime(),
                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        # NOTE(review): in online mode pathList/filenameList are never bound in
        # this scope, so the next two lines would raise NameError — presumably
        # only the offline branch is expected to reach them; confirm.
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut

    def getData():
        # NOTE(review): missing *self* — calling this through an instance
        # raises TypeError instead of the intended ValueError; harmless while
        # every subclass overrides it, but worth fixing.

        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer():
        # NOTE(review): missing *self* — see getData.

        raise ValueError, "This method has not been implemented"

    def readBlock():
        # NOTE(review): missing *self* — see getData.

        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        # 1 once every available file has been consumed.

        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):

        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Report progress only when a fresh block has just been read.

        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks)

    def printInfo(self):
        # Dump all four headers of the current file.

        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()


    def run(self, **kwargs):
        # First call configures the reader with **kwargs; every call then
        # produces the next data unit via getData().

        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
907 909
class JRODataWriter(JRODataIO, Operation):

    """
    Writes processed data to .r or .pdata files. Data is always written
    one block at a time.
    """

    blockIndex = 0              # blocks written so far into the current file

    path = None                 # destination root folder

    setFile = None              # current set number (SSS field of the filename)

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
926 928
927 929 def __init__(self, dataOut=None):
928 930 raise ValueError, "Not implemented"
929 931
930 932
931 933 def hasAllDataInBuffer(self):
932 934 raise ValueError, "Not implemented"
933 935
934 936
935 937 def setBlockDimension(self):
936 938 raise ValueError, "Not implemented"
937 939
938 940
939 941 def writeBlock(self):
940 942 raise ValueError, "No implemented"
941 943
942 944
943 945 def putData(self):
944 946 raise ValueError, "No implemented"
945 947
946 948 def getDataHeader(self):
947 949 """
948 950 Obtiene una copia del First Header
949 951
950 952 Affected:
951 953
952 954 self.basicHeaderObj
953 955 self.systemHeaderObj
954 956 self.radarControllerHeaderObj
955 957 self.processingHeaderObj self.
956 958
957 959 Return:
958 960 None
959 961 """
960 962
961 963 raise ValueError, "No implemented"
962 964
963 965 def getBasicHeader(self):
964 966
965 967 self.basicHeaderObj.size = self.basicHeaderSize #bytes
966 968 self.basicHeaderObj.version = self.versionFile
967 969 self.basicHeaderObj.dataBlock = self.nTotalBlocks
968 970
969 971 utc = numpy.floor(self.dataOut.utctime)
970 972 milisecond = (self.dataOut.utctime - utc)* 1000.0
971 973
972 974 self.basicHeaderObj.utc = utc
973 975 self.basicHeaderObj.miliSecond = milisecond
974 976 self.basicHeaderObj.timeZone = 0
975 977 self.basicHeaderObj.dstFlag = 0
976 978 self.basicHeaderObj.errorCount = 0
977 979
978 980 def __writeFirstHeader(self):
979 981 """
980 982 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
981 983
982 984 Affected:
983 985 __dataType
984 986
985 987 Return:
986 988 None
987 989 """
988 990
989 991 # CALCULAR PARAMETROS
990 992
991 993 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
992 994 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
993 995
994 996 self.basicHeaderObj.write(self.fp)
995 997 self.systemHeaderObj.write(self.fp)
996 998 self.radarControllerHeaderObj.write(self.fp)
997 999 self.processingHeaderObj.write(self.fp)
998 1000
999 1001 self.dtype = self.dataOut.dtype
1000 1002
1001 1003 def __setNewBlock(self):
1002 1004 """
1003 1005 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1004 1006
1005 1007 Return:
1006 1008 0 : si no pudo escribir nada
1007 1009 1 : Si escribio el Basic el First Header
1008 1010 """
1009 1011 if self.fp == None:
1010 1012 self.setNextFile()
1011 1013
1012 1014 if self.flagIsNewFile:
1013 1015 return 1
1014 1016
1015 1017 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1016 1018 self.basicHeaderObj.write(self.fp)
1017 1019 return 1
1018 1020
1019 1021 if not( self.setNextFile() ):
1020 1022 return 0
1021 1023
1022 1024 return 1
1023 1025
1024 1026
1025 1027 def writeNextBlock(self):
1026 1028 """
1027 1029 Selecciona el bloque siguiente de datos y los escribe en un file
1028 1030
1029 1031 Return:
1030 1032 0 : Si no hizo pudo escribir el bloque de datos
1031 1033 1 : Si no pudo escribir el bloque de datos
1032 1034 """
1033 1035 if not( self.__setNewBlock() ):
1034 1036 return 0
1035 1037
1036 1038 self.writeBlock()
1037 1039
1038 1040 return 1
1039 1041
    def setNextFile(self):
        """
        Determine and open the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file could not be opened for writing
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        # destination subfolder dYYYYDDD derived from the data timestamp
        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename should have this layout:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # resume from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        # new file name xYYYYDDDSSS.ext (x = self.optchar)
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # save the attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1112 1114
1113 1115 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1114 1116 """
1115 1117 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1116 1118
1117 1119 Inputs:
1118 1120 path : el path destino en el cual se escribiran los files a crear
1119 1121 format : formato en el cual sera salvado un file
1120 1122 set : el setebo del file
1121 1123
1122 1124 Return:
1123 1125 0 : Si no realizo un buen seteo
1124 1126 1 : Si realizo un buen seteo
1125 1127 """
1126 1128
1127 1129 if ext == None:
1128 1130 ext = self.ext
1129 1131
1130 1132 ext = ext.lower()
1131 1133
1132 1134 self.ext = ext
1133 1135
1134 1136 self.path = path
1135 1137
1136 1138 self.setFile = set - 1
1137 1139
1138 1140 self.blocksPerFile = blocksPerFile
1139 1141
1140 1142 self.profilesPerBlock = profilesPerBlock
1141 1143
1142 1144 self.dataOut = dataOut
1143 1145
1144 1146 if not(self.setNextFile()):
1145 1147 print "There isn't a next file"
1146 1148 return 0
1147 1149
1148 1150 self.setBlockDimension()
1149 1151
1150 1152 return 1
1151 1153
1152 1154 def run(self, dataOut, **kwargs):
1153 1155
1154 1156 if not(self.isConfig):
1155 1157
1156 1158 self.setup(dataOut, **kwargs)
1157 1159 self.isConfig = True
1158 1160
1159 1161 self.putData()
1160 1162
class VoltageReader(JRODataReader):
    """
    Reader for voltage data in Jicamarca rawdata format (.r files).

    Data is always read one block at a time; a block is a 3D array
    (channels * profiles * heights) kept in "datablock". getData() copies
    one profile per call into self.dataOut (a Voltage object), reading a
    new block from disk whenever the buffer is exhausted.

    The class aggregates BasicHeader, SystemHeader, RadarControllerHeader
    and ProcessingHeader instances that hold the file metadata.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"

    optchar = "D"

    dataOut = None

    def __init__(self):
        """
        Initialize the VoltageReader.

        Creates the header containers and a default Voltage object in
        self.dataOut; one profile is copied into it on each getData() call.

        Affected:
            self.dataOut

        Return:
            None
        """

        self.isConfig = False

        self.datablock = None  # last block read: (channels, profiles, heights)

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None  # handle of the file currently being read

        self.idFile = None

        self.dtype = None  # numpy structured dtype ('real','imag') of the samples

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        # NOTE(review): filenameList is (re)initialized twice in this method.
        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # start past any block size so the first getData() forces a read
        self.profileIndex = 9999

        self.delay = 3  # seconds to wait between online retries

        self.nTries = 3  # quantity tries

        self.nFiles = 3  # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):
        # Factory for the default output container (overridable by subclasses).

        dataObj = Voltage()

        return dataObj

    def __hasNotDataInBuffer(self):
        # True (1) when every profile of the current block has been consumed.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
        self.blocksize = pts2read

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) into
        the buffer and reset the profile counter to 0.

        The raw (profiles, heights, channels) block is transposed to
        (channels, profiles, heights) and converted to complex numbers from
        its 'real'/'imag' structured fields.

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Return:
            0 : the block was truncated/incomplete
            1 : the block was read successfully
        """

        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

        try:
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            # fewer samples than expected (e.g. file still being written)
            print "The read block (%3d) has not enough data" %self.nReadBlocks
            return 0

        junk = numpy.transpose(junk, (2,0,1))
        self.datablock = junk['real'] + junk['imag']*1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getData(self):
        """
        Copy one profile (heights * channels) from the read buffer into
        self.dataOut together with its metadata, reading a new block with
        readNextBlock() when the buffer is exhausted.

        Also advances the profile counter by 1.

        Return:
            the profile array (self.dataOut.data), or 0 when no more data
            is available (self.dataOut.flagNoData is set accordingly).

        Affected:
            self.dataOut
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                return 0

        # propagate the block metadata to the output object
        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagShiftFFT = False

        if self.radarControllerHeaderObj.code != None:

            self.dataOut.nCode = self.radarControllerHeaderObj.nCode

            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud

            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagDecodeData = False  # assume the data is not decoded

        self.dataOut.flagDeflipData = False  # assume the data is not deflipped

        self.dataOut.flagShiftFFT = False

        # self.updateDataHeader()

        # datablock is a 3D numpy array (channels, profiles, heights)

        # NOTE(review): with numpy arrays `== None` is an elementwise
        # comparison, not an identity test; `is None` would be safer here.
        if self.datablock == None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = self.datablock[:,self.profileIndex,:]

        # per-profile timestamp: block epoch + ms fraction + profile offset
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds

        self.profileIndex += 1

        self.dataOut.flagNoData = False

        # print self.profileIndex, self.dataOut.utctime
        # if self.profileIndex == 800:
        #    a=1

        return self.dataOut.data
1472 1474
1473 1475
class VoltageWriter(JRODataWriter):
    """
    Writer for voltage data to processed files (.r). Data is always
    written one block at a time: profiles accumulate in self.datablock
    until a full block is reached, then the block is flushed to disk.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None

    def __init__(self):
        """
        Initialize the VoltageWriter.

        Creates the header containers and resets all bookkeeping counters.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0  # next free profile slot inside datablock

        self.isConfig = False

        self.fp = None  # handle of the file currently being written

        self.flagIsNewFile = 1

        # NOTE(review): nTotalBlocks is initialized twice in this method.
        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # True (1) when the buffer holds a complete block's worth of profiles.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Set the dimensional shapes of the sub-blocks that make up a block
        and allocate the complex accumulation buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # on-disk layout: (profiles, heights, channels)
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # in-memory layout: (channels, profiles, heights)
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """
        Write the accumulated buffer to the designated file and reset it.

        The complex in-memory buffer is transposed to the on-disk layout
        and split into the structured 'real'/'imag' fields of self.dtype.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Append the current data unit to the block buffer and flush the
        block to file once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : the data unit was accepted
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # a time gap in the input starts a fresh file with an empty buffer
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
            # self.getDataHeader()

        return 1

    def __getProcessFlags(self):
        # Build the PROCFLAG bitmask describing the output data type and
        # the processing already applied to it.

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if self.dataOut.dtype matches none of the entries,
        # dtypeValue is never bound and the line below raises NameError.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags

    def __getBlockSize(self):
        '''
        Compute the size in bytes of one Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]  # bytes per component of each dtype
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # * 2 accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header from the current data unit.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40  # bytes: fixed part of the processing header
        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1  # source data is Voltage
        self.processingHeaderObj.totalSpectra = 0  # source data is Voltage

        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            # 8 bytes of code metadata + 4 bytes per code sample
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12  # window descriptor bytes

        self.processingHeaderObj.size = processingHeaderSize
1738 1740
class SpectraReader(JRODataReader):
    """
    Reader for spectra data from processed files (.pdata).

    Data is always read one block at a time; each block is split into
    three buffers:

        equal-channel pairs     * heights * profiles  (Self Spectra)
        different-channel pairs * heights * profiles  (Cross Spectra)
        channels * heights                            (DC Channels)

    The class aggregates BasicHeader, SystemHeader, RadarControllerHeader
    and ProcessingHeader instances that hold the file metadata, plus a
    Spectra object (self.dataOut) filled on each getData() call.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    nRdChannels = None  # number of equal-channel (self-spectra) pairs

    nRdPairs = None  # number of different-channel (cross-spectra) pairs

    # NOTE(review): mutable class attribute — shared across instances until
    # getBlockDimension() rebinds it per instance.
    rdPairList = []

    def __init__(self):
        """
        Initialize the SpectraReader.

        Creates the header containers and a default Spectra object in
        self.dataOut, filled on each getData() call.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None  # handle of the file currently being read

        self.idFile = None

        self.dtype = None  # numpy structured dtype ('real','imag') of the samples

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60  # seconds to wait between online retries

        self.nTries = 3  # quantity tries

        self.nFiles = 3  # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):
        # Factory for the default output container (overridable by subclasses).

        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Spectra blocks are consumed whole, so a new block is always needed.
        return 1

    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Scans processingHeaderObj.spectraComb in channel pairs: equal
        channels count as self-spectra, different channels as cross-spectra
        pairs (collected in rdPairList).

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1  # equal-channel pair (self spectrum)
            else:
                self.nRdPairs = self.nRdPairs + 1  # different-channel pair (cross spectrum)
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

        # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp):
        self spectra (real only), then optionally cross spectra and DC
        channels (complex), un-shifting the FFT axis when the file was
        stored unshifted.

        Affected:
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            1 on success
        """
        # NOTE(review): blockOk_flag and fpointer are set but never used.
        blockOk_flag = False
        fpointer = self.fp.tell()

        # self spectra are real-valued: read only the first dtype field
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) )  # to 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) )  # to 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels )  # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) )  # to 2D array

        if not(self.processingHeaderObj.shif_fft):
            # rotate the profile axis right by half a block to center the FFT
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # same rotation for the cross spectra
                cspc = numpy.roll( cspc, shift, axis=2 )

        # self.processingHeaderObj.shif_fft = True

        # reorder to (pairs, profiles, heights)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getData(self):
        """
        Copy the read buffers into self.dataOut (a Spectra object) with
        all the associated metadata, reading a new block with
        readNextBlock() first.

        Return:
            0 : no more files/data available
            self.dataOut.data_spc on a successful copy

        Affected:
            self.dataOut

            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        # self.updateDataHeader()

        # data is a 3D numpy array (profiles, heights, channels)

        # NOTE(review): with numpy arrays `== None` is an elementwise
        # comparison, not an identity test; `is None` would be safer here.
        # This also rejects blocks whose file simply lacks DC channels.
        if self.data_dc == None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.flagNoData = False

        self.dataOut.dtype = self.dtype

        # self.dataOut.nChannels = self.nRdChannels

        self.dataOut.nPairs = self.nRdPairs

        self.dataOut.pairsList = self.rdPairList

        # self.dataOut.nHeights = self.processingHeaderObj.nHeights

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.  #+ self.profileIndex * self.ippSeconds

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints

        # self.profileIndex += 1

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft

        self.dataOut.flagDecodeData = False  # assume the data is not decoded

        self.dataOut.flagDeflipData = True  # assume the data is not deflipped

        if self.processingHeaderObj.code != None:

            self.dataOut.nCode = self.processingHeaderObj.nCode

            self.dataOut.nBaud = self.processingHeaderObj.nBaud

            self.dataOut.code = self.processingHeaderObj.code

            self.dataOut.flagDecodeData = True

        return self.dataOut.data_spc
2124 2126
2125 2127
class SpectraWriter(JRODataWriter):
    """
    Writes spectra data to processed-data files (".pdata"). Data is
    always written to disk one block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # Shape of each sub-block inside a data block, set by setBlockDimension():
    shape_spc_Buffer = None     # (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None    # (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None      # (nChannels, nHeights)

    data_spc = None             # self-spectra buffer

    data_cspc = None            # cross-spectra buffer (may be None)

    data_dc = None              # DC-channels buffer (may be None)

    def __init__(self):
        """
        Initializes the SpectraWriter instance for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # A spectra block is complete as soon as it arrives, so the buffer
        # is always ready to be written.
        return 1


    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the sub-blocks that make up a
        data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block to the currently open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose(self.data_spc, (0, 2, 1))
        if not self.processingHeaderObj.shif_fft:
            # shift positions to the right along axis 2
            # (// keeps integer division under both Python 2 and 3)
            spc = numpy.roll(spc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # NOTE: "arr != None" on a numpy array is an elementwise comparison;
        # identity checks ("is not None") are used throughout instead.
        if self.data_cspc is not None:
            data = numpy.zeros(self.shape_cspc_Buffer, self.dtype)
            cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
            if not self.processingHeaderObj.shif_fft:
                # shift positions to the right along axis 2
                cspc = numpy.roll(cspc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros(self.shape_dc_Buffer, self.dtype)
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stores a block of data and then writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # A time discontinuity starts a fresh file; reset the buffers
            # (cross-spectra / DC buffers may legitimately be absent).
            self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        self.data_cspc = self.dataOut.data_cspc.copy() if self.dataOut.data_cspc is not None else None
        self.data_dc = self.dataOut.data_dc.copy() if self.dataOut.data_dc is not None else None

        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Builds the processing-flags bitmask for the processing header from
        the current dataOut state (datatype, decode/deflip flags, code,
        incoherent integration and DC channels).
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Computes the number of bytes of a Spectra-type data block.
        '''

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1, 2, 4, 8, 4, 8]    # bytes per component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # Self-spectra are real valued (1 component); cross-spectra and DC
        # channels are complex (2 components each).
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra * datatypeValue)

        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra * datatypeValue * 2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Gets a copy of the first header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40    # bytes
        self.processingHeaderObj.dtype = 0    # Voltage -- NOTE(review): verify 0 is the intended datatype id for spectra files
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1    # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt    # needed to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # each self-spectrum is encoded as the pair (channel, channel)
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4    # bytes
            nBaudSize = 4    # bytes
            codeSize = 4     # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights) * self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2476 2478
class SpectraHeisWriter():
    """
    Writes Heis spectra data to FITS files (one file per processed
    block), using the FITS helper class to build the HDUs.
    """

    i = 0

    def __init__(self, dataOut):

        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(value):
        """
        Checks whether a string can be converted to a number.

        Input:
            value: string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # NOTE: originally declared as an instance method without "self"
        # (first parameter shadowed the builtin "str" and silently received
        # the instance), so it could never work when called on an instance.
        try:
            float(value)
            return True
        except:
            return False

    def setup(self, wrpath):
        """Creates the output directory (if needed) and resets the file counter."""

        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Builds a FITS file from the current dataOut spectra and writes it
        under <wrpath>/D<year><doy>/.

        Return:
            1 : a file was written
        """
        # file name is derived from the data timestamp
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        fullpath = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(fullpath):
            os.mkdir(fullpath)
        self.setFile += 1
        # "fname" instead of "file" to avoid shadowing the builtin
        fname = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, fname)

        freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) / (2 * self.dataOut.ippSeconds)

        fmt = str(self.dataOut.nFFTPoints) + 'E'
        col1 = self.wrObj.setColF(name="freq", format=fmt, array=freq)
        # one power column per channel (P_Ch1 .. P_Ch8), in dB
        powerCols = [self.wrObj.writeData(name="P_Ch%d" % (ch + 1),
                                          format=fmt,
                                          data=10 * numpy.log10(self.dataOut.data_spc[ch, :]))
                     for ch in range(8)]
        n = self.dataOut.data_spc[6, :]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, *powerCols)
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2548 2550
class FITS:
    """
    Thin helper around pyfits: builds float32 table columns, a primary
    image HDU and a binary table HDU, then writes them out together as a
    single FITS file.
    """

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        """Create, store and return a float32 column built from *array*."""
        self.name = name
        self.format = format
        self.array = array
        col_values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=col_values)
        return self.col1

    def writeHeader(self):
        # placeholder: headers are produced implicitly by pyfits
        pass

    def writeData(self, name, format, data):
        """Create, store and return a float32 data column built from *data*."""
        self.name = name
        self.format = format
        self.data = data
        col_values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=col_values)
        return self.col2

    def cFImage(self, n):
        """Wrap the array *n* in a primary HDU and return it."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Assemble the nine columns into a new binary-table HDU."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Bundle the primary HDU and the table HDU into an HDU list."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now