##// END OF EJS Templates
Bug fixed: Seleccion de directorio al leer en linea
Miguel Valdez -
r297:d38cd632ab05
parent child
Show More
@@ -1,2605 +1,2605
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(value):
    """
    Check whether *value* can be converted to a number.

    Input:
        value : the object (typically a string) to test for numeric
                convertibility

    Return:
        True  : if the value can be converted with float()
        False : otherwise
    """
    # float() raises ValueError for non-numeric strings and TypeError for
    # objects with no numeric conversion; only those mean "not a number".
    # The original bare except also swallowed KeyboardInterrupt/SystemExit.
    try:
        float(value)
        return True
    except (TypeError, ValueError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given
    date range.

    Inputs:
        filename       : full path of the data file (.r)

        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970.
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970.

    Return:
        1 if the file's first basic header timestamp lies inside
        [startUTSeconds, endUTSeconds), 0 otherwise.

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except Exception:
        raise IOError("The file %s can't be opened" % (filename))

    # Close the descriptor even if the header read raises; the original
    # leaked the file handle in that case.
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
def isFileinThisTime(filename, startTime, endTime):
    """
    Return 1 if the data file holds data inside the given time-of-day range.

    Inputs:
        filename : full path of the data file (.r)

        startTime : start of the selected range, as datetime.time

        endTime : end of the selected range, as datetime.time

    Return:
        1 if the file's first basic header time lies inside
        [startTime, endTime), 0 otherwise.

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """
    try:
        fp = open(filename, 'rb')
    except Exception:
        raise IOError("The file %s can't be opened" % (filename))

    basicHeaderObj = BasicHeader(LOCALTIME)

    # Close the descriptor even if the header read raises (the original
    # leaked the handle in that case).
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    # Check the read status BEFORE touching basicHeaderObj.datatime: when
    # the header read failed, datatime may be unset and the original code
    # crashed instead of skipping the file.
    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    thisTime = basicHeaderObj.datatime.time()

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return 0

    return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Filter the directory listing of *path*, keeping only files that match
    the "xYYYYDDDSSS.ext" naming convention, and return the last one of
    the surviving list.

    Input:
        path : folder whose files are inspected (only base names are used)
        ext  : extension of the wanted files (case-insensitive)

    Return:
        The last matching file name (without path) of the folder, or None
        when no file matches.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0    1234   567   89A   BCDE
    # x    YYYY   DDD   SSS   .ext
    for thisFile in fileList:
        try:
            # positions 1-4 and 5-7 must be numeric (year, day-of-year)
            int(thisFile[1:5])
            int(thisFile[5:8])
        except (ValueError, TypeError):
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        # case-insensitive sort; the last entry is the newest set
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Resolve the real, case-sensitive location of a Jicamarca data file.

    Linux file names are case sensitive, so this function probes every
    combination of folder prefix (none, 'd', 'D') and file prefix
    ('d'/'D' for .r voltage files, 'p'/'P' for .pdata spectra files)
    until an existing file is found.

    Example:
        if the real file is .../.../D2009307/P2009307367.ext the probes are
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last combination being the wanted file in this case.

    Return:
        (fullpath, filename) of the first existing combination; otherwise
        (None, filename) where filename is the last candidate probed (the
        upper-case combination), or (None, None) for an unknown extension.
    """
    if ext.lower() == ".r":        # voltage data
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":  # spectra data
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    # Probe each (directory prefix, file prefix) combination in order.
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            thispath = path
        else:
            # candidate subfolder name xYYYYDDD (x = 'd' or 'D')
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # candidate file name xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)

            if os.path.exists(fullfilename):  # does it exist?
                return fullfilename, filename

    return None, filename
215 215
def isDoyFolder(folder):
    """
    Return 1 if *folder* is named like a Jicamarca doy folder ("xYYYYDDD",
    i.e. positions 1-4 and 5-7 are numeric), otherwise 0.
    """
    # The original used two bare except clauses, which also swallowed
    # KeyboardInterrupt/SystemExit; only conversion failures matter here.
    try:
        int(folder[1:5])  # year field
        int(folder[5:8])  # day-of-year field
    except (ValueError, TypeError):
        return 0

    return 1
230 228
class JRODataIO:
    """
    Common state and abstract interface shared by the JRO data reader and
    writer classes: header objects, current-file bookkeeping and block
    counters.
    """

    # speed of light [m/s]; used to convert the radar IPP into seconds
    c = 3E8

    # set to True once setup() has run (see JRODataReader.run)
    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading in online (polling) mode, 0 for offline
    online = 0

    # numpy dtype of the raw samples, set from the first processing header
    dtype = None

    pathList = []

    filenameList = []

    # full path of the currently open data file
    filename = None

    ext = None

    flagIsNewFile = 1

    # set to 1 when the time gap between blocks exceeds maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # file object of the currently open data file
    fp = None

    # size in bytes of the first (long) header, read from the file
    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    # inter-pulse period in seconds, derived from the radar controller header
    ippSeconds = None

    # expected file size computed from the processing header fields
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed time gap [s] between consecutive blocks
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract: concrete readers/writers define their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by subclasses.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the output data object produced by this unit.
        return self.dataOut
306 304
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base class for JRO data readers: locates data files (offline by date
    range, or online by polling the newest folder/file), opens them and
    reads their headers and blocks sequentially.
    """

    # number of blocks read from the current file
    nReadBlocks = 0

    delay = 10 #number of seconds to wait for a new file

    nTries = 3 #number of retries

    nFiles = 3 #number of files to probe when searching online

    flagNoMoreFiles = 0

    def __init__(self):

        """
        Abstract constructor: subclasses must implement it.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: implemented by subclasses.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: implemented by subclasses.
        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Collect the doy folders of *path* matching the [startDate, endDate]
        range, then keep every file whose first header falls inside the
        [startTime, endTime) time-of-day window.

        Return:
            (pathList, filenameList), or (None, None) when nothing matches.
        """

        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # keep only doy-named subdirectories (xYYYYDDD)
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # one candidate folder per day in the range
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                # '?' matches the one-letter prefix ('d' or 'D')
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))
                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s-%s" %(len(pathList), startDate, endDate)

        filenameList = []
        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                # keep only files inside the time-of-day window
                if isFileinThisTime(filename, startTime, endTime):
                    filenameList.append(filename)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)

        self.filenameList = filenameList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Look for the last file of the last folder (determined or not by
        startDateTime) and return the file found plus related data.

        Input:
            path : folder containing the data subfolders

            expLabel : name of the sub-experiment (subfolder)

            ext : extension of the files

            walk : when enabled, search inside the doy subdirectories (doypath)

        Return:
            directory : the directory where the file was found
            filename : the last file of that folder
            year : the year
            doy : the day-of-year number
            set : the set number of the file


        """
        dirList = []

        if walk:

            # keep only directories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # the case-insensitively greatest folder name is the newest doy
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)

        else:
            fullpath = path

        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # decode xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set



    def __setNextFileOffline(self):
        # Advance to the next valid file of self.filenameList and open it.
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files without enough data for a complete block
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside a specific
        folder; when no valid file is found, wait a fixed time and retry
        over the next n candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : if after searching, the next valid file could not be found
            1 : if the file was opened successfully and is ready to be read

        Exceptions:
            if a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # if no file was found then wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass, retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards, only try once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this
                        # class; presumably set by the subclass — confirm.
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # wanted file not found: move to the next folder (day)
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag


    def setNextFile(self):
        # Close the current file and open the next one (online or offline),
        # then read its first (long) header.
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        If the reading mode is offline this always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen to pick up data appended by the acquisition process
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def __setNewBlock(self):
        # Position the reader at the next block, switching files when the
        # current one is exhausted; sets flagTimeBlock on large time gaps.
        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # enough data left in the current file for a whole block?
        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online mode: maybe more data is still being written to the file
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1


    def readNextBlock(self):
        # Advance to and read the next data block; 1 on success, 0 otherwise.
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        # Read the processing header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar controller header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read the basic header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)


    def __readFirstHeader(self):
        # Read all four headers of a freshly opened file and derive the
        # sample dtype, ipp in seconds and the expected file size.
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the sample data type from the processing flags
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # ipp [km] -> round-trip time in seconds using the speed of light
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()


    def __verifyFile(self, filename, msgFlag=True):
        # Check that the file can be opened and contains at least one
        # complete data block; when the processing header has not been read
        # yet (neededSize == 0), read the headers of this file to find out
        # the block size.
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def setup(self,
                path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                set=0,
                expLabel = "",
                ext = None,
                online = False,
                delay = 60,
                walk = True):
        """
        Configure the reader: search the data files (online or offline),
        open the first valid one and read its headers.

        Return:
            self.dataOut on success; exits the process when no file is
            found for the given range.
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # start one set before: setNextFile will increment it
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                  datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                  datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        # NOTE(review): pathList/filenameList are only bound in the offline
        # branch; in online mode the next two lines look like they would
        # raise NameError — confirm against the online call path.
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut

    def getData():
        # NOTE(review): missing 'self' parameter — calling this through an
        # instance raises TypeError; presumably overridden by subclasses.
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer():
        # NOTE(review): missing 'self' parameter — see getData above.
        raise ValueError, "This method has not been implemented"

    def readBlock():
        # NOTE(review): missing 'self' parameter — see getData above.
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        # True once no more files are available for reading.
        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):

        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):

        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks)

    def printInfo(self):
        # Dump all four headers of the current file.
        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()


    def run(self, **kwargs):
        # Configure the reader on the first call, then read the next unit.
        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
911 911
class JRODataWriter(JRODataIO, Operation):

    """
    This class writes data to processed-data files (.r or .pdata). The data
    is always written block by block.
    """

    # index of the block being written inside the current file
    blockIndex = 0

    # destination directory of the output files
    path = None

    # set number (SSS field) of the current output file
    setFile = None

    profilesPerBlock = None

    # number of blocks to write per file
    blocksPerFile = None

    # total number of blocks written so far
    nWriteBlocks = 0
930 930
931 931 def __init__(self, dataOut=None):
932 932 raise ValueError, "Not implemented"
933 933
934 934
935 935 def hasAllDataInBuffer(self):
936 936 raise ValueError, "Not implemented"
937 937
938 938
939 939 def setBlockDimension(self):
940 940 raise ValueError, "Not implemented"
941 941
942 942
943 943 def writeBlock(self):
944 944 raise ValueError, "No implemented"
945 945
946 946
947 947 def putData(self):
948 948 raise ValueError, "No implemented"
949 949
950 950 def getDataHeader(self):
951 951 """
952 952 Obtiene una copia del First Header
953 953
954 954 Affected:
955 955
956 956 self.basicHeaderObj
957 957 self.systemHeaderObj
958 958 self.radarControllerHeaderObj
959 959 self.processingHeaderObj self.
960 960
961 961 Return:
962 962 None
963 963 """
964 964
965 965 raise ValueError, "No implemented"
966 966
967 967 def getBasicHeader(self):
968 968
969 969 self.basicHeaderObj.size = self.basicHeaderSize #bytes
970 970 self.basicHeaderObj.version = self.versionFile
971 971 self.basicHeaderObj.dataBlock = self.nTotalBlocks
972 972
973 973 utc = numpy.floor(self.dataOut.utctime)
974 974 milisecond = (self.dataOut.utctime - utc)* 1000.0
975 975
976 976 self.basicHeaderObj.utc = utc
977 977 self.basicHeaderObj.miliSecond = milisecond
978 978 self.basicHeaderObj.timeZone = 0
979 979 self.basicHeaderObj.dstFlag = 0
980 980 self.basicHeaderObj.errorCount = 0
981 981
    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # COMPUTE PARAMETERS

        # total first-header size = basic header + the three long headers
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1004 1004
    def __setNewBlock(self):
        """
        If this is a new file, write the First Header; otherwise write only
        the Basic Header.

        Return:
            0 : if nothing could be written
            1 : if the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        # still room in the current file: prepend the basic header only
        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1
1027 1027
1028 1028
    def writeNextBlock(self):
        """
        Select the next block of data and write it to a file.

        Return:
            0 : if the data block could not be written
            1 : if the data block was written successfully
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
1043 1043
    def setNextFile(self):
        """
        Determine the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : if the file cannot be written
            1 : if the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        # output subfolder dYYYYDDD derived from the data timestamp
        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the file name is expected to follow this format:
                # 0    1234   567   89A   BCDE (hex)
                # x    YYYY   DDD   SSS   .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # continue from the set of the newest existing file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        # build the output file name xYYYYDDDSSS.ext
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # save attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1116 1116
1117 1117 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1118 1118 """
1119 1119 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1120 1120
1121 1121 Inputs:
1122 1122 path : el path destino en el cual se escribiran los files a crear
1123 1123 format : formato en el cual sera salvado un file
1124 1124 set : el setebo del file
1125 1125
1126 1126 Return:
1127 1127 0 : Si no realizo un buen seteo
1128 1128 1 : Si realizo un buen seteo
1129 1129 """
1130 1130
1131 1131 if ext == None:
1132 1132 ext = self.ext
1133 1133
1134 1134 ext = ext.lower()
1135 1135
1136 1136 self.ext = ext
1137 1137
1138 1138 self.path = path
1139 1139
1140 1140 self.setFile = set - 1
1141 1141
1142 1142 self.blocksPerFile = blocksPerFile
1143 1143
1144 1144 self.profilesPerBlock = profilesPerBlock
1145 1145
1146 1146 self.dataOut = dataOut
1147 1147
1148 1148 if not(self.setNextFile()):
1149 1149 print "There isn't a next file"
1150 1150 return 0
1151 1151
1152 1152 self.setBlockDimension()
1153 1153
1154 1154 return 1
1155 1155
1156 1156 def run(self, dataOut, **kwargs):
1157 1157
1158 1158 if not(self.isConfig):
1159 1159
1160 1160 self.setup(dataOut, **kwargs)
1161 1161 self.isConfig = True
1162 1162
1163 1163 self.putData()
1164 1164
1165 1165 class VoltageReader(JRODataReader):
1166 1166 """
1167 1167 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1168 1168 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1169 1169 perfiles*alturas*canales) son almacenados en la variable "buffer".
1170 1170
1171 1171 perfiles * alturas * canales
1172 1172
1173 1173 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1174 1174 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1175 1175 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1176 1176 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1177 1177
1178 1178 Example:
1179 1179
1180 1180 dpath = "/home/myuser/data"
1181 1181
1182 1182 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1183 1183
1184 1184 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1185 1185
1186 1186 readerObj = VoltageReader()
1187 1187
1188 1188 readerObj.setup(dpath, startTime, endTime)
1189 1189
1190 1190 while(True):
1191 1191
1192 1192 #to get one profile
1193 1193 profile = readerObj.getData()
1194 1194
1195 1195 #print the profile
1196 1196 print profile
1197 1197
1198 1198 #If you want to see all datablock
1199 1199 print readerObj.datablock
1200 1200
1201 1201 if readerObj.flagNoMoreFiles:
1202 1202 break
1203 1203
1204 1204 """
1205 1205
1206 1206 ext = ".r"
1207 1207
1208 1208 optchar = "D"
1209 1209 dataOut = None
1210 1210
1211 1211
1212 1212 def __init__(self):
1213 1213 """
1214 1214 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1215 1215
1216 1216 Input:
1217 1217 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1218 1218 almacenar un perfil de datos cada vez que se haga un requerimiento
1219 1219 (getData). El perfil sera obtenido a partir del buffer de datos,
1220 1220 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1221 1221 bloque de datos.
1222 1222 Si este parametro no es pasado se creara uno internamente.
1223 1223
1224 1224 Variables afectadas:
1225 1225 self.dataOut
1226 1226
1227 1227 Return:
1228 1228 None
1229 1229 """
1230 1230
1231 1231 self.isConfig = False
1232 1232
1233 1233 self.datablock = None
1234 1234
1235 1235 self.utc = 0
1236 1236
1237 1237 self.ext = ".r"
1238 1238
1239 1239 self.optchar = "D"
1240 1240
1241 1241 self.basicHeaderObj = BasicHeader(LOCALTIME)
1242 1242
1243 1243 self.systemHeaderObj = SystemHeader()
1244 1244
1245 1245 self.radarControllerHeaderObj = RadarControllerHeader()
1246 1246
1247 1247 self.processingHeaderObj = ProcessingHeader()
1248 1248
1249 1249 self.online = 0
1250 1250
1251 1251 self.fp = None
1252 1252
1253 1253 self.idFile = None
1254 1254
1255 1255 self.dtype = None
1256 1256
1257 1257 self.fileSizeByHeader = None
1258 1258
1259 1259 self.filenameList = []
1260 1260
1261 1261 self.filename = None
1262 1262
1263 1263 self.fileSize = None
1264 1264
1265 1265 self.firstHeaderSize = 0
1266 1266
1267 1267 self.basicHeaderSize = 24
1268 1268
1269 1269 self.pathList = []
1270 1270
1271 1271 self.filenameList = []
1272 1272
1273 1273 self.lastUTTime = 0
1274 1274
1275 1275 self.maxTimeStep = 30
1276 1276
1277 1277 self.flagNoMoreFiles = 0
1278 1278
1279 1279 self.set = 0
1280 1280
1281 1281 self.path = None
1282 1282
1283 1283 self.profileIndex = 9999
1284 1284
1285 1285 self.delay = 3 #seconds
1286 1286
1287 1287 self.nTries = 3 #quantity tries
1288 1288
1289 1289 self.nFiles = 3 #number of files for searching
1290 1290
1291 1291 self.nReadBlocks = 0
1292 1292
1293 1293 self.flagIsNewFile = 1
1294 1294
1295 1295 self.ippSeconds = 0
1296 1296
1297 1297 self.flagTimeBlock = 0
1298 1298
1299 1299 self.flagIsNewBlock = 0
1300 1300
1301 1301 self.nTotalBlocks = 0
1302 1302
1303 1303 self.blocksize = 0
1304 1304
1305 1305 self.dataOut = self.createObjByDefault()
1306 1306
1307 1307 def createObjByDefault(self):
1308 1308
1309 1309 dataObj = Voltage()
1310 1310
1311 1311 return dataObj
1312 1312
1313 1313 def __hasNotDataInBuffer(self):
1314 1314 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1315 1315 return 1
1316 1316 return 0
1317 1317
1318 1318
1319 1319 def getBlockDimension(self):
1320 1320 """
1321 1321 Obtiene la cantidad de puntos a leer por cada bloque de datos
1322 1322
1323 1323 Affected:
1324 1324 self.blocksize
1325 1325
1326 1326 Return:
1327 1327 None
1328 1328 """
1329 1329 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1330 1330 self.blocksize = pts2read
1331 1331
1332 1332
1333 1333 def readBlock(self):
1334 1334 """
1335 1335 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1336 1336 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1337 1337 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1338 1338 es seteado a 0
1339 1339
1340 1340 Inputs:
1341 1341 None
1342 1342
1343 1343 Return:
1344 1344 None
1345 1345
1346 1346 Affected:
1347 1347 self.profileIndex
1348 1348 self.datablock
1349 1349 self.flagIsNewFile
1350 1350 self.flagIsNewBlock
1351 1351 self.nTotalBlocks
1352 1352
1353 1353 Exceptions:
1354 1354 Si un bloque leido no es un bloque valido
1355 1355 """
1356 1356
1357 1357 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1358 1358
1359 1359 try:
1360 1360 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1361 1361 except:
1362 1362 print "The read block (%3d) has not enough data" %self.nReadBlocks
1363 1363 return 0
1364 1364
1365 1365 junk = numpy.transpose(junk, (2,0,1))
1366 1366 self.datablock = junk['real'] + junk['imag']*1j
1367 1367
1368 1368 self.profileIndex = 0
1369 1369
1370 1370 self.flagIsNewFile = 0
1371 1371 self.flagIsNewBlock = 1
1372 1372
1373 1373 self.nTotalBlocks += 1
1374 1374 self.nReadBlocks += 1
1375 1375
1376 1376 return 1
1377 1377
1378 1378
1379 1379 def getData(self):
1380 1380 """
1381 1381 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1382 1382 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1383 1383 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1384 1384
1385 1385 Ademas incrementa el contador del buffer en 1.
1386 1386
1387 1387 Return:
1388 1388 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1389 1389 buffer. Si no hay mas archivos a leer retorna None.
1390 1390
1391 1391 Variables afectadas:
1392 1392 self.dataOut
1393 1393 self.profileIndex
1394 1394
1395 1395 Affected:
1396 1396 self.dataOut
1397 1397 self.profileIndex
1398 1398 self.flagTimeBlock
1399 1399 self.flagIsNewBlock
1400 1400 """
1401 1401
1402 1402 if self.flagNoMoreFiles:
1403 1403 self.dataOut.flagNoData = True
1404 1404 print 'Process finished'
1405 1405 return 0
1406 1406
1407 1407 self.flagTimeBlock = 0
1408 1408 self.flagIsNewBlock = 0
1409 1409
1410 1410 if self.__hasNotDataInBuffer():
1411 1411
1412 1412 if not( self.readNextBlock() ):
1413 1413 return 0
1414 1414
1415 1415 self.dataOut.dtype = self.dtype
1416 1416
1417 1417 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1418 1418
1419 1419 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1420 1420
1421 1421 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1422 1422
1423 1423 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1424 1424
1425 1425 self.dataOut.flagTimeBlock = self.flagTimeBlock
1426 1426
1427 1427 self.dataOut.ippSeconds = self.ippSeconds
1428 1428
1429 1429 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1430 1430
1431 1431 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1432 1432
1433 1433 self.dataOut.flagShiftFFT = False
1434 1434
1435 1435 if self.radarControllerHeaderObj.code != None:
1436 1436
1437 1437 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1438 1438
1439 1439 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1440 1440
1441 1441 self.dataOut.code = self.radarControllerHeaderObj.code
1442 1442
1443 1443 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1444 1444
1445 1445 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1446 1446
1447 1447 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1448 1448
1449 1449 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1450 1450
1451 1451 self.dataOut.flagShiftFFT = False
1452 1452
1453 1453
1454 1454 # self.updateDataHeader()
1455 1455
1456 1456 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1457 1457
1458 1458 if self.datablock == None:
1459 1459 self.dataOut.flagNoData = True
1460 1460 return 0
1461 1461
1462 1462 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1463 1463
1464 1464 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1465 1465
1466 1466 self.profileIndex += 1
1467 1467
1468 1468 self.dataOut.flagNoData = False
1469 1469
1470 1470 # print self.profileIndex, self.dataOut.utctime
1471 1471 # if self.profileIndex == 800:
1472 1472 # a=1
1473 1473
1474 1474
1475 1475 return self.dataOut.data
1476 1476
1477 1477
class VoltageWriter(JRODataWriter):
    """
    Write voltage data to processed files (.r).

    Data is always written by whole blocks: profiles are accumulated in
    an internal buffer (self.datablock) and flushed to disk once the
    buffer holds a full block.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None


    def __init__(self):
        """
        Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # the buffer is full once a whole block of profiles has been set
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the buffers that compose a
        data block and allocate the write buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))


    def writeBlock(self):
        """
        Write the buffered data block to the current output file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # reorder from (channels, profiles, heights) to the on-disk
        # layout (profiles, heights, channels)
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Store one profile in the write buffer and flush the buffer to
        file once it holds a complete block.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : the profile was stored (and possibly a block written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # on a time discontinuity start a fresh file
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
#            self.getDataHeader()

        return 1

    def __getProcessFlags(self):
        """Build the PROCFLAG bitmask that describes the output data."""

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if self.dataOut.dtype matches none of the known
        # dtypes, dtypeValue is left unbound and a NameError is raised
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "is not None": code may be a numpy array, for which "!= None"
        # performs an elementwise comparison instead of an identity test
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of a Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        # NOTE(review): datatypeValue is unbound if no dtype matches
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # factor 2 accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # source data is of type Voltage

        # "is not None": code may be a numpy array (see __getProcessFlags)
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1742 1742
1743 1743 class SpectraReader(JRODataReader):
1744 1744 """
1745 1745 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1746 1746 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1747 1747 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1748 1748
1749 1749 paresCanalesIguales * alturas * perfiles (Self Spectra)
1750 1750 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1751 1751 canales * alturas (DC Channels)
1752 1752
1753 1753 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1754 1754 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1755 1755 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1756 1756 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1757 1757
1758 1758 Example:
1759 1759 dpath = "/home/myuser/data"
1760 1760
1761 1761 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1762 1762
1763 1763 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1764 1764
1765 1765 readerObj = SpectraReader()
1766 1766
1767 1767 readerObj.setup(dpath, startTime, endTime)
1768 1768
1769 1769 while(True):
1770 1770
1771 1771 readerObj.getData()
1772 1772
1773 1773 print readerObj.data_spc
1774 1774
1775 1775 print readerObj.data_cspc
1776 1776
1777 1777 print readerObj.data_dc
1778 1778
1779 1779 if readerObj.flagNoMoreFiles:
1780 1780 break
1781 1781
1782 1782 """
1783 1783
1784 1784 pts2read_SelfSpectra = 0
1785 1785
1786 1786 pts2read_CrossSpectra = 0
1787 1787
1788 1788 pts2read_DCchannels = 0
1789 1789
1790 1790 ext = ".pdata"
1791 1791
1792 1792 optchar = "P"
1793 1793
1794 1794 dataOut = None
1795 1795
1796 1796 nRdChannels = None
1797 1797
1798 1798 nRdPairs = None
1799 1799
1800 1800 rdPairList = []
1801 1801
1802 1802
1803 1803 def __init__(self):
1804 1804 """
1805 1805 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1806 1806
1807 1807 Inputs:
1808 1808 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1809 1809 almacenar un perfil de datos cada vez que se haga un requerimiento
1810 1810 (getData). El perfil sera obtenido a partir del buffer de datos,
1811 1811 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1812 1812 bloque de datos.
1813 1813 Si este parametro no es pasado se creara uno internamente.
1814 1814
1815 1815 Affected:
1816 1816 self.dataOut
1817 1817
1818 1818 Return : None
1819 1819 """
1820 1820
1821 1821 self.isConfig = False
1822 1822
1823 1823 self.pts2read_SelfSpectra = 0
1824 1824
1825 1825 self.pts2read_CrossSpectra = 0
1826 1826
1827 1827 self.pts2read_DCchannels = 0
1828 1828
1829 1829 self.datablock = None
1830 1830
1831 1831 self.utc = None
1832 1832
1833 1833 self.ext = ".pdata"
1834 1834
1835 1835 self.optchar = "P"
1836 1836
1837 1837 self.basicHeaderObj = BasicHeader(LOCALTIME)
1838 1838
1839 1839 self.systemHeaderObj = SystemHeader()
1840 1840
1841 1841 self.radarControllerHeaderObj = RadarControllerHeader()
1842 1842
1843 1843 self.processingHeaderObj = ProcessingHeader()
1844 1844
1845 1845 self.online = 0
1846 1846
1847 1847 self.fp = None
1848 1848
1849 1849 self.idFile = None
1850 1850
1851 1851 self.dtype = None
1852 1852
1853 1853 self.fileSizeByHeader = None
1854 1854
1855 1855 self.filenameList = []
1856 1856
1857 1857 self.filename = None
1858 1858
1859 1859 self.fileSize = None
1860 1860
1861 1861 self.firstHeaderSize = 0
1862 1862
1863 1863 self.basicHeaderSize = 24
1864 1864
1865 1865 self.pathList = []
1866 1866
1867 1867 self.lastUTTime = 0
1868 1868
1869 1869 self.maxTimeStep = 30
1870 1870
1871 1871 self.flagNoMoreFiles = 0
1872 1872
1873 1873 self.set = 0
1874 1874
1875 1875 self.path = None
1876 1876
1877 1877 self.delay = 60 #seconds
1878 1878
1879 1879 self.nTries = 3 #quantity tries
1880 1880
1881 1881 self.nFiles = 3 #number of files for searching
1882 1882
1883 1883 self.nReadBlocks = 0
1884 1884
1885 1885 self.flagIsNewFile = 1
1886 1886
1887 1887 self.ippSeconds = 0
1888 1888
1889 1889 self.flagTimeBlock = 0
1890 1890
1891 1891 self.flagIsNewBlock = 0
1892 1892
1893 1893 self.nTotalBlocks = 0
1894 1894
1895 1895 self.blocksize = 0
1896 1896
1897 1897 self.dataOut = self.createObjByDefault()
1898 1898
1899 1899
1900 1900 def createObjByDefault(self):
1901 1901
1902 1902 dataObj = Spectra()
1903 1903
1904 1904 return dataObj
1905 1905
1906 1906 def __hasNotDataInBuffer(self):
1907 1907 return 1
1908 1908
1909 1909
1910 1910 def getBlockDimension(self):
1911 1911 """
1912 1912 Obtiene la cantidad de puntos a leer por cada bloque de datos
1913 1913
1914 1914 Affected:
1915 1915 self.nRdChannels
1916 1916 self.nRdPairs
1917 1917 self.pts2read_SelfSpectra
1918 1918 self.pts2read_CrossSpectra
1919 1919 self.pts2read_DCchannels
1920 1920 self.blocksize
1921 1921 self.dataOut.nChannels
1922 1922 self.dataOut.nPairs
1923 1923
1924 1924 Return:
1925 1925 None
1926 1926 """
1927 1927 self.nRdChannels = 0
1928 1928 self.nRdPairs = 0
1929 1929 self.rdPairList = []
1930 1930
1931 1931 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1932 1932 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1933 1933 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1934 1934 else:
1935 1935 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1936 1936 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1937 1937
1938 1938 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1939 1939
1940 1940 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1941 1941 self.blocksize = self.pts2read_SelfSpectra
1942 1942
1943 1943 if self.processingHeaderObj.flag_cspc:
1944 1944 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1945 1945 self.blocksize += self.pts2read_CrossSpectra
1946 1946
1947 1947 if self.processingHeaderObj.flag_dc:
1948 1948 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1949 1949 self.blocksize += self.pts2read_DCchannels
1950 1950
1951 1951 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1952 1952
1953 1953
1954 1954 def readBlock(self):
1955 1955 """
1956 1956 Lee el bloque de datos desde la posicion actual del puntero del archivo
1957 1957 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1958 1958 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1959 1959 es seteado a 0
1960 1960
1961 1961 Return: None
1962 1962
1963 1963 Variables afectadas:
1964 1964
1965 1965 self.flagIsNewFile
1966 1966 self.flagIsNewBlock
1967 1967 self.nTotalBlocks
1968 1968 self.data_spc
1969 1969 self.data_cspc
1970 1970 self.data_dc
1971 1971
1972 1972 Exceptions:
1973 1973 Si un bloque leido no es un bloque valido
1974 1974 """
1975 1975 blockOk_flag = False
1976 1976 fpointer = self.fp.tell()
1977 1977
1978 1978 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1979 1979 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1980 1980
1981 1981 if self.processingHeaderObj.flag_cspc:
1982 1982 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1983 1983 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1984 1984
1985 1985 if self.processingHeaderObj.flag_dc:
1986 1986 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1987 1987 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
1988 1988
1989 1989
1990 1990 if not(self.processingHeaderObj.shif_fft):
1991 1991 #desplaza a la derecha en el eje 2 determinadas posiciones
1992 1992 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1993 1993 spc = numpy.roll( spc, shift , axis=2 )
1994 1994
1995 1995 if self.processingHeaderObj.flag_cspc:
1996 1996 #desplaza a la derecha en el eje 2 determinadas posiciones
1997 1997 cspc = numpy.roll( cspc, shift, axis=2 )
1998 1998
1999 1999 # self.processingHeaderObj.shif_fft = True
2000 2000
2001 2001 spc = numpy.transpose( spc, (0,2,1) )
2002 2002 self.data_spc = spc
2003 2003
2004 2004 if self.processingHeaderObj.flag_cspc:
2005 2005 cspc = numpy.transpose( cspc, (0,2,1) )
2006 2006 self.data_cspc = cspc['real'] + cspc['imag']*1j
2007 2007 else:
2008 2008 self.data_cspc = None
2009 2009
2010 2010 if self.processingHeaderObj.flag_dc:
2011 2011 self.data_dc = dc['real'] + dc['imag']*1j
2012 2012 else:
2013 2013 self.data_dc = None
2014 2014
2015 2015 self.flagIsNewFile = 0
2016 2016 self.flagIsNewBlock = 1
2017 2017
2018 2018 self.nTotalBlocks += 1
2019 2019 self.nReadBlocks += 1
2020 2020
2021 2021 return 1
2022 2022
2023 2023
2024 2024 def getData(self):
2025 2025 """
2026 2026 Copia el buffer de lectura a la clase "Spectra",
2027 2027 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2028 2028 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2029 2029
2030 2030 Return:
2031 2031 0 : Si no hay mas archivos disponibles
2032 2032 1 : Si hizo una buena copia del buffer
2033 2033
2034 2034 Affected:
2035 2035 self.dataOut
2036 2036
2037 2037 self.flagTimeBlock
2038 2038 self.flagIsNewBlock
2039 2039 """
2040 2040
2041 2041 if self.flagNoMoreFiles:
2042 2042 self.dataOut.flagNoData = True
2043 2043 print 'Process finished'
2044 2044 return 0
2045 2045
2046 2046 self.flagTimeBlock = 0
2047 2047 self.flagIsNewBlock = 0
2048 2048
2049 2049 if self.__hasNotDataInBuffer():
2050 2050
2051 2051 if not( self.readNextBlock() ):
2052 2052 self.dataOut.flagNoData = True
2053 2053 return 0
2054 2054
2055 2055 # self.updateDataHeader()
2056 2056
2057 2057 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2058 2058
2059 2059 if self.data_dc == None:
2060 2060 self.dataOut.flagNoData = True
2061 2061 return 0
2062 2062
2063 2063 self.dataOut.data_spc = self.data_spc
2064 2064
2065 2065 self.dataOut.data_cspc = self.data_cspc
2066 2066
2067 2067 self.dataOut.data_dc = self.data_dc
2068 2068
2069 2069 self.dataOut.flagTimeBlock = self.flagTimeBlock
2070 2070
2071 2071 self.dataOut.flagNoData = False
2072 2072
2073 2073 self.dataOut.dtype = self.dtype
2074 2074
2075 2075 # self.dataOut.nChannels = self.nRdChannels
2076 2076
2077 2077 self.dataOut.nPairs = self.nRdPairs
2078 2078
2079 2079 self.dataOut.pairsList = self.rdPairList
2080 2080
2081 2081 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2082 2082
2083 2083 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2084 2084
2085 2085 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2086 2086
2087 2087 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2088 2088
2089 2089 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2090 2090
2091 2091 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2092 2092
2093 2093 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2094 2094
2095 2095 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2096 2096
2097 2097 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2098 2098
2099 2099 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2100 2100
2101 2101 self.dataOut.ippSeconds = self.ippSeconds
2102 2102
2103 2103 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2104 2104
2105 2105 # self.profileIndex += 1
2106 2106
2107 2107 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2108 2108
2109 2109 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2110 2110
2111 2111 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2112 2112
2113 2113 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2114 2114
2115 2115 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2116 2116
2117 2117 if self.processingHeaderObj.code != None:
2118 2118
2119 2119 self.dataOut.nCode = self.processingHeaderObj.nCode
2120 2120
2121 2121 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2122 2122
2123 2123 self.dataOut.code = self.processingHeaderObj.code
2124 2124
2125 2125 self.dataOut.flagDecodeData = True
2126 2126
2127 2127 return self.dataOut.data_spc
2128 2128
2129 2129
class SpectraWriter(JRODataWriter):

    """
    Writes Spectra data to processed-data files (.pdata). Data is always
    written to disk one whole block at a time.
    """

    # File extension and type character used when building .pdata file names.
    ext = ".pdata"

    optchar = "P"

    # Output-buffer shapes, (channels-or-pairs, heights, profiles),
    # computed by setBlockDimension().
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # Per-block data copied from self.dataOut by putData(): self-spectra,
    # cross-spectra and DC channels (numpy arrays).
    data_spc = None

    data_cspc = None

    data_dc = None

    # dataOut = None

    def __init__(self):
        """
        SpectraWriter initializer.

        Resets flags/counters and creates fresh (empty) header objects; the
        actual configuration happens later through the JRODataWriter machinery.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0  # NOTE(review): duplicate of the assignment above

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Spectra arrive one complete block at a time, so the buffer is
        # always considered ready to be written.
        return 1


    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the sub-blocks (self-spectra,
        cross-spectra and DC channels) that make up one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block (spc, cspc and dc) to the designated file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        # Self-spectra are stored on disk as (channels, profiles, heights).
        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # NOTE(review): Python-2 integer division is relied upon here.
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shifts positions to the right along axis 2
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # NOTE(review): `!= None` on a numpy array is an elementwise
        # comparison; it only works as a None test with the numpy version
        # this code was written against — confirm before upgrading numpy.
        if self.data_cspc != None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shifts positions to the right along axis 2
            # Cross-spectra are complex: stored as interleaved real/imag records.
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc != None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # NOTE(review): data_dc is zeroed unconditionally even though its
        # write above is guarded — assumes data_dc is never None here; TODO confirm.
        self.data_dc.fill(0)
        if self.data_cspc != None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Copies one block of data from self.dataOut and writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if one block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # A time discontinuity zeroes the buffers and starts a new file.
            # NOTE(review): data_cspc/data_dc are zeroed here without the
            # None guard that writeBlock() uses — confirm they are arrays here.
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Builds the PROCFLAG bit mask describing the sample datatype and the
        processing applied to self.dataOut (decode, deflip, incoherent
        integration, DC channels saved).
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        # PROCFLAG datatype constants, index-aligned with dtypeList above.
        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if dataOut.dtype matches none of the six entries,
        # dtypeValue is left unbound and the next statement raises — TODO
        # confirm dtype is always one of the supported types.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc != None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determines the size in bytes of one Spectra-type data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        # Bytes per sample component, index-aligned with dtypeList.
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # Cross-spectra and DC samples are complex: 2 components each.
        if self.dataOut.data_cspc != None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc != None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize #* datatypeValue * 2 # FIX THIS

        return blocksize

    def getDataHeader(self):

        """
        Builds a copy of the First Header of the output file from the current
        state of self.dataOut (system, radar-controller and processing headers).

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        # Fixed part of the processing header; variable parts are added below.
        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage  NOTE(review): 0 denotes Voltage — confirm intended for spectra files
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to derive the timeInterval value
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # Self-spectra are encoded as (ch, ch) duplicated entries.
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2480 2480
class SpectraHeisWriter():
    """
    Writes SpectraHeis power spectra (8 channels) into per-day FITS files,
    using a FITS helper object to assemble each output file.
    """

    i = 0

    def __init__(self, dataOut):
        # FITS helper used to build and write every output file.
        self.wrObj = FITS()
        self.dataOut = dataOut

    def isNumber(str):
        """
        Return True when *str* can be converted to a float, False otherwise.

        NOTE(review): declared without a `self` parameter, so calling it on
        an instance passes the instance itself as *str*; kept as-is.
        """
        try:
            float(str)
        except:
            return False
        return True

    def setup(self, wrpath,):
        # Create the output root directory if it is missing, then reset
        # the per-day file counter.
        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Write the current self.dataOut spectra as one FITS file under a
        D<year><doy> subfolder. Returns 1 on completion.
        """
        tlocal = time.localtime(self.dataOut.utctime)
        suffix = ".fits"
        # One subdirectory per day: D<year><day-of-year>.
        subfolder = 'D%4.4d%3.3d' % (tlocal.tm_year, tlocal.tm_yday)

        daydir = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(daydir):
            os.mkdir(daydir)
        self.setFile += 1
        fname = 'D%4.4d%3.3d%3.3d%s' % (tlocal.tm_year, tlocal.tm_yday, self.setFile, suffix)

        filename = os.path.join(self.wrpath, subfolder, fname)

        # Zero-centred frequency axis derived from the IPP.
        freqaxis = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        fitsfmt = str(self.dataOut.nFFTPoints) + 'E'
        freqcol = self.wrObj.setColF(name="freq", format=fitsfmt, array=freqaxis)

        # One power column (in dB) per channel: P_Ch1 .. P_Ch8.
        powercols = []
        for ch in range(8):
            chpower = 10*numpy.log10(self.dataOut.data_spc[ch,:])
            powercols.append(self.wrObj.writeData(name="P_Ch%d" % (ch+1), format=fitsfmt, data=chpower))

        image = self.wrObj.cFImage(self.dataOut.data_spc[6,:])
        table = self.wrObj.Ctable(freqcol, *powercols)
        self.wrObj.CFile(image, table)
        self.wrObj.wFile(filename)
        return 1
2552 2552
class FITS:
    """
    Thin wrapper around pyfits for assembling one FITS file holding a
    primary image HDU plus a nine-column binary table HDU.
    """

    # Metadata of the most recent column handed to setColF()/writeData().
    name = None
    format = None
    array = None
    data = None
    # HDU list built by CFile(), written to disk by wFile().
    thdulist = None

    def __init__(self):

        pass

    def setColF(self,name,format,array):
        """Build and return a float32 pyfits column from *array*."""
        self.name, self.format, self.array = name, format, array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeHeader(self,):
        # Placeholder: headers are produced implicitly by pyfits.
        pass

    def writeData(self,name,format,data):
        """Build and return a float32 pyfits data column from *data*."""
        self.name, self.format, self.data = name, format, data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self,n):
        """Wrap *n* in a primary HDU and return it."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
        """Build and return a binary-table HDU from the nine columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self,hdu,tbhdu):
        # Combine the image and table HDUs into one HDU list.
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self,filename):
        # Write the assembled HDU list to *filename*.
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now