##// END OF EJS Templates
Bug fixed: Seleccion de directorio al leer en linea
Miguel Valdez -
r296:0873c470e12e
parent child
Show More
@@ -1,2603 +1,2605
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether a string can be converted to a number.

    Input:
        str : the value to test (name kept for backward compatibility with
              existing callers, although it shadows the builtin)

    Return:
        True  : the string is numeric
        False : the string is not numeric
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # Only conversion failures mean "not a number"; the original bare
        # except also swallowed unrelated errors such as KeyboardInterrupt.
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given date range.

    Inputs:
        filename       : full path of the data file in Jicamarca format (.r)

        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970.
        endUTSeconds   : end of the selected range, given in seconds
                         counted from 01/01/1970.

    Return:
        1 when the timestamp of the file's basic header falls inside the
        half-open range [startUTSeconds, endUTSeconds), 0 otherwise
        (also 0 when the header cannot be read).

    Exceptions:
        IOError when the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # only the first basic header is inspected: the file is accepted when
    # its starting timestamp lies inside the range
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisTime = basicHeaderObj.datatime.time()
111 111
112 112 if not(sts):
113 113 print "Skipping the file %s because it has not a valid header" %(filename)
114 114 return 0
115 115
116 116 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 117 return 0
118 118
119 119 return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Keep only the entries of *path* whose names follow the "PYYYYDDDSSS.ext"
    layout and return the last one after a case-insensitive sort.

    Input:
        path : folder holding the candidate files
        ext  : extension of the files inside the folder

    Return:
        The last valid file name (without path), or None when nothing matches.
    """
    # name layout (hex offsets):
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    extLower = ext.lower()
    candidates = []

    for entry in os.listdir(path):
        # characters 1:5 must be the year and 5:8 the day of year
        try:
            int(entry[1:5])
            int(entry[5:8])
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != extLower:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so probe every upper/lower-case combination of
    the directory prefix (none / 'd' / 'D') and the file prefix in order to
    locate the real on-disk path of a data file.

    Example:
        for .../D2009307/P2009307367.ext the following names are probed:
            .../y2009307367.ext           .../Y2009307367.ext
            .../x2009307/y2009307367.ext  .../x2009307/Y2009307367.ext
            .../X2009307/y2009307367.ext  .../X2009307/Y2009307367.ext

    Return:
        (fullpath, filename) when one combination exists on disk;
        (None, filename) otherwise, filename being the last combination
        tried (None when the extension is not recognized).
    """
    extLower = ext.lower()
    if extLower == ".r":          #voltage
        filePrefixes = ['d', 'D']
    elif extLower == ".pdata":    #spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        else:
            # directory named xYYYYDDD (x = 'd' or 'D')
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # candidate file xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)
            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
215 215
def isDoyFolder(folder):
    """
    Return 1 when *folder* is named like a day-of-year data folder
    ("xYYYYDDD...": one-char prefix, 4-digit year, 3-digit day), 0 otherwise.
    """
    # Leftover debug prints removed: they spammed stdout for every
    # directory scanned during a file search.

    # characters 1:5 must parse as the year
    try:
        int(folder[1:5])
    except (TypeError, ValueError):
        return 0

    # characters 5:8 must parse as the day of year
    try:
        int(folder[5:8])
    except (TypeError, ValueError):
        return 0

    return 1
230 230
class JRODataIO:
    """
    Base class shared by the JRO data readers and writers: holds the header
    objects, the file bookkeeping attributes and the common accessors.
    """

    # speed of light (m/s), used to convert the IPP into seconds
    c = 3E8

    # True once setup() has been executed by a subclass
    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading files while they are being written, 0 for offline mode
    online = 0

    # numpy dtype of a data sample, derived from the processing header
    dtype = None

    pathList = []

    filenameList = []

    # full path of the currently opened file
    filename = None

    ext = None

    # 1 right after a new file has been opened
    flagIsNewFile = 1

    # 1 when the time gap between consecutive blocks exceeded maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # file object of the currently opened file
    fp = None

    firstHeaderSize = 0

    # size in bytes of a basic header on disk
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    # inter-pulse period in seconds, derived from the radar controller header
    ippSeconds = None

    # expected file size computed from the headers
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum tolerated gap (seconds) between consecutive blocks
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract base: subclasses must provide their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by the reader/writer subclasses.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the output data object filled by the subclass.
        return self.dataOut
306 306
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for JRO data files; concrete readers implement the block
    reading and buffer handling methods.
    """

    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0

    def __init__(self):

        """
        Abstract constructor: concrete readers must implement it.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: create the default output object.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: define the dimensions of a data block.
        raise ValueError, "No implemented"
337 337
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Collect, offline, every data file under *path* whose doy folder lies
        inside [startDate, endDate] and whose header time lies inside
        [startTime, endTime).

        Inputs:
            path      : root folder of the data
            startDate : first date of the range (datetime.date)
            endDate   : last date of the range (datetime.date)
            startTime : start time-of-day (datetime.time)
            endTime   : end time-of-day (datetime.time)
            set       : unused here, kept for interface symmetry
            expLabel  : sub-experiment folder name appended to each doy folder
            ext       : file extension to match
            walk      : when True, descend into xYYYYDDD subfolders;
                        when False, search *path* directly

        Return:
            (pathList, filenameList), or (None, None) when nothing matched.
            Also stores the result in self.filenameList.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # keep only subdirectories named like doy folders
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # one folder per day: match '?YYYYDDD' against the dir names
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))
                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s-%s" %(len(pathList), startDate, endDate)

        # keep only the files whose header time lies inside the time range
        filenameList = []
        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)

                if isFileinThisTime(filename, startTime, endTime):
                    filenameList.append(filename)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)

        self.filenameList = filenameList

        return pathList, filenameList
409 409
410 410 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
411 411
412 412 """
413 413 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
414 414 devuelve el archivo encontrado ademas de otros datos.
415 415
416 416 Input:
417 417 path : carpeta donde estan contenidos los files que contiene data
418 418
419 419 expLabel : Nombre del subexperimento (subfolder)
420 420
421 421 ext : extension de los files
422 422
423 423 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
424 424
425 425 Return:
426 426 directory : eL directorio donde esta el file encontrado
427 427 filename : el ultimo file de una determinada carpeta
428 428 year : el anho
429 429 doy : el numero de dia del anho
430 430 set : el set del archivo
431 431
432 432
433 433 """
434 434 dirList = []
435 435
436 436 if walk:
437 437
438 438 #Filtra solo los directorios
439 439 for thisPath in os.listdir(path):
440 if os.path.isdir(os.path.join(path, thisPath)):
441 dirList.append(thisPath)
440 if not os.path.isdir(os.path.join(path,thisPath)):
441 continue
442 if not isDoyFolder(thisPath):
443 continue
442 444
443 445 if not(dirList):
444 446 return None, None, None, None, None
445 447
446 448 dirList = sorted( dirList, key=str.lower )
447 449
448 450 doypath = dirList[-1]
449 451 fullpath = os.path.join(path, doypath, expLabel)
450 452
451 453 else:
452 454 fullpath = path
453 455
454 456 print "%s folder was found: " %(fullpath )
455 457
456 458 filename = getlastFileFromPath(fullpath, ext)
457 459
458 460 if not(filename):
459 461 return None, None, None, None, None
460 462
461 463 print "%s file was found" %(filename)
462 464
463 465 if not(self.__verifyFile(os.path.join(fullpath, filename))):
464 466 return None, None, None, None, None
465 467
466 468 year = int( filename[1:5] )
467 469 doy = int( filename[5:8] )
468 470 set = int( filename[8:11] )
469 471
470 472 return fullpath, filename, year, doy, set
471 473
472 474
473 475
    def __setNextFileOffline(self):
        """
        Advance to the next readable file of self.filenameList, skipping the
        files that fail verification.

        Affected:
            self.flagIsNewFile, self.fileIndex, self.filename,
            self.fileSize, self.fp, self.flagNoMoreFiles

        Return:
            1 when a file was opened, 0 when the list is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files without enough data
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1
503 505
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the current
        folder; when no valid file is found, wait and probe the following
        possible file sets (and eventually the next day folder).

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid file could be found after the search
            1 : a file was opened successfully and is ready to be read

        Exceptions:
            When a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file found: wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # still nothing: move on to the next day folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
588 590
589 591
590 592 def setNextFile(self):
591 593 if self.fp != None:
592 594 self.fp.close()
593 595
594 596 if self.online:
595 597 newFile = self.__setNextFileOnline()
596 598 else:
597 599 newFile = self.__setNextFileOffline()
598 600
599 601 if not(newFile):
600 602 return 0
601 603
602 604 self.__readFirstHeader()
603 605 self.nReadBlocks = 0
604 606 return 1
605 607
    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        # nothing more to wait for once the file already holds all its blocks
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen and reseek: the writer may have appended data meanwhile
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
640 642
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block: read the next basic
        header in the current file, wait for one (online), or jump to the
        next file when the current one is exhausted.

        Return:
            1 when a block is ready to be read, 0 otherwise.
        """
        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            return 1

        # remember the last timestamp before moving on, so a gap can be
        # detected after switching files
        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # flag a time discontinuity when the gap between blocks is too large
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
671 673
672 674
673 675 def readNextBlock(self):
674 676 if not(self.__setNewBlock()):
675 677 return 0
676 678
677 679 if not(self.readBlock()):
678 680 return 0
679 681
680 682 return 1
681 683
682 684 def __rdProcessingHeader(self, fp=None):
683 685 if fp == None:
684 686 fp = self.fp
685 687
686 688 self.processingHeaderObj.read(fp)
687 689
688 690 def __rdRadarControllerHeader(self, fp=None):
689 691 if fp == None:
690 692 fp = self.fp
691 693
692 694 self.radarControllerHeaderObj.read(fp)
693 695
694 696 def __rdSystemHeader(self, fp=None):
695 697 if fp == None:
696 698 fp = self.fp
697 699
698 700 self.systemHeaderObj.read(fp)
699 701
700 702 def __rdBasicHeader(self, fp=None):
701 703 if fp == None:
702 704 fp = self.fp
703 705
704 706 self.basicHeaderObj.read(fp)
705 707
706 708
    def __readFirstHeader(self):
        """
        Read the four headers at the start of the file and derive from them
        the sample dtype, the IPP in seconds and the expected file size.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the sample data type index from the processing flags
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # presumably ipp is in km (2*1000*ipp/c gives a round trip in
        # seconds) — TODO confirm against RadarControllerHeader
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # N blocks + first (long) header + N-1 intermediate basic headers
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
737 739
738 740
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and holds at least one complete
        data block.

        Inputs:
            filename : full path of the file to check
            msgFlag  : when True, print diagnostic messages

        Return:
            True when the file has enough data, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # headers not parsed yet: read this file's own headers to learn
            # how large a block is
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
784 786
785 787 def setup(self,
786 788 path=None,
787 789 startDate=None,
788 790 endDate=None,
789 791 startTime=datetime.time(0,0,0),
790 792 endTime=datetime.time(23,59,59),
791 793 set=0,
792 794 expLabel = "",
793 795 ext = None,
794 796 online = False,
795 797 delay = 60,
796 798 walk = True):
797 799
798 800 if path == None:
799 801 raise ValueError, "The path is not valid"
800 802
801 803 if ext == None:
802 804 ext = self.ext
803 805
804 806 if online:
805 807 print "Searching files in online mode..."
806 808
807 809 for nTries in range( self.nTries ):
808 810 fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
809 811
810 812 if fullpath:
811 813 break
812 814
813 815 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
814 816 time.sleep( self.delay )
815 817
816 818 if not(fullpath):
817 819 print "There 'isn't valied files in %s" % path
818 820 return None
819 821
820 822 self.year = year
821 823 self.doy = doy
822 824 self.set = set - 1
823 825 self.path = path
824 826
825 827 else:
826 828 print "Searching files in offline mode ..."
827 829 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
828 830 startTime=startTime, endTime=endTime,
829 831 set=set, expLabel=expLabel, ext=ext,
830 832 walk=walk)
831 833
832 834 if not(pathList):
833 835 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
834 836 datetime.datetime.combine(startDate,startTime).ctime(),
835 837 datetime.datetime.combine(endDate,endTime).ctime())
836 838
837 839 sys.exit(-1)
838 840
839 841
840 842 self.fileIndex = -1
841 843 self.pathList = pathList
842 844 self.filenameList = filenameList
843 845
844 846 self.online = online
845 847 self.delay = delay
846 848 ext = ext.lower()
847 849 self.ext = ext
848 850
849 851 if not(self.setNextFile()):
850 852 if (startDate!=None) and (endDate!=None):
851 853 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
852 854 elif startDate != None:
853 855 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
854 856 else:
855 857 print "No files"
856 858
857 859 sys.exit(-1)
858 860
859 861 # self.updateDataHeader()
860 862
861 863 return self.dataOut
862 864
863 865 def getData():
864 866
865 867 raise ValueError, "This method has not been implemented"
866 868
867 869 def hasNotDataInBuffer():
868 870
869 871 raise ValueError, "This method has not been implemented"
870 872
871 873 def readBlock():
872 874
873 875 raise ValueError, "This method has not been implemented"
874 876
    def isEndProcess(self):
        """Return the no-more-files flag (1 once the input is exhausted)."""
        return self.flagNoMoreFiles

    def printReadBlocks(self):
        # Report how many blocks have been read from the current file.
        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):
        # Report how many blocks have been read overall.
        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Print the block counters only when a new block was just read.
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks)

    def printInfo(self):
        # Dump every header of the current file.
        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()


    def run(self, **kwargs):
        """
        ProcessingUnit entry point: configure on the first call, then deliver
        data through getData().
        """
        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
909 911
class JRODataWriter(JRODataIO, Operation):

    """
    Write processed data to files (.r or .pdata); data is always written
    block by block.
    """

    # index of the block inside the current file
    blockIndex = 0

    path = None

    # set number of the current file (SSS field of the name)
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract: concrete writers must implement the constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: report whether a full block has been buffered.
        raise ValueError, "Not implemented"
935 937
936 938
    def setBlockDimension(self):
        # Abstract: define the shape of a data block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one data block to the current file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: accept incoming data and trigger block writes.
        raise ValueError, "No implemented"

    def getDataHeader(self):
        """
        Get a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
964 966
    def getBasicHeader(self):
        """Fill the basic header fields from the current output timestamp."""
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split the UTC timestamp into integer seconds and milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = 0
        self.basicHeaderObj.dstFlag = 0
        self.basicHeaderObj.errorCount = 0
979 981
    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header followed by the
        long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute the total size of the first (basic + long) header
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1002 1004
    def __setNewBlock(self):
        """
        When the file is new write the First Header, otherwise write only a
        Basic Header; roll over to the next file once the per-file block
        quota is reached.

        Return:
            0 : nothing could be written
            1 : the Basic (or First) header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1
1025 1027
1026 1028
    def writeNextBlock(self):
        """
        Select the next data block and write it to the current file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
1041 1043
    def setNextFile(self):
        """
        Determine the next file to be written, create its doy subfolder when
        needed, open it and write its First Header.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must follow this layout:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #resume from the set of the last existing file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # save attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1114 1116
1115 1117 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1116 1118 """
1117 1119 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1118 1120
1119 1121 Inputs:
1120 1122 path : el path destino en el cual se escribiran los files a crear
1121 1123 format : formato en el cual sera salvado un file
1122 1124 set : el setebo del file
1123 1125
1124 1126 Return:
1125 1127 0 : Si no realizo un buen seteo
1126 1128 1 : Si realizo un buen seteo
1127 1129 """
1128 1130
1129 1131 if ext == None:
1130 1132 ext = self.ext
1131 1133
1132 1134 ext = ext.lower()
1133 1135
1134 1136 self.ext = ext
1135 1137
1136 1138 self.path = path
1137 1139
1138 1140 self.setFile = set - 1
1139 1141
1140 1142 self.blocksPerFile = blocksPerFile
1141 1143
1142 1144 self.profilesPerBlock = profilesPerBlock
1143 1145
1144 1146 self.dataOut = dataOut
1145 1147
1146 1148 if not(self.setNextFile()):
1147 1149 print "There isn't a next file"
1148 1150 return 0
1149 1151
1150 1152 self.setBlockDimension()
1151 1153
1152 1154 return 1
1153 1155
1154 1156 def run(self, dataOut, **kwargs):
1155 1157
1156 1158 if not(self.isConfig):
1157 1159
1158 1160 self.setup(dataOut, **kwargs)
1159 1161 self.isConfig = True
1160 1162
1161 1163 self.putData()
1162 1164
1163 1165 class VoltageReader(JRODataReader):
1164 1166 """
1165 1167 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1166 1168 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1167 1169 perfiles*alturas*canales) son almacenados en la variable "buffer".
1168 1170
1169 1171 perfiles * alturas * canales
1170 1172
1171 1173 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1172 1174 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1173 1175 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1174 1176 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1175 1177
1176 1178 Example:
1177 1179
1178 1180 dpath = "/home/myuser/data"
1179 1181
1180 1182 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1181 1183
1182 1184 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1183 1185
1184 1186 readerObj = VoltageReader()
1185 1187
1186 1188 readerObj.setup(dpath, startTime, endTime)
1187 1189
1188 1190 while(True):
1189 1191
1190 1192 #to get one profile
1191 1193 profile = readerObj.getData()
1192 1194
1193 1195 #print the profile
1194 1196 print profile
1195 1197
1196 1198 #If you want to see all datablock
1197 1199 print readerObj.datablock
1198 1200
1199 1201 if readerObj.flagNoMoreFiles:
1200 1202 break
1201 1203
1202 1204 """
1203 1205
1204 1206 ext = ".r"
1205 1207
1206 1208 optchar = "D"
1207 1209 dataOut = None
1208 1210
1209 1211
1210 1212 def __init__(self):
1211 1213 """
1212 1214 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1213 1215
1214 1216 Input:
1215 1217 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1216 1218 almacenar un perfil de datos cada vez que se haga un requerimiento
1217 1219 (getData). El perfil sera obtenido a partir del buffer de datos,
1218 1220 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1219 1221 bloque de datos.
1220 1222 Si este parametro no es pasado se creara uno internamente.
1221 1223
1222 1224 Variables afectadas:
1223 1225 self.dataOut
1224 1226
1225 1227 Return:
1226 1228 None
1227 1229 """
1228 1230
1229 1231 self.isConfig = False
1230 1232
1231 1233 self.datablock = None
1232 1234
1233 1235 self.utc = 0
1234 1236
1235 1237 self.ext = ".r"
1236 1238
1237 1239 self.optchar = "D"
1238 1240
1239 1241 self.basicHeaderObj = BasicHeader(LOCALTIME)
1240 1242
1241 1243 self.systemHeaderObj = SystemHeader()
1242 1244
1243 1245 self.radarControllerHeaderObj = RadarControllerHeader()
1244 1246
1245 1247 self.processingHeaderObj = ProcessingHeader()
1246 1248
1247 1249 self.online = 0
1248 1250
1249 1251 self.fp = None
1250 1252
1251 1253 self.idFile = None
1252 1254
1253 1255 self.dtype = None
1254 1256
1255 1257 self.fileSizeByHeader = None
1256 1258
1257 1259 self.filenameList = []
1258 1260
1259 1261 self.filename = None
1260 1262
1261 1263 self.fileSize = None
1262 1264
1263 1265 self.firstHeaderSize = 0
1264 1266
1265 1267 self.basicHeaderSize = 24
1266 1268
1267 1269 self.pathList = []
1268 1270
1269 1271 self.filenameList = []
1270 1272
1271 1273 self.lastUTTime = 0
1272 1274
1273 1275 self.maxTimeStep = 30
1274 1276
1275 1277 self.flagNoMoreFiles = 0
1276 1278
1277 1279 self.set = 0
1278 1280
1279 1281 self.path = None
1280 1282
1281 1283 self.profileIndex = 9999
1282 1284
1283 1285 self.delay = 3 #seconds
1284 1286
1285 1287 self.nTries = 3 #quantity tries
1286 1288
1287 1289 self.nFiles = 3 #number of files for searching
1288 1290
1289 1291 self.nReadBlocks = 0
1290 1292
1291 1293 self.flagIsNewFile = 1
1292 1294
1293 1295 self.ippSeconds = 0
1294 1296
1295 1297 self.flagTimeBlock = 0
1296 1298
1297 1299 self.flagIsNewBlock = 0
1298 1300
1299 1301 self.nTotalBlocks = 0
1300 1302
1301 1303 self.blocksize = 0
1302 1304
1303 1305 self.dataOut = self.createObjByDefault()
1304 1306
1305 1307 def createObjByDefault(self):
1306 1308
1307 1309 dataObj = Voltage()
1308 1310
1309 1311 return dataObj
1310 1312
1311 1313 def __hasNotDataInBuffer(self):
1312 1314 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1313 1315 return 1
1314 1316 return 0
1315 1317
1316 1318
1317 1319 def getBlockDimension(self):
1318 1320 """
1319 1321 Obtiene la cantidad de puntos a leer por cada bloque de datos
1320 1322
1321 1323 Affected:
1322 1324 self.blocksize
1323 1325
1324 1326 Return:
1325 1327 None
1326 1328 """
1327 1329 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1328 1330 self.blocksize = pts2read
1329 1331
1330 1332
1331 1333 def readBlock(self):
1332 1334 """
1333 1335 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1334 1336 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1335 1337 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1336 1338 es seteado a 0
1337 1339
1338 1340 Inputs:
1339 1341 None
1340 1342
1341 1343 Return:
1342 1344 None
1343 1345
1344 1346 Affected:
1345 1347 self.profileIndex
1346 1348 self.datablock
1347 1349 self.flagIsNewFile
1348 1350 self.flagIsNewBlock
1349 1351 self.nTotalBlocks
1350 1352
1351 1353 Exceptions:
1352 1354 Si un bloque leido no es un bloque valido
1353 1355 """
1354 1356
1355 1357 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1356 1358
1357 1359 try:
1358 1360 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1359 1361 except:
1360 1362 print "The read block (%3d) has not enough data" %self.nReadBlocks
1361 1363 return 0
1362 1364
1363 1365 junk = numpy.transpose(junk, (2,0,1))
1364 1366 self.datablock = junk['real'] + junk['imag']*1j
1365 1367
1366 1368 self.profileIndex = 0
1367 1369
1368 1370 self.flagIsNewFile = 0
1369 1371 self.flagIsNewBlock = 1
1370 1372
1371 1373 self.nTotalBlocks += 1
1372 1374 self.nReadBlocks += 1
1373 1375
1374 1376 return 1
1375 1377
1376 1378
1377 1379 def getData(self):
1378 1380 """
1379 1381 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1380 1382 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1381 1383 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1382 1384
1383 1385 Ademas incrementa el contador del buffer en 1.
1384 1386
1385 1387 Return:
1386 1388 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1387 1389 buffer. Si no hay mas archivos a leer retorna None.
1388 1390
1389 1391 Variables afectadas:
1390 1392 self.dataOut
1391 1393 self.profileIndex
1392 1394
1393 1395 Affected:
1394 1396 self.dataOut
1395 1397 self.profileIndex
1396 1398 self.flagTimeBlock
1397 1399 self.flagIsNewBlock
1398 1400 """
1399 1401
1400 1402 if self.flagNoMoreFiles:
1401 1403 self.dataOut.flagNoData = True
1402 1404 print 'Process finished'
1403 1405 return 0
1404 1406
1405 1407 self.flagTimeBlock = 0
1406 1408 self.flagIsNewBlock = 0
1407 1409
1408 1410 if self.__hasNotDataInBuffer():
1409 1411
1410 1412 if not( self.readNextBlock() ):
1411 1413 return 0
1412 1414
1413 1415 self.dataOut.dtype = self.dtype
1414 1416
1415 1417 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1416 1418
1417 1419 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1418 1420
1419 1421 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1420 1422
1421 1423 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1422 1424
1423 1425 self.dataOut.flagTimeBlock = self.flagTimeBlock
1424 1426
1425 1427 self.dataOut.ippSeconds = self.ippSeconds
1426 1428
1427 1429 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1428 1430
1429 1431 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1430 1432
1431 1433 self.dataOut.flagShiftFFT = False
1432 1434
1433 1435 if self.radarControllerHeaderObj.code != None:
1434 1436
1435 1437 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1436 1438
1437 1439 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1438 1440
1439 1441 self.dataOut.code = self.radarControllerHeaderObj.code
1440 1442
1441 1443 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1442 1444
1443 1445 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1444 1446
1445 1447 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1446 1448
1447 1449 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1448 1450
1449 1451 self.dataOut.flagShiftFFT = False
1450 1452
1451 1453
1452 1454 # self.updateDataHeader()
1453 1455
1454 1456 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1455 1457
1456 1458 if self.datablock == None:
1457 1459 self.dataOut.flagNoData = True
1458 1460 return 0
1459 1461
1460 1462 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1461 1463
1462 1464 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1463 1465
1464 1466 self.profileIndex += 1
1465 1467
1466 1468 self.dataOut.flagNoData = False
1467 1469
1468 1470 # print self.profileIndex, self.dataOut.utctime
1469 1471 # if self.profileIndex == 800:
1470 1472 # a=1
1471 1473
1472 1474
1473 1475 return self.dataOut.data
1474 1476
1475 1477
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Data is always written
    block by block.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None


    def __init__(self):
        """
        Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # 1 when the buffer holds a complete block, ready to be written.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Compute the shapes of the sub-buffers that make up a data block and
        allocate the (zeroed) write buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # On-disk layout: (profiles, heights, channels).
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # In-memory layout: (channels, profiles, heights), complex samples.
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))


    def writeBlock(self):
        """
        Write the buffer to the current output file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # Back to the on-disk (profiles, heights, channels) order.
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Append the current profile to the block buffer and flush the buffer
        to file once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available or no more files can be written
            1 : the data was accepted (and possibly written out)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A discontinuity in time starts a fresh file.
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        # Build the PROCFLAG bitmask describing the data type and the
        # processing already applied to the data.
        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]



        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]


        # Assumes self.dataOut.dtype is one of the dtypes above.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # Identity comparison: `code` may be a numpy array and `!= None`
        # on an ndarray does not yield a plain boolean.
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of one Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        # Assumes self.dataOut.dtype is one of the dtypes above.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # * 2 accounts for the real and imaginary components.
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header from the current data object.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is Voltage
        self.processingHeaderObj.totalSpectra = 0 # source data is Voltage

        # Identity comparison: `code` may be a numpy array (see above).
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1740 1742
1741 1743 class SpectraReader(JRODataReader):
1742 1744 """
1743 1745 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1744 1746 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1745 1747 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1746 1748
1747 1749 paresCanalesIguales * alturas * perfiles (Self Spectra)
1748 1750 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1749 1751 canales * alturas (DC Channels)
1750 1752
1751 1753 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1752 1754 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1753 1755 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1754 1756 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1755 1757
1756 1758 Example:
1757 1759 dpath = "/home/myuser/data"
1758 1760
1759 1761 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1760 1762
1761 1763 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1762 1764
1763 1765 readerObj = SpectraReader()
1764 1766
1765 1767 readerObj.setup(dpath, startTime, endTime)
1766 1768
1767 1769 while(True):
1768 1770
1769 1771 readerObj.getData()
1770 1772
1771 1773 print readerObj.data_spc
1772 1774
1773 1775 print readerObj.data_cspc
1774 1776
1775 1777 print readerObj.data_dc
1776 1778
1777 1779 if readerObj.flagNoMoreFiles:
1778 1780 break
1779 1781
1780 1782 """
1781 1783
1782 1784 pts2read_SelfSpectra = 0
1783 1785
1784 1786 pts2read_CrossSpectra = 0
1785 1787
1786 1788 pts2read_DCchannels = 0
1787 1789
1788 1790 ext = ".pdata"
1789 1791
1790 1792 optchar = "P"
1791 1793
1792 1794 dataOut = None
1793 1795
1794 1796 nRdChannels = None
1795 1797
1796 1798 nRdPairs = None
1797 1799
1798 1800 rdPairList = []
1799 1801
1800 1802
1801 1803 def __init__(self):
1802 1804 """
1803 1805 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1804 1806
1805 1807 Inputs:
1806 1808 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1807 1809 almacenar un perfil de datos cada vez que se haga un requerimiento
1808 1810 (getData). El perfil sera obtenido a partir del buffer de datos,
1809 1811 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1810 1812 bloque de datos.
1811 1813 Si este parametro no es pasado se creara uno internamente.
1812 1814
1813 1815 Affected:
1814 1816 self.dataOut
1815 1817
1816 1818 Return : None
1817 1819 """
1818 1820
1819 1821 self.isConfig = False
1820 1822
1821 1823 self.pts2read_SelfSpectra = 0
1822 1824
1823 1825 self.pts2read_CrossSpectra = 0
1824 1826
1825 1827 self.pts2read_DCchannels = 0
1826 1828
1827 1829 self.datablock = None
1828 1830
1829 1831 self.utc = None
1830 1832
1831 1833 self.ext = ".pdata"
1832 1834
1833 1835 self.optchar = "P"
1834 1836
1835 1837 self.basicHeaderObj = BasicHeader(LOCALTIME)
1836 1838
1837 1839 self.systemHeaderObj = SystemHeader()
1838 1840
1839 1841 self.radarControllerHeaderObj = RadarControllerHeader()
1840 1842
1841 1843 self.processingHeaderObj = ProcessingHeader()
1842 1844
1843 1845 self.online = 0
1844 1846
1845 1847 self.fp = None
1846 1848
1847 1849 self.idFile = None
1848 1850
1849 1851 self.dtype = None
1850 1852
1851 1853 self.fileSizeByHeader = None
1852 1854
1853 1855 self.filenameList = []
1854 1856
1855 1857 self.filename = None
1856 1858
1857 1859 self.fileSize = None
1858 1860
1859 1861 self.firstHeaderSize = 0
1860 1862
1861 1863 self.basicHeaderSize = 24
1862 1864
1863 1865 self.pathList = []
1864 1866
1865 1867 self.lastUTTime = 0
1866 1868
1867 1869 self.maxTimeStep = 30
1868 1870
1869 1871 self.flagNoMoreFiles = 0
1870 1872
1871 1873 self.set = 0
1872 1874
1873 1875 self.path = None
1874 1876
1875 1877 self.delay = 60 #seconds
1876 1878
1877 1879 self.nTries = 3 #quantity tries
1878 1880
1879 1881 self.nFiles = 3 #number of files for searching
1880 1882
1881 1883 self.nReadBlocks = 0
1882 1884
1883 1885 self.flagIsNewFile = 1
1884 1886
1885 1887 self.ippSeconds = 0
1886 1888
1887 1889 self.flagTimeBlock = 0
1888 1890
1889 1891 self.flagIsNewBlock = 0
1890 1892
1891 1893 self.nTotalBlocks = 0
1892 1894
1893 1895 self.blocksize = 0
1894 1896
1895 1897 self.dataOut = self.createObjByDefault()
1896 1898
1897 1899
1898 1900 def createObjByDefault(self):
1899 1901
1900 1902 dataObj = Spectra()
1901 1903
1902 1904 return dataObj
1903 1905
1904 1906 def __hasNotDataInBuffer(self):
1905 1907 return 1
1906 1908
1907 1909
1908 1910 def getBlockDimension(self):
1909 1911 """
1910 1912 Obtiene la cantidad de puntos a leer por cada bloque de datos
1911 1913
1912 1914 Affected:
1913 1915 self.nRdChannels
1914 1916 self.nRdPairs
1915 1917 self.pts2read_SelfSpectra
1916 1918 self.pts2read_CrossSpectra
1917 1919 self.pts2read_DCchannels
1918 1920 self.blocksize
1919 1921 self.dataOut.nChannels
1920 1922 self.dataOut.nPairs
1921 1923
1922 1924 Return:
1923 1925 None
1924 1926 """
1925 1927 self.nRdChannels = 0
1926 1928 self.nRdPairs = 0
1927 1929 self.rdPairList = []
1928 1930
1929 1931 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1930 1932 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1931 1933 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1932 1934 else:
1933 1935 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1934 1936 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1935 1937
1936 1938 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1937 1939
1938 1940 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1939 1941 self.blocksize = self.pts2read_SelfSpectra
1940 1942
1941 1943 if self.processingHeaderObj.flag_cspc:
1942 1944 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1943 1945 self.blocksize += self.pts2read_CrossSpectra
1944 1946
1945 1947 if self.processingHeaderObj.flag_dc:
1946 1948 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1947 1949 self.blocksize += self.pts2read_DCchannels
1948 1950
1949 1951 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1950 1952
1951 1953
1952 1954 def readBlock(self):
1953 1955 """
1954 1956 Lee el bloque de datos desde la posicion actual del puntero del archivo
1955 1957 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1956 1958 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1957 1959 es seteado a 0
1958 1960
1959 1961 Return: None
1960 1962
1961 1963 Variables afectadas:
1962 1964
1963 1965 self.flagIsNewFile
1964 1966 self.flagIsNewBlock
1965 1967 self.nTotalBlocks
1966 1968 self.data_spc
1967 1969 self.data_cspc
1968 1970 self.data_dc
1969 1971
1970 1972 Exceptions:
1971 1973 Si un bloque leido no es un bloque valido
1972 1974 """
1973 1975 blockOk_flag = False
1974 1976 fpointer = self.fp.tell()
1975 1977
1976 1978 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1977 1979 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1978 1980
1979 1981 if self.processingHeaderObj.flag_cspc:
1980 1982 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1981 1983 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1982 1984
1983 1985 if self.processingHeaderObj.flag_dc:
1984 1986 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1985 1987 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
1986 1988
1987 1989
1988 1990 if not(self.processingHeaderObj.shif_fft):
1989 1991 #desplaza a la derecha en el eje 2 determinadas posiciones
1990 1992 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1991 1993 spc = numpy.roll( spc, shift , axis=2 )
1992 1994
1993 1995 if self.processingHeaderObj.flag_cspc:
1994 1996 #desplaza a la derecha en el eje 2 determinadas posiciones
1995 1997 cspc = numpy.roll( cspc, shift, axis=2 )
1996 1998
1997 1999 # self.processingHeaderObj.shif_fft = True
1998 2000
1999 2001 spc = numpy.transpose( spc, (0,2,1) )
2000 2002 self.data_spc = spc
2001 2003
2002 2004 if self.processingHeaderObj.flag_cspc:
2003 2005 cspc = numpy.transpose( cspc, (0,2,1) )
2004 2006 self.data_cspc = cspc['real'] + cspc['imag']*1j
2005 2007 else:
2006 2008 self.data_cspc = None
2007 2009
2008 2010 if self.processingHeaderObj.flag_dc:
2009 2011 self.data_dc = dc['real'] + dc['imag']*1j
2010 2012 else:
2011 2013 self.data_dc = None
2012 2014
2013 2015 self.flagIsNewFile = 0
2014 2016 self.flagIsNewBlock = 1
2015 2017
2016 2018 self.nTotalBlocks += 1
2017 2019 self.nReadBlocks += 1
2018 2020
2019 2021 return 1
2020 2022
2021 2023
2022 2024 def getData(self):
2023 2025 """
2024 2026 Copia el buffer de lectura a la clase "Spectra",
2025 2027 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2026 2028 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2027 2029
2028 2030 Return:
2029 2031 0 : Si no hay mas archivos disponibles
2030 2032 1 : Si hizo una buena copia del buffer
2031 2033
2032 2034 Affected:
2033 2035 self.dataOut
2034 2036
2035 2037 self.flagTimeBlock
2036 2038 self.flagIsNewBlock
2037 2039 """
2038 2040
2039 2041 if self.flagNoMoreFiles:
2040 2042 self.dataOut.flagNoData = True
2041 2043 print 'Process finished'
2042 2044 return 0
2043 2045
2044 2046 self.flagTimeBlock = 0
2045 2047 self.flagIsNewBlock = 0
2046 2048
2047 2049 if self.__hasNotDataInBuffer():
2048 2050
2049 2051 if not( self.readNextBlock() ):
2050 2052 self.dataOut.flagNoData = True
2051 2053 return 0
2052 2054
2053 2055 # self.updateDataHeader()
2054 2056
2055 2057 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2056 2058
2057 2059 if self.data_dc == None:
2058 2060 self.dataOut.flagNoData = True
2059 2061 return 0
2060 2062
2061 2063 self.dataOut.data_spc = self.data_spc
2062 2064
2063 2065 self.dataOut.data_cspc = self.data_cspc
2064 2066
2065 2067 self.dataOut.data_dc = self.data_dc
2066 2068
2067 2069 self.dataOut.flagTimeBlock = self.flagTimeBlock
2068 2070
2069 2071 self.dataOut.flagNoData = False
2070 2072
2071 2073 self.dataOut.dtype = self.dtype
2072 2074
2073 2075 # self.dataOut.nChannels = self.nRdChannels
2074 2076
2075 2077 self.dataOut.nPairs = self.nRdPairs
2076 2078
2077 2079 self.dataOut.pairsList = self.rdPairList
2078 2080
2079 2081 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2080 2082
2081 2083 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2082 2084
2083 2085 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2084 2086
2085 2087 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2086 2088
2087 2089 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2088 2090
2089 2091 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2090 2092
2091 2093 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2092 2094
2093 2095 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2094 2096
2095 2097 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2096 2098
2097 2099 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2098 2100
2099 2101 self.dataOut.ippSeconds = self.ippSeconds
2100 2102
2101 2103 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2102 2104
2103 2105 # self.profileIndex += 1
2104 2106
2105 2107 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2106 2108
2107 2109 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2108 2110
2109 2111 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2110 2112
2111 2113 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2112 2114
2113 2115 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2114 2116
2115 2117 if self.processingHeaderObj.code != None:
2116 2118
2117 2119 self.dataOut.nCode = self.processingHeaderObj.nCode
2118 2120
2119 2121 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2120 2122
2121 2123 self.dataOut.code = self.processingHeaderObj.code
2122 2124
2123 2125 self.dataOut.flagDecodeData = True
2124 2126
2125 2127 return self.dataOut.data_spc
2126 2128
2127 2129
class SpectraWriter(JRODataWriter):

    """
    Writes spectra data blocks to processed data files (.pdata).

    Data are always written one whole block at a time: the self-spectra,
    then (optionally) the cross-spectra, then (optionally) the DC channels.
    """

    ext = ".pdata"

    optchar = "P"

    # Buffer shapes, computed by setBlockDimension()
    shape_spc_Buffer = None   # (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None  # (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None    # (nChannels, nHeights)

    data_spc = None

    data_cspc = None

    data_dc = None

#    dataOut = None

    def __init__(self):
        """
        Initializes the SpectraWriter used to write spectra data files.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Every putData() call carries one complete block, so the buffer
        # is always ready to be flushed.
        return 1


    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the sub-blocks that compose a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block to the designated file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose(self.data_spc, (0, 2, 1))
        if not self.processingHeaderObj.shif_fft:
            # shift right along axis 2 so the zero frequency is centered
            spc = numpy.roll(spc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # BUGFIX: "arr != None" performs an elementwise comparison on numpy
        # arrays (ambiguous truth value); identity tests are used instead.
        if self.data_cspc is not None:
            data = numpy.zeros(self.shape_cspc_Buffer, self.dtype)
            cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
            if not self.processingHeaderObj.shif_fft:
                cspc = numpy.roll(cspc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros(self.shape_dc_Buffer, self.dtype)
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # BUGFIX: cspc/dc buffers are optional elsewhere in this class, so
        # guard the resets instead of assuming they exist.
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Buffers one block of data and then writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if one block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # A time discontinuity starts a new file; clear only the
            # buffers that actually exist (cspc/dc are optional).
            self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # BUGFIX: copy() on a missing (None) cross-spectra or DC buffer
        # raised AttributeError; propagate None instead.
        self.data_cspc = self.dataOut.data_cspc.copy() if self.dataOut.data_cspc is not None else None
        self.data_dc = self.dataOut.data_dc.copy() if self.dataOut.data_dc is not None else None

        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Builds the PROCFLAG bitmask that describes the output datatype and
        the processing applied to the data.

        Return: the accumulated integer bitmask.
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # BUGFIX: an unknown dtype used to fall through to a NameError on
        # dtypeValue; fail with an explicit error instead.
        dtypeValue = None
        for candidate, flagValue in zip(dtypeList, datatypeValueList):
            if self.dataOut.dtype == candidate:
                dtypeValue = flagValue
                break

        if dtypeValue is None:
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # code may be a numpy array: use an identity test, not "!= None"
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determines the number of bytes of one Spectra data block: the
        self-spectra plus the optional cross-spectra and DC channels.
        '''

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1, 2, 4, 8, 4, 8]

        # BUGFIX: fail explicitly for an unknown dtype (was a NameError).
        datatypeValue = None
        for candidate, nBytes in zip(dtypeList, datatypeValueList):
            if self.dataOut.dtype == candidate:
                datatypeValue = nBytes
                break

        if datatypeValue is None:
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra * datatypeValue)

        if self.dataOut.data_cspc is not None:
            # cross-spectra are complex: real + imag components
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra * datatypeValue * 2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Builds a copy of the First Header for the output file.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        # NOTE(review): 0 is the Voltage datatype code in this header
        # convention -- confirm a Spectra writer should not record a
        # different value here.
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # each self-spectrum is encoded as the pair (ch, ch)
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # code may be a numpy array: identity test, not "!= None"
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights) * self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2478 2480
class SpectraHeisWriter():
    """
    Writes Heis spectra to FITS files, one new file per putData() call,
    grouped in D<year><doy> subfolders of the configured output path.
    """

    i = 0

    def __init__(self, dataOut):

        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(str):
        """
        Checks whether the characters of a string can be converted to a number.

        BUGFIX: the original definition was an instance method without a
        `self` parameter, so calling it through an instance passed the
        instance itself as `str` and always returned False; it is now a
        staticmethod.

        Input:
            str : string to analyse

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        try:
            float(str)
            return True
        except (TypeError, ValueError):
            return False

    def setup(self, wrpath):
        """
        Creates the output directory if needed and resets the file counter.
        """

        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Writes the current spectra (frequency axis plus 8 power channels,
        in dB) as a FITS binary table into a new file.

        Return:
            1 : the file was written
        """
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        fullpath = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(fullpath):
            os.mkdir(fullpath)
        self.setFile += 1
        file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, file)

        # NOTE(review): the frequency axis is built from nHeights while the
        # column format uses nFFTPoints -- confirm these always match here.
        freq = numpy.arange(-1 * self.dataOut.nHeights / 2., self.dataOut.nHeights / 2.) / (2 * self.dataOut.ippSeconds)

        col1 = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints) + 'E', array=freq)
        # one power column per channel, in dB
        powerCols = [self.wrObj.writeData(name="P_Ch%d" % (channel + 1),
                                          format=str(self.dataOut.nFFTPoints) + 'E',
                                          data=10 * numpy.log10(self.dataOut.data_spc[channel, :]))
                     for channel in range(8)]

        n = self.dataOut.data_spc[6, :]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, *powerCols)
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2550 2552
class FITS:
    """Thin convenience wrapper around pyfits for assembling and writing
    a FITS file made of one image HDU plus one nine-column binary table."""

    # Metadata captured by the most recent setColF/writeData call.
    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        # Nothing to initialize; attributes keep their class-level defaults.
        pass

    def setColF(self, name, format, array):
        """Record the column metadata and return a float32 pyfits Column."""
        self.name = name
        self.format = format
        self.array = array
        wrapped = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=wrapped)
        return self.col1

    def writeHeader(self):
        """Placeholder: header writing is not implemented."""
        pass

    def writeData(self, name, format, data):
        """Record the data-column metadata and return a float32 pyfits Column."""
        self.name = name
        self.format = format
        self.data = data
        wrapped = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=wrapped)
        return self.col2

    def cFImage(self, n):
        """Wrap *n* in a primary (image) HDU and return it."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Build and return a binary-table HDU from the nine columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Bundle the image HDU and the table HDU into an HDU list."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now