##// END OF EJS Templates
Bug fixed: mensajes en la búsqueda de datos en línea agregados
Miguel Valdez -
r293:e76e4bf50f11
parent child
Show More
@@ -1,2584 +1,2584
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether a string can be converted to a number.

    Input:
        str : value to test for numeric convertibility

    Return:
        True  : the value can be converted with float()
        False : it cannot
    """
    # Catch only the errors float() raises for bad input; the original bare
    # except also swallowed KeyboardInterrupt/SystemExit.
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file contains data inside the given
    date range.

    Inputs:
        filename       : full path of the data file in Jicamarca format (.r)
        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970

    Return:
        1 if the file's basic-header timestamp falls inside the range,
        0 otherwise.

    Exceptions:
        IOError when the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    # An unreadable header means the file is skipped, not a hard error.
    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # Half-open interval: start inclusive, end exclusive.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisTime = basicHeaderObj.datatime.time()
111 111
112 112 if not(sts):
113 113 print "Skipping the file %s because it has not a valid header" %(filename)
114 114 return 0
115 115
116 116 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 117 return 0
118 118
119 119 return 1
120 120
def getlastFileFromPath(path, ext):
    """
    Filter the entries of *path*, keeping only files that follow the
    Jicamarca naming scheme "PYYYYDDDSSS.ext", and return the last one in
    case-insensitive lexicographic order.

    Input:
        path : folder containing the candidate files
        ext  : extension the files must carry

    Return:
        The name (without path) of the newest valid file, or None when no
        entry qualifies.
    """
    # Expected layout by character position:
    #   0 1234 567 89A BCDE
    #   H YYYY DDD SSS .ext
    candidates = []
    wantedExt = ext.lower()

    for entry in os.listdir(path):
        # Positions 1:5 must hold the year and 5:8 the day of year;
        # anything that fails to parse is not a data file.
        try:
            int(entry[1:5])
            int(entry[5:8])
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != wantedExt:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so probe every upper/lower-case combination of
    directory prefix (none / 'd' / 'D') and file prefix to find the real
    on-disk path of a data file.

    Example:
        actual file: .../.../D2009307/P2009307367.ext
        probed names:
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext

    Return:
        (fullpath, filename) for the first combination that exists on disk;
        otherwise (None, lastProbedFilename). filename is None when the
        extension is not a recognized data type.
    """
    if ext.lower() == ".r":          #voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":    #spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    # Sweep every (directory prefix, file prefix) combination.
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        else:
            # directory named xYYYYDDD (x = 'd' or 'D')
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    # Nothing matched: report the last (upper-case) name that was tried.
    return None, filename
215 215
class JRODataIO:
    """
    Shared state for the Jicamarca data readers and writers: the four
    header objects, bookkeeping of the currently opened file/block, and
    the output data object (dataOut).
    """

    # Speed of light [m/s]; used to convert the IPP (km) into seconds
    # (see ippSeconds computed by the readers).
    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading data that is still being acquired (online mode)
    online = 0

    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    # set to 1 when the gap between consecutive blocks exceeds maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # file object of the currently opened data file
    fp = None

    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    # expected file size derived from the processing header
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed time gap (seconds) between blocks before a time
    # discontinuity is flagged
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract base class: must be subclassed.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by reader/writer subclasses.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the current output data object.
        return self.dataOut
291 291
292 292 class JRODataReader(JRODataIO, ProcessingUnit):
293 293
294 294 nReadBlocks = 0
295 295
296 296 delay = 10 #number of seconds waiting a new file
297 297
298 298 nTries = 3 #quantity tries
299 299
300 300 nFiles = 3 #number of files for searching
301 301
302 302 flagNoMoreFiles = 0
303 303
304 304 def __init__(self):
305 305
306 306 """
307 307
308 308 """
309 309
310 310 raise ValueError, "This method has not been implemented"
311 311
312 312
313 313 def createObjByDefault(self):
314 314 """
315 315
316 316 """
317 317 raise ValueError, "This method has not been implemented"
318 318
319 319 def getBlockDimension(self):
320 320
321 321 raise ValueError, "No implemented"
322 322
323 323 def __searchFilesOffLine(self,
324 324 path,
325 325 startDate,
326 326 endDate,
327 327 startTime=datetime.time(0,0,0),
328 328 endTime=datetime.time(23,59,59),
329 329 set=None,
330 330 expLabel='',
331 331 ext='.r',
332 332 walk=True):
333 333
334 334 pathList = []
335 335
336 336 if not walk:
337 337 pathList.append(path)
338 338
339 339 else:
340 340 dirList = []
341 341 for thisPath in os.listdir(path):
342 342 if os.path.isdir(os.path.join(path,thisPath)):
343 343 dirList.append(thisPath)
344 344
345 345 if not(dirList):
346 346 return None, None
347 347
348 348 thisDate = startDate
349 349
350 350 while(thisDate <= endDate):
351 351 year = thisDate.timetuple().tm_year
352 352 doy = thisDate.timetuple().tm_yday
353 353
354 354 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
355 355 if len(match) == 0:
356 356 thisDate += datetime.timedelta(1)
357 357 continue
358 358
359 359 pathList.append(os.path.join(path,match[0],expLabel))
360 360 thisDate += datetime.timedelta(1)
361 361
362 362 if pathList == []:
363 363 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
364 364 return None, None
365 365
366 366 print "%d folder(s) was(were) found for the date range: %s-%s" %(len(pathList), startDate, endDate)
367 367
368 368 filenameList = []
369 369 for thisPath in pathList:
370 370
371 371 fileList = glob.glob1(thisPath, "*%s" %ext)
372 372 fileList.sort()
373 373
374 374 for file in fileList:
375 375
376 376 filename = os.path.join(thisPath,file)
377 377
378 378 if isFileinThisTime(filename, startTime, endTime):
379 379 filenameList.append(filename)
380 380
381 381 if not(filenameList):
382 382 print "Any file was found for the time range %s - %s" %(startTime, endTime)
383 383 return None, None
384 384
385 385 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
386 386
387 387 self.filenameList = filenameList
388 388
389 389 return pathList, filenameList
390 390
391 391 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
392 392
393 393 """
394 394 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
395 395 devuelve el archivo encontrado ademas de otros datos.
396 396
397 397 Input:
398 398 path : carpeta donde estan contenidos los files que contiene data
399 399
400 400 expLabel : Nombre del subexperimento (subfolder)
401 401
402 402 ext : extension de los files
403 403
404 404 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
405 405
406 406 Return:
407 407 directory : eL directorio donde esta el file encontrado
408 408 filename : el ultimo file de una determinada carpeta
409 409 year : el anho
410 410 doy : el numero de dia del anho
411 411 set : el set del archivo
412 412
413 413
414 414 """
415 415 dirList = []
416 416
417 417 if walk:
418 418
419 419 #Filtra solo los directorios
420 420 for thisPath in os.listdir(path):
421 421 if os.path.isdir(os.path.join(path, thisPath)):
422 422 dirList.append(thisPath)
423 423
424 424 if not(dirList):
425 425 return None, None, None, None, None
426 426
427 427 dirList = sorted( dirList, key=str.lower )
428 428
429 429 doypath = dirList[-1]
430 430 fullpath = os.path.join(path, doypath, expLabel)
431 431
432 432 else:
433 433 fullpath = path
434 434
435 print "%d folder was found: " %(fullpath )
435 print "%s folder was found: " %(fullpath )
436 436
437 437 filename = getlastFileFromPath(fullpath, ext)
438 438
439 439 if not(filename):
440 440 return None, None, None, None, None
441 441
442 442 print "%s file was found" %(filename)
443 443
444 444 if not(self.__verifyFile(os.path.join(fullpath, filename))):
445 445 return None, None, None, None, None
446 446
447 447 year = int( filename[1:5] )
448 448 doy = int( filename[5:8] )
449 449 set = int( filename[8:11] )
450 450
451 451 return fullpath, filename, year, doy, set
452 452
453 453
454 454
455 455 def __setNextFileOffline(self):
456 456
457 457 idFile = self.fileIndex
458 458
459 459 while (True):
460 460 idFile += 1
461 461 if not(idFile < len(self.filenameList)):
462 462 self.flagNoMoreFiles = 1
463 463 print "No more Files"
464 464 return 0
465 465
466 466 filename = self.filenameList[idFile]
467 467
468 468 if not(self.__verifyFile(filename)):
469 469 continue
470 470
471 471 fileSize = os.path.getsize(filename)
472 472 fp = open(filename,'rb')
473 473 break
474 474
475 475 self.flagIsNewFile = 1
476 476 self.fileIndex = idFile
477 477 self.filename = filename
478 478 self.fileSize = fileSize
479 479 self.fp = fp
480 480
481 481 print "Setting the file: %s"%self.filename
482 482
483 483 return 1
484 484
485 485 def __setNextFileOnline(self):
486 486 """
487 487 Busca el siguiente file que tenga suficiente data para ser leida, dentro de un folder especifico, si
488 488 no encuentra un file valido espera un tiempo determinado y luego busca en los posibles n files
489 489 siguientes.
490 490
491 491 Affected:
492 492 self.flagIsNewFile
493 493 self.filename
494 494 self.fileSize
495 495 self.fp
496 496 self.set
497 497 self.flagNoMoreFiles
498 498
499 499 Return:
500 500 0 : si luego de una busqueda del siguiente file valido este no pudo ser encontrado
501 501 1 : si el file fue abierto con exito y esta listo a ser leido
502 502
503 503 Excepciones:
504 504 Si un determinado file no puede ser abierto
505 505 """
506 506 nFiles = 0
507 507 fileOk_flag = False
508 508 firstTime_flag = True
509 509
510 510 self.set += 1
511 511
512 512 #busca el 1er file disponible
513 513 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
514 514 if fullfilename:
515 515 if self.__verifyFile(fullfilename, False):
516 516 fileOk_flag = True
517 517
518 518 #si no encuentra un file entonces espera y vuelve a buscar
519 519 if not(fileOk_flag):
520 520 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
521 521
522 522 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
523 523 tries = self.nTries
524 524 else:
525 525 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
526 526
527 527 for nTries in range( tries ):
528 528 if firstTime_flag:
529 529 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
530 530 time.sleep( self.delay )
531 531 else:
532 532 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
533 533
534 534 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
535 535 if fullfilename:
536 536 if self.__verifyFile(fullfilename):
537 537 fileOk_flag = True
538 538 break
539 539
540 540 if fileOk_flag:
541 541 break
542 542
543 543 firstTime_flag = False
544 544
545 545 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
546 546 self.set += 1
547 547
548 548 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
549 549 self.set = 0
550 550 self.doy += 1
551 551
552 552 if fileOk_flag:
553 553 self.fileSize = os.path.getsize( fullfilename )
554 554 self.filename = fullfilename
555 555 self.flagIsNewFile = 1
556 556 if self.fp != None: self.fp.close()
557 557 self.fp = open(fullfilename, 'rb')
558 558 self.flagNoMoreFiles = 0
559 559 print 'Setting the file: %s' % fullfilename
560 560 else:
561 561 self.fileSize = 0
562 562 self.filename = None
563 563 self.flagIsNewFile = 0
564 564 self.fp = None
565 565 self.flagNoMoreFiles = 1
566 566 print 'No more Files'
567 567
568 568 return fileOk_flag
569 569
570 570
571 571 def setNextFile(self):
572 572 if self.fp != None:
573 573 self.fp.close()
574 574
575 575 if self.online:
576 576 newFile = self.__setNextFileOnline()
577 577 else:
578 578 newFile = self.__setNextFileOffline()
579 579
580 580 if not(newFile):
581 581 return 0
582 582
583 583 self.__readFirstHeader()
584 584 self.nReadBlocks = 0
585 585 return 1
586 586
587 587 def __waitNewBlock(self):
588 588 """
589 589 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
590 590
591 591 Si el modo de lectura es OffLine siempre retorn 0
592 592 """
593 593 if not self.online:
594 594 return 0
595 595
596 596 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
597 597 return 0
598 598
599 599 currentPointer = self.fp.tell()
600 600
601 601 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
602 602
603 603 for nTries in range( self.nTries ):
604 604
605 605 self.fp.close()
606 606 self.fp = open( self.filename, 'rb' )
607 607 self.fp.seek( currentPointer )
608 608
609 609 self.fileSize = os.path.getsize( self.filename )
610 610 currentSize = self.fileSize - currentPointer
611 611
612 612 if ( currentSize >= neededSize ):
613 613 self.__rdBasicHeader()
614 614 return 1
615 615
616 616 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
617 617 time.sleep( self.delay )
618 618
619 619
620 620 return 0
621 621
622 622 def __setNewBlock(self):
623 623
624 624 if self.fp == None:
625 625 return 0
626 626
627 627 if self.flagIsNewFile:
628 628 return 1
629 629
630 630 self.lastUTTime = self.basicHeaderObj.utc
631 631 currentSize = self.fileSize - self.fp.tell()
632 632 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
633 633
634 634 if (currentSize >= neededSize):
635 635 self.__rdBasicHeader()
636 636 return 1
637 637
638 638 if self.__waitNewBlock():
639 639 return 1
640 640
641 641 if not(self.setNextFile()):
642 642 return 0
643 643
644 644 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
645 645
646 646 self.flagTimeBlock = 0
647 647
648 648 if deltaTime > self.maxTimeStep:
649 649 self.flagTimeBlock = 1
650 650
651 651 return 1
652 652
653 653
654 654 def readNextBlock(self):
655 655 if not(self.__setNewBlock()):
656 656 return 0
657 657
658 658 if not(self.readBlock()):
659 659 return 0
660 660
661 661 return 1
662 662
663 663 def __rdProcessingHeader(self, fp=None):
664 664 if fp == None:
665 665 fp = self.fp
666 666
667 667 self.processingHeaderObj.read(fp)
668 668
669 669 def __rdRadarControllerHeader(self, fp=None):
670 670 if fp == None:
671 671 fp = self.fp
672 672
673 673 self.radarControllerHeaderObj.read(fp)
674 674
675 675 def __rdSystemHeader(self, fp=None):
676 676 if fp == None:
677 677 fp = self.fp
678 678
679 679 self.systemHeaderObj.read(fp)
680 680
681 681 def __rdBasicHeader(self, fp=None):
682 682 if fp == None:
683 683 fp = self.fp
684 684
685 685 self.basicHeaderObj.read(fp)
686 686
687 687
688 688 def __readFirstHeader(self):
689 689 self.__rdBasicHeader()
690 690 self.__rdSystemHeader()
691 691 self.__rdRadarControllerHeader()
692 692 self.__rdProcessingHeader()
693 693
694 694 self.firstHeaderSize = self.basicHeaderObj.size
695 695
696 696 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
697 697 if datatype == 0:
698 698 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
699 699 elif datatype == 1:
700 700 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
701 701 elif datatype == 2:
702 702 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
703 703 elif datatype == 3:
704 704 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
705 705 elif datatype == 4:
706 706 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
707 707 elif datatype == 5:
708 708 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
709 709 else:
710 710 raise ValueError, 'Data type was not defined'
711 711
712 712 self.dtype = datatype_str
713 713 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
714 714 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
715 715 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
716 716 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
717 717 self.getBlockDimension()
718 718
719 719
720 720 def __verifyFile(self, filename, msgFlag=True):
721 721 msg = None
722 722 try:
723 723 fp = open(filename, 'rb')
724 724 currentPosition = fp.tell()
725 725 except:
726 726 if msgFlag:
727 727 print "The file %s can't be opened" % (filename)
728 728 return False
729 729
730 730 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
731 731
732 732 if neededSize == 0:
733 733 basicHeaderObj = BasicHeader(LOCALTIME)
734 734 systemHeaderObj = SystemHeader()
735 735 radarControllerHeaderObj = RadarControllerHeader()
736 736 processingHeaderObj = ProcessingHeader()
737 737
738 738 try:
739 739 if not( basicHeaderObj.read(fp) ): raise IOError
740 740 if not( systemHeaderObj.read(fp) ): raise IOError
741 741 if not( radarControllerHeaderObj.read(fp) ): raise IOError
742 742 if not( processingHeaderObj.read(fp) ): raise IOError
743 743 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
744 744
745 745 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
746 746
747 747 except:
748 748 if msgFlag:
749 749 print "\tThe file %s is empty or it hasn't enough data" % filename
750 750
751 751 fp.close()
752 752 return False
753 753 else:
754 754 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
755 755
756 756 fp.close()
757 757 fileSize = os.path.getsize(filename)
758 758 currentSize = fileSize - currentPosition
759 759 if currentSize < neededSize:
760 760 if msgFlag and (msg != None):
761 761 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
762 762 return False
763 763
764 764 return True
765 765
766 766 def setup(self,
767 767 path=None,
768 768 startDate=None,
769 769 endDate=None,
770 770 startTime=datetime.time(0,0,0),
771 771 endTime=datetime.time(23,59,59),
772 772 set=0,
773 773 expLabel = "",
774 774 ext = None,
775 775 online = False,
776 776 delay = 60,
777 777 walk = True):
778 778
779 779 if path == None:
780 780 raise ValueError, "The path is not valid"
781 781
782 782 if ext == None:
783 783 ext = self.ext
784 784
785 785 if online:
786 786 print "Searching files in online mode..."
787 787
788 788 for nTries in range( self.nTries ):
789 789 fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
790 790
791 791 if fullpath:
792 792 break
793 793
794 794 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
795 795 time.sleep( self.delay )
796 796
797 797 if not(fullpath):
798 798 print "There 'isn't valied files in %s" % path
799 799 return None
800 800
801 801 self.year = year
802 802 self.doy = doy
803 803 self.set = set - 1
804 804 self.path = path
805 805
806 806 else:
807 807 print "Searching files in offline mode ..."
808 808 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
809 809 startTime=startTime, endTime=endTime,
810 810 set=set, expLabel=expLabel, ext=ext,
811 811 walk=walk)
812 812
813 813 if not(pathList):
814 814 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
815 815 datetime.datetime.combine(startDate,startTime).ctime(),
816 816 datetime.datetime.combine(endDate,endTime).ctime())
817 817
818 818 sys.exit(-1)
819 819
820 820
821 821 self.fileIndex = -1
822 822 self.pathList = pathList
823 823 self.filenameList = filenameList
824 824
825 825 self.online = online
826 826 self.delay = delay
827 827 ext = ext.lower()
828 828 self.ext = ext
829 829
830 830 if not(self.setNextFile()):
831 831 if (startDate!=None) and (endDate!=None):
832 832 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
833 833 elif startDate != None:
834 834 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
835 835 else:
836 836 print "No files"
837 837
838 838 sys.exit(-1)
839 839
840 840 # self.updateDataHeader()
841 841
842 842 return self.dataOut
843 843
844 844 def getData():
845 845
846 846 raise ValueError, "This method has not been implemented"
847 847
848 848 def hasNotDataInBuffer():
849 849
850 850 raise ValueError, "This method has not been implemented"
851 851
852 852 def readBlock():
853 853
854 854 raise ValueError, "This method has not been implemented"
855 855
856 856 def isEndProcess(self):
857 857
858 858 return self.flagNoMoreFiles
859 859
860 860 def printReadBlocks(self):
861 861
862 862 print "Number of read blocks per file %04d" %self.nReadBlocks
863 863
864 864 def printTotalBlocks(self):
865 865
866 866 print "Number of read blocks %04d" %self.nTotalBlocks
867 867
868 868 def printNumberOfBlock(self):
869 869
870 870 if self.flagIsNewBlock:
871 871 print "Block No. %04d, Total blocks %04d" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks)
872 872
873 873 def printInfo(self):
874 874
875 875 print self.basicHeaderObj.printInfo()
876 876 print self.systemHeaderObj.printInfo()
877 877 print self.radarControllerHeaderObj.printInfo()
878 878 print self.processingHeaderObj.printInfo()
879 879
880 880
881 881 def run(self, **kwargs):
882 882
883 883 if not(self.isConfig):
884 884
885 885 # self.dataOut = dataOut
886 886 self.setup(**kwargs)
887 887 self.isConfig = True
888 888
889 889 self.getData()
890 890
class JRODataWriter(JRODataIO, Operation):

    """
    Write data to processed-data files (.r or .pdata). Data is always
    written in whole blocks. Subclasses implement the format-specific
    parts: writeBlock(), putData(), getDataHeader(), hasAllDataInBuffer()
    and setBlockDimension().
    """

    blockIndex = 0

    path = None

    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract base: concrete writers define their own constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: report whether a full block is buffered.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: set the dimensions of the output block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one data block to the current file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: feed data into the writer.
        raise ValueError, "No implemented"

    def getDataHeader(self):
        """
        Obtain a copy of the First Header for the output file.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"

    def getBasicHeader(self):
        """Fill the basic header from the current dataOut timestamp."""
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split utctime into whole seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = 0
        self.basicHeaderObj.dstFlag = 0
        self.basicHeaderObj.errorCount = 0

    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute the total header size

        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        Write the First Header when starting a new file, otherwise write
        only the Basic Header (rolling to the next file when full).

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # current file is full: open the next one
        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Select the next data block and write it to the current file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Determine the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must follow this layout:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the set number of the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        # file name xYYYYDDDSSS.ext
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        #store attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
        """
        Configure the output format and write the First Header.

        Inputs:
            path          : destination path where the files will be created
            blocksPerFile : number of data blocks per output file
            set           : starting set number of the file

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        # start one set before so setNextFile() picks the right number
        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        # Configure on first call, then write one data unit per call.
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1143 1143
1144 1144 class VoltageReader(JRODataReader):
1145 1145 """
1146 1146 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1147 1147 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1148 1148 perfiles*alturas*canales) son almacenados en la variable "buffer".
1149 1149
1150 1150 perfiles * alturas * canales
1151 1151
1152 1152 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1153 1153 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1154 1154 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1155 1155 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1156 1156
1157 1157 Example:
1158 1158
1159 1159 dpath = "/home/myuser/data"
1160 1160
1161 1161 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1162 1162
1163 1163 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1164 1164
1165 1165 readerObj = VoltageReader()
1166 1166
1167 1167 readerObj.setup(dpath, startTime, endTime)
1168 1168
1169 1169 while(True):
1170 1170
1171 1171 #to get one profile
1172 1172 profile = readerObj.getData()
1173 1173
1174 1174 #print the profile
1175 1175 print profile
1176 1176
1177 1177 #If you want to see all datablock
1178 1178 print readerObj.datablock
1179 1179
1180 1180 if readerObj.flagNoMoreFiles:
1181 1181 break
1182 1182
1183 1183 """
1184 1184
1185 1185 ext = ".r"
1186 1186
1187 1187 optchar = "D"
1188 1188 dataOut = None
1189 1189
1190 1190
1191 1191 def __init__(self):
1192 1192 """
1193 1193 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1194 1194
1195 1195 Input:
1196 1196 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1197 1197 almacenar un perfil de datos cada vez que se haga un requerimiento
1198 1198 (getData). El perfil sera obtenido a partir del buffer de datos,
1199 1199 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1200 1200 bloque de datos.
1201 1201 Si este parametro no es pasado se creara uno internamente.
1202 1202
1203 1203 Variables afectadas:
1204 1204 self.dataOut
1205 1205
1206 1206 Return:
1207 1207 None
1208 1208 """
1209 1209
1210 1210 self.isConfig = False
1211 1211
1212 1212 self.datablock = None
1213 1213
1214 1214 self.utc = 0
1215 1215
1216 1216 self.ext = ".r"
1217 1217
1218 1218 self.optchar = "D"
1219 1219
1220 1220 self.basicHeaderObj = BasicHeader(LOCALTIME)
1221 1221
1222 1222 self.systemHeaderObj = SystemHeader()
1223 1223
1224 1224 self.radarControllerHeaderObj = RadarControllerHeader()
1225 1225
1226 1226 self.processingHeaderObj = ProcessingHeader()
1227 1227
1228 1228 self.online = 0
1229 1229
1230 1230 self.fp = None
1231 1231
1232 1232 self.idFile = None
1233 1233
1234 1234 self.dtype = None
1235 1235
1236 1236 self.fileSizeByHeader = None
1237 1237
1238 1238 self.filenameList = []
1239 1239
1240 1240 self.filename = None
1241 1241
1242 1242 self.fileSize = None
1243 1243
1244 1244 self.firstHeaderSize = 0
1245 1245
1246 1246 self.basicHeaderSize = 24
1247 1247
1248 1248 self.pathList = []
1249 1249
1250 1250 self.filenameList = []
1251 1251
1252 1252 self.lastUTTime = 0
1253 1253
1254 1254 self.maxTimeStep = 30
1255 1255
1256 1256 self.flagNoMoreFiles = 0
1257 1257
1258 1258 self.set = 0
1259 1259
1260 1260 self.path = None
1261 1261
1262 1262 self.profileIndex = 9999
1263 1263
1264 1264 self.delay = 3 #seconds
1265 1265
1266 1266 self.nTries = 3 #quantity tries
1267 1267
1268 1268 self.nFiles = 3 #number of files for searching
1269 1269
1270 1270 self.nReadBlocks = 0
1271 1271
1272 1272 self.flagIsNewFile = 1
1273 1273
1274 1274 self.ippSeconds = 0
1275 1275
1276 1276 self.flagTimeBlock = 0
1277 1277
1278 1278 self.flagIsNewBlock = 0
1279 1279
1280 1280 self.nTotalBlocks = 0
1281 1281
1282 1282 self.blocksize = 0
1283 1283
1284 1284 self.dataOut = self.createObjByDefault()
1285 1285
1286 1286 def createObjByDefault(self):
1287 1287
1288 1288 dataObj = Voltage()
1289 1289
1290 1290 return dataObj
1291 1291
1292 1292 def __hasNotDataInBuffer(self):
1293 1293 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1294 1294 return 1
1295 1295 return 0
1296 1296
1297 1297
1298 1298 def getBlockDimension(self):
1299 1299 """
1300 1300 Obtiene la cantidad de puntos a leer por cada bloque de datos
1301 1301
1302 1302 Affected:
1303 1303 self.blocksize
1304 1304
1305 1305 Return:
1306 1306 None
1307 1307 """
1308 1308 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1309 1309 self.blocksize = pts2read
1310 1310
1311 1311
1312 1312 def readBlock(self):
1313 1313 """
1314 1314 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1315 1315 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1316 1316 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1317 1317 es seteado a 0
1318 1318
1319 1319 Inputs:
1320 1320 None
1321 1321
1322 1322 Return:
1323 1323 None
1324 1324
1325 1325 Affected:
1326 1326 self.profileIndex
1327 1327 self.datablock
1328 1328 self.flagIsNewFile
1329 1329 self.flagIsNewBlock
1330 1330 self.nTotalBlocks
1331 1331
1332 1332 Exceptions:
1333 1333 Si un bloque leido no es un bloque valido
1334 1334 """
1335 1335
1336 1336 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1337 1337
1338 1338 try:
1339 1339 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1340 1340 except:
1341 1341 print "The read block (%3d) has not enough data" %self.nReadBlocks
1342 1342 return 0
1343 1343
1344 1344 junk = numpy.transpose(junk, (2,0,1))
1345 1345 self.datablock = junk['real'] + junk['imag']*1j
1346 1346
1347 1347 self.profileIndex = 0
1348 1348
1349 1349 self.flagIsNewFile = 0
1350 1350 self.flagIsNewBlock = 1
1351 1351
1352 1352 self.nTotalBlocks += 1
1353 1353 self.nReadBlocks += 1
1354 1354
1355 1355 return 1
1356 1356
1357 1357
1358 1358 def getData(self):
1359 1359 """
1360 1360 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1361 1361 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1362 1362 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1363 1363
1364 1364 Ademas incrementa el contador del buffer en 1.
1365 1365
1366 1366 Return:
1367 1367 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1368 1368 buffer. Si no hay mas archivos a leer retorna None.
1369 1369
1370 1370 Variables afectadas:
1371 1371 self.dataOut
1372 1372 self.profileIndex
1373 1373
1374 1374 Affected:
1375 1375 self.dataOut
1376 1376 self.profileIndex
1377 1377 self.flagTimeBlock
1378 1378 self.flagIsNewBlock
1379 1379 """
1380 1380
1381 1381 if self.flagNoMoreFiles:
1382 1382 self.dataOut.flagNoData = True
1383 1383 print 'Process finished'
1384 1384 return 0
1385 1385
1386 1386 self.flagTimeBlock = 0
1387 1387 self.flagIsNewBlock = 0
1388 1388
1389 1389 if self.__hasNotDataInBuffer():
1390 1390
1391 1391 if not( self.readNextBlock() ):
1392 1392 return 0
1393 1393
1394 1394 self.dataOut.dtype = self.dtype
1395 1395
1396 1396 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1397 1397
1398 1398 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1399 1399
1400 1400 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1401 1401
1402 1402 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1403 1403
1404 1404 self.dataOut.flagTimeBlock = self.flagTimeBlock
1405 1405
1406 1406 self.dataOut.ippSeconds = self.ippSeconds
1407 1407
1408 1408 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1409 1409
1410 1410 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1411 1411
1412 1412 self.dataOut.flagShiftFFT = False
1413 1413
1414 1414 if self.radarControllerHeaderObj.code != None:
1415 1415
1416 1416 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1417 1417
1418 1418 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1419 1419
1420 1420 self.dataOut.code = self.radarControllerHeaderObj.code
1421 1421
1422 1422 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1423 1423
1424 1424 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1425 1425
1426 1426 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1427 1427
1428 1428 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1429 1429
1430 1430 self.dataOut.flagShiftFFT = False
1431 1431
1432 1432
1433 1433 # self.updateDataHeader()
1434 1434
1435 1435 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1436 1436
1437 1437 if self.datablock == None:
1438 1438 self.dataOut.flagNoData = True
1439 1439 return 0
1440 1440
1441 1441 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1442 1442
1443 1443 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1444 1444
1445 1445 self.profileIndex += 1
1446 1446
1447 1447 self.dataOut.flagNoData = False
1448 1448
1449 1449 # print self.profileIndex, self.dataOut.utctime
1450 1450 # if self.profileIndex == 800:
1451 1451 # a=1
1452 1452
1453 1453
1454 1454 return self.dataOut.data
1455 1455
1456 1456
1457 1457 class VoltageWriter(JRODataWriter):
1458 1458 """
1459 1459 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
1460 1460 de los datos siempre se realiza por bloques.
1461 1461 """
1462 1462
1463 1463 ext = ".r"
1464 1464
1465 1465 optchar = "D"
1466 1466
1467 1467 shapeBuffer = None
1468 1468
1469 1469
1470 1470 def __init__(self):
1471 1471 """
1472 1472 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
1473 1473
1474 1474 Affected:
1475 1475 self.dataOut
1476 1476
1477 1477 Return: None
1478 1478 """
1479 1479
1480 1480 self.nTotalBlocks = 0
1481 1481
1482 1482 self.profileIndex = 0
1483 1483
1484 1484 self.isConfig = False
1485 1485
1486 1486 self.fp = None
1487 1487
1488 1488 self.flagIsNewFile = 1
1489 1489
1490 1490 self.nTotalBlocks = 0
1491 1491
1492 1492 self.flagIsNewBlock = 0
1493 1493
1494 1494 self.setFile = None
1495 1495
1496 1496 self.dtype = None
1497 1497
1498 1498 self.path = None
1499 1499
1500 1500 self.filename = None
1501 1501
1502 1502 self.basicHeaderObj = BasicHeader(LOCALTIME)
1503 1503
1504 1504 self.systemHeaderObj = SystemHeader()
1505 1505
1506 1506 self.radarControllerHeaderObj = RadarControllerHeader()
1507 1507
1508 1508 self.processingHeaderObj = ProcessingHeader()
1509 1509
1510 1510 def hasAllDataInBuffer(self):
1511 1511 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1512 1512 return 1
1513 1513 return 0
1514 1514
1515 1515
1516 1516 def setBlockDimension(self):
1517 1517 """
1518 1518 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1519 1519
1520 1520 Affected:
1521 1521 self.shape_spc_Buffer
1522 1522 self.shape_cspc_Buffer
1523 1523 self.shape_dc_Buffer
1524 1524
1525 1525 Return: None
1526 1526 """
1527 1527 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1528 1528 self.processingHeaderObj.nHeights,
1529 1529 self.systemHeaderObj.nChannels)
1530 1530
1531 1531 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1532 1532 self.processingHeaderObj.profilesPerBlock,
1533 1533 self.processingHeaderObj.nHeights),
1534 1534 dtype=numpy.dtype('complex64'))
1535 1535
1536 1536
1537 1537 def writeBlock(self):
1538 1538 """
1539 1539 Escribe el buffer en el file designado
1540 1540
1541 1541 Affected:
1542 1542 self.profileIndex
1543 1543 self.flagIsNewFile
1544 1544 self.flagIsNewBlock
1545 1545 self.nTotalBlocks
1546 1546 self.blockIndex
1547 1547
1548 1548 Return: None
1549 1549 """
1550 1550 data = numpy.zeros( self.shapeBuffer, self.dtype )
1551 1551
1552 1552 junk = numpy.transpose(self.datablock, (1,2,0))
1553 1553
1554 1554 data['real'] = junk.real
1555 1555 data['imag'] = junk.imag
1556 1556
1557 1557 data = data.reshape( (-1) )
1558 1558
1559 1559 data.tofile( self.fp )
1560 1560
1561 1561 self.datablock.fill(0)
1562 1562
1563 1563 self.profileIndex = 0
1564 1564 self.flagIsNewFile = 0
1565 1565 self.flagIsNewBlock = 1
1566 1566
1567 1567 self.blockIndex += 1
1568 1568 self.nTotalBlocks += 1
1569 1569
1570 1570 def putData(self):
1571 1571 """
1572 1572 Setea un bloque de datos y luego los escribe en un file
1573 1573
1574 1574 Affected:
1575 1575 self.flagIsNewBlock
1576 1576 self.profileIndex
1577 1577
1578 1578 Return:
1579 1579 0 : Si no hay data o no hay mas files que puedan escribirse
1580 1580 1 : Si se escribio la data de un bloque en un file
1581 1581 """
1582 1582 if self.dataOut.flagNoData:
1583 1583 return 0
1584 1584
1585 1585 self.flagIsNewBlock = 0
1586 1586
1587 1587 if self.dataOut.flagTimeBlock:
1588 1588
1589 1589 self.datablock.fill(0)
1590 1590 self.profileIndex = 0
1591 1591 self.setNextFile()
1592 1592
1593 1593 if self.profileIndex == 0:
1594 1594 self.getBasicHeader()
1595 1595
1596 1596 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1597 1597
1598 1598 self.profileIndex += 1
1599 1599
1600 1600 if self.hasAllDataInBuffer():
1601 1601 #if self.flagIsNewFile:
1602 1602 self.writeNextBlock()
1603 1603 # self.getDataHeader()
1604 1604
1605 1605 return 1
1606 1606
1607 1607 def __getProcessFlags(self):
1608 1608
1609 1609 processFlags = 0
1610 1610
1611 1611 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1612 1612 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1613 1613 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1614 1614 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1615 1615 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1616 1616 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1617 1617
1618 1618 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1619 1619
1620 1620
1621 1621
1622 1622 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1623 1623 PROCFLAG.DATATYPE_SHORT,
1624 1624 PROCFLAG.DATATYPE_LONG,
1625 1625 PROCFLAG.DATATYPE_INT64,
1626 1626 PROCFLAG.DATATYPE_FLOAT,
1627 1627 PROCFLAG.DATATYPE_DOUBLE]
1628 1628
1629 1629
1630 1630 for index in range(len(dtypeList)):
1631 1631 if self.dataOut.dtype == dtypeList[index]:
1632 1632 dtypeValue = datatypeValueList[index]
1633 1633 break
1634 1634
1635 1635 processFlags += dtypeValue
1636 1636
1637 1637 if self.dataOut.flagDecodeData:
1638 1638 processFlags += PROCFLAG.DECODE_DATA
1639 1639
1640 1640 if self.dataOut.flagDeflipData:
1641 1641 processFlags += PROCFLAG.DEFLIP_DATA
1642 1642
1643 1643 if self.dataOut.code != None:
1644 1644 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1645 1645
1646 1646 if self.dataOut.nCohInt > 1:
1647 1647 processFlags += PROCFLAG.COHERENT_INTEGRATION
1648 1648
1649 1649 return processFlags
1650 1650
1651 1651
1652 1652 def __getBlockSize(self):
1653 1653 '''
1654 1654 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1655 1655 '''
1656 1656
1657 1657 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1658 1658 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1659 1659 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1660 1660 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1661 1661 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1662 1662 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1663 1663
1664 1664 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1665 1665 datatypeValueList = [1,2,4,8,4,8]
1666 1666 for index in range(len(dtypeList)):
1667 1667 if self.dataOut.dtype == dtypeList[index]:
1668 1668 datatypeValue = datatypeValueList[index]
1669 1669 break
1670 1670
1671 1671 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1672 1672
1673 1673 return blocksize
1674 1674
1675 1675 def getDataHeader(self):
1676 1676
1677 1677 """
1678 1678 Obtiene una copia del First Header
1679 1679
1680 1680 Affected:
1681 1681 self.systemHeaderObj
1682 1682 self.radarControllerHeaderObj
1683 1683 self.dtype
1684 1684
1685 1685 Return:
1686 1686 None
1687 1687 """
1688 1688
1689 1689 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1690 1690 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1691 1691 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1692 1692
1693 1693 self.getBasicHeader()
1694 1694
1695 1695 processingHeaderSize = 40 # bytes
1696 1696 self.processingHeaderObj.dtype = 0 # Voltage
1697 1697 self.processingHeaderObj.blockSize = self.__getBlockSize()
1698 1698 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1699 1699 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1700 1700 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1701 1701 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1702 1702 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1703 1703 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1704 1704 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1705 1705
1706 1706 if self.dataOut.code != None:
1707 1707 self.processingHeaderObj.code = self.dataOut.code
1708 1708 self.processingHeaderObj.nCode = self.dataOut.nCode
1709 1709 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1710 1710 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1711 1711 processingHeaderSize += codesize
1712 1712
1713 1713 if self.processingHeaderObj.nWindows != 0:
1714 1714 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1715 1715 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1716 1716 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1717 1717 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1718 1718 processingHeaderSize += 12
1719 1719
1720 1720 self.processingHeaderObj.size = processingHeaderSize
1721 1721
1722 1722 class SpectraReader(JRODataReader):
1723 1723 """
1724 1724 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1725 1725 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1726 1726 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1727 1727
1728 1728 paresCanalesIguales * alturas * perfiles (Self Spectra)
1729 1729 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1730 1730 canales * alturas (DC Channels)
1731 1731
1732 1732 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1733 1733 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1734 1734 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1735 1735 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1736 1736
1737 1737 Example:
1738 1738 dpath = "/home/myuser/data"
1739 1739
1740 1740 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1741 1741
1742 1742 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1743 1743
1744 1744 readerObj = SpectraReader()
1745 1745
1746 1746 readerObj.setup(dpath, startTime, endTime)
1747 1747
1748 1748 while(True):
1749 1749
1750 1750 readerObj.getData()
1751 1751
1752 1752 print readerObj.data_spc
1753 1753
1754 1754 print readerObj.data_cspc
1755 1755
1756 1756 print readerObj.data_dc
1757 1757
1758 1758 if readerObj.flagNoMoreFiles:
1759 1759 break
1760 1760
1761 1761 """
1762 1762
1763 1763 pts2read_SelfSpectra = 0
1764 1764
1765 1765 pts2read_CrossSpectra = 0
1766 1766
1767 1767 pts2read_DCchannels = 0
1768 1768
1769 1769 ext = ".pdata"
1770 1770
1771 1771 optchar = "P"
1772 1772
1773 1773 dataOut = None
1774 1774
1775 1775 nRdChannels = None
1776 1776
1777 1777 nRdPairs = None
1778 1778
1779 1779 rdPairList = []
1780 1780
1781 1781
1782 1782 def __init__(self):
1783 1783 """
1784 1784 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1785 1785
1786 1786 Inputs:
1787 1787 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1788 1788 almacenar un perfil de datos cada vez que se haga un requerimiento
1789 1789 (getData). El perfil sera obtenido a partir del buffer de datos,
1790 1790 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1791 1791 bloque de datos.
1792 1792 Si este parametro no es pasado se creara uno internamente.
1793 1793
1794 1794 Affected:
1795 1795 self.dataOut
1796 1796
1797 1797 Return : None
1798 1798 """
1799 1799
1800 1800 self.isConfig = False
1801 1801
1802 1802 self.pts2read_SelfSpectra = 0
1803 1803
1804 1804 self.pts2read_CrossSpectra = 0
1805 1805
1806 1806 self.pts2read_DCchannels = 0
1807 1807
1808 1808 self.datablock = None
1809 1809
1810 1810 self.utc = None
1811 1811
1812 1812 self.ext = ".pdata"
1813 1813
1814 1814 self.optchar = "P"
1815 1815
1816 1816 self.basicHeaderObj = BasicHeader(LOCALTIME)
1817 1817
1818 1818 self.systemHeaderObj = SystemHeader()
1819 1819
1820 1820 self.radarControllerHeaderObj = RadarControllerHeader()
1821 1821
1822 1822 self.processingHeaderObj = ProcessingHeader()
1823 1823
1824 1824 self.online = 0
1825 1825
1826 1826 self.fp = None
1827 1827
1828 1828 self.idFile = None
1829 1829
1830 1830 self.dtype = None
1831 1831
1832 1832 self.fileSizeByHeader = None
1833 1833
1834 1834 self.filenameList = []
1835 1835
1836 1836 self.filename = None
1837 1837
1838 1838 self.fileSize = None
1839 1839
1840 1840 self.firstHeaderSize = 0
1841 1841
1842 1842 self.basicHeaderSize = 24
1843 1843
1844 1844 self.pathList = []
1845 1845
1846 1846 self.lastUTTime = 0
1847 1847
1848 1848 self.maxTimeStep = 30
1849 1849
1850 1850 self.flagNoMoreFiles = 0
1851 1851
1852 1852 self.set = 0
1853 1853
1854 1854 self.path = None
1855 1855
1856 1856 self.delay = 60 #seconds
1857 1857
1858 1858 self.nTries = 3 #quantity tries
1859 1859
1860 1860 self.nFiles = 3 #number of files for searching
1861 1861
1862 1862 self.nReadBlocks = 0
1863 1863
1864 1864 self.flagIsNewFile = 1
1865 1865
1866 1866 self.ippSeconds = 0
1867 1867
1868 1868 self.flagTimeBlock = 0
1869 1869
1870 1870 self.flagIsNewBlock = 0
1871 1871
1872 1872 self.nTotalBlocks = 0
1873 1873
1874 1874 self.blocksize = 0
1875 1875
1876 1876 self.dataOut = self.createObjByDefault()
1877 1877
1878 1878
1879 1879 def createObjByDefault(self):
1880 1880
1881 1881 dataObj = Spectra()
1882 1882
1883 1883 return dataObj
1884 1884
1885 1885 def __hasNotDataInBuffer(self):
1886 1886 return 1
1887 1887
1888 1888
1889 1889 def getBlockDimension(self):
1890 1890 """
1891 1891 Obtiene la cantidad de puntos a leer por cada bloque de datos
1892 1892
1893 1893 Affected:
1894 1894 self.nRdChannels
1895 1895 self.nRdPairs
1896 1896 self.pts2read_SelfSpectra
1897 1897 self.pts2read_CrossSpectra
1898 1898 self.pts2read_DCchannels
1899 1899 self.blocksize
1900 1900 self.dataOut.nChannels
1901 1901 self.dataOut.nPairs
1902 1902
1903 1903 Return:
1904 1904 None
1905 1905 """
1906 1906 self.nRdChannels = 0
1907 1907 self.nRdPairs = 0
1908 1908 self.rdPairList = []
1909 1909
1910 1910 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1911 1911 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1912 1912 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1913 1913 else:
1914 1914 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1915 1915 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1916 1916
1917 1917 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1918 1918
1919 1919 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1920 1920 self.blocksize = self.pts2read_SelfSpectra
1921 1921
1922 1922 if self.processingHeaderObj.flag_cspc:
1923 1923 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1924 1924 self.blocksize += self.pts2read_CrossSpectra
1925 1925
1926 1926 if self.processingHeaderObj.flag_dc:
1927 1927 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1928 1928 self.blocksize += self.pts2read_DCchannels
1929 1929
1930 1930 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1931 1931
1932 1932
1933 1933 def readBlock(self):
1934 1934 """
1935 1935 Lee el bloque de datos desde la posicion actual del puntero del archivo
1936 1936 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1937 1937 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1938 1938 es seteado a 0
1939 1939
1940 1940 Return: None
1941 1941
1942 1942 Variables afectadas:
1943 1943
1944 1944 self.flagIsNewFile
1945 1945 self.flagIsNewBlock
1946 1946 self.nTotalBlocks
1947 1947 self.data_spc
1948 1948 self.data_cspc
1949 1949 self.data_dc
1950 1950
1951 1951 Exceptions:
1952 1952 Si un bloque leido no es un bloque valido
1953 1953 """
1954 1954 blockOk_flag = False
1955 1955 fpointer = self.fp.tell()
1956 1956
1957 1957 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1958 1958 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1959 1959
1960 1960 if self.processingHeaderObj.flag_cspc:
1961 1961 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1962 1962 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1963 1963
1964 1964 if self.processingHeaderObj.flag_dc:
1965 1965 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1966 1966 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
1967 1967
1968 1968
1969 1969 if not(self.processingHeaderObj.shif_fft):
1970 1970 #desplaza a la derecha en el eje 2 determinadas posiciones
1971 1971 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1972 1972 spc = numpy.roll( spc, shift , axis=2 )
1973 1973
1974 1974 if self.processingHeaderObj.flag_cspc:
1975 1975 #desplaza a la derecha en el eje 2 determinadas posiciones
1976 1976 cspc = numpy.roll( cspc, shift, axis=2 )
1977 1977
1978 1978 # self.processingHeaderObj.shif_fft = True
1979 1979
1980 1980 spc = numpy.transpose( spc, (0,2,1) )
1981 1981 self.data_spc = spc
1982 1982
1983 1983 if self.processingHeaderObj.flag_cspc:
1984 1984 cspc = numpy.transpose( cspc, (0,2,1) )
1985 1985 self.data_cspc = cspc['real'] + cspc['imag']*1j
1986 1986 else:
1987 1987 self.data_cspc = None
1988 1988
1989 1989 if self.processingHeaderObj.flag_dc:
1990 1990 self.data_dc = dc['real'] + dc['imag']*1j
1991 1991 else:
1992 1992 self.data_dc = None
1993 1993
1994 1994 self.flagIsNewFile = 0
1995 1995 self.flagIsNewBlock = 1
1996 1996
1997 1997 self.nTotalBlocks += 1
1998 1998 self.nReadBlocks += 1
1999 1999
2000 2000 return 1
2001 2001
2002 2002
2003 2003 def getData(self):
2004 2004 """
2005 2005 Copia el buffer de lectura a la clase "Spectra",
2006 2006 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2007 2007 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2008 2008
2009 2009 Return:
2010 2010 0 : Si no hay mas archivos disponibles
2011 2011 1 : Si hizo una buena copia del buffer
2012 2012
2013 2013 Affected:
2014 2014 self.dataOut
2015 2015
2016 2016 self.flagTimeBlock
2017 2017 self.flagIsNewBlock
2018 2018 """
2019 2019
2020 2020 if self.flagNoMoreFiles:
2021 2021 self.dataOut.flagNoData = True
2022 2022 print 'Process finished'
2023 2023 return 0
2024 2024
2025 2025 self.flagTimeBlock = 0
2026 2026 self.flagIsNewBlock = 0
2027 2027
2028 2028 if self.__hasNotDataInBuffer():
2029 2029
2030 2030 if not( self.readNextBlock() ):
2031 2031 self.dataOut.flagNoData = True
2032 2032 return 0
2033 2033
2034 2034 # self.updateDataHeader()
2035 2035
2036 2036 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2037 2037
2038 2038 if self.data_dc == None:
2039 2039 self.dataOut.flagNoData = True
2040 2040 return 0
2041 2041
2042 2042 self.dataOut.data_spc = self.data_spc
2043 2043
2044 2044 self.dataOut.data_cspc = self.data_cspc
2045 2045
2046 2046 self.dataOut.data_dc = self.data_dc
2047 2047
2048 2048 self.dataOut.flagTimeBlock = self.flagTimeBlock
2049 2049
2050 2050 self.dataOut.flagNoData = False
2051 2051
2052 2052 self.dataOut.dtype = self.dtype
2053 2053
2054 2054 # self.dataOut.nChannels = self.nRdChannels
2055 2055
2056 2056 self.dataOut.nPairs = self.nRdPairs
2057 2057
2058 2058 self.dataOut.pairsList = self.rdPairList
2059 2059
2060 2060 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2061 2061
2062 2062 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2063 2063
2064 2064 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2065 2065
2066 2066 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2067 2067
2068 2068 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2069 2069
2070 2070 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2071 2071
2072 2072 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2073 2073
2074 2074 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2075 2075
2076 2076 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2077 2077
2078 2078 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2079 2079
2080 2080 self.dataOut.ippSeconds = self.ippSeconds
2081 2081
2082 2082 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2083 2083
2084 2084 # self.profileIndex += 1
2085 2085
2086 2086 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2087 2087
2088 2088 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2089 2089
2090 2090 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2091 2091
2092 2092 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2093 2093
2094 2094 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2095 2095
2096 2096 if self.processingHeaderObj.code != None:
2097 2097
2098 2098 self.dataOut.nCode = self.processingHeaderObj.nCode
2099 2099
2100 2100 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2101 2101
2102 2102 self.dataOut.code = self.processingHeaderObj.code
2103 2103
2104 2104 self.dataOut.flagDecodeData = True
2105 2105
2106 2106 return self.dataOut.data_spc
2107 2107
2108 2108
class SpectraWriter(JRODataWriter):

    """
    Writes spectra data to processed data files (.pdata). Data is always
    written to disk one complete block at a time.
    """

    # Output file extension and filename option character.
    ext = ".pdata"

    optchar = "P"

    # Shapes of the sub-blocks that make up one on-disk block
    # (set by setBlockDimension()).
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # Buffers holding the block to be written: self-spectra, cross-spectra
    # and DC channels (copied from dataOut in putData()).
    data_spc = None

    data_cspc = None

    data_dc = None

    # dataOut = None

    def __init__(self):
        """
        Initializer of the SpectraWriter class for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0  # NOTE(review): duplicated assignment (already set above)

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Always reports the buffer as complete: this writer receives one
        # full block per putData() call (see class docstring).
        return 1


    def setBlockDimension(self):
        """
        Set the dimensional shapes of the data sub-blocks that make up a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block to the currently open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        # Self-spectra: stored on disk as (channels, profiles, heights).
        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a number of positions to the right along axis 2
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # NOTE(review): '!= None' on a numpy array relies on legacy comparison
        # semantics; 'is not None' is the safe form -- confirm before changing.
        if self.data_cspc != None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a number of positions to the right along axis 2
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc != None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        # Reset buffers for the next block.
        # NOTE(review): data_dc is cleared unconditionally although its write
        # above is guarded -- confirm data_dc can never be None here.
        self.data_spc.fill(0)
        self.data_dc.fill(0)
        if self.data_cspc != None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stage a block of data and then write it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if a data block was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # NOTE(review): unlike writeBlock(), no None guards here; if
            # data_cspc/data_dc were never set this raises -- confirm callers.
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Build the processFlags bitmask from the output data type and the
        enabled processing options (decode, deflip, code, incoherent
        integration, saved DC channels).
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        # Flag values parallel to dtypeList, index for index.
        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc != None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determine the size in bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        # Bytes per component, parallel to dtypeList.
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # Points per channel/pair: heights x FFT points.
        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # Self-spectra are real valued (one component per point).
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # Cross-spectra and DC are complex (real + imag => factor 2).
        if self.dataOut.data_cspc != None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc != None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize #* datatypeValue * 2 #FIX THIS

        return blocksize

    def getDataHeader(self):

        """
        Get a copy of the First Header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage  -- NOTE(review): 0 marks Voltage even though this writer emits spectra; confirm
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # Each channel index appears twice -- presumably the (i,i)
            # auto-spectra combination; verify against the reader.
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2459 2459
class SpectraHeisWriter():
    """
    Writes Heis spectra data out as FITS files, one file per putData() call,
    under daily D<year><doy> subfolders of the configured output path.
    """

    # Legacy class-level counter, kept for backward compatibility.
    i = 0

    def __init__(self, dataOut):
        """
        Input:
            dataOut : data holder whose spectra (data_spc) will be written.
        """
        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(str):
        """
        Check whether a string can be converted to a number.

        Input:
            str : the string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # Bug fixed: this used to be an instance method declared without
        # 'self', so the instance itself was bound to 'str' and float()
        # always raised -- the method returned False for every input.
        try:
            float(str)
            return True
        except (ValueError, TypeError):
            return False

    def setup(self, wrpath):
        """
        Create the output directory if needed, remember it and reset the
        per-run file counter.

        Input:
            wrpath : output directory for the FITS files
        """
        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Build a FITS file from the current dataOut spectra -- a frequency
        column plus one power column per channel (in dB) -- and write it.

        Return:
            1 : the file was written
        """
        # Daily subfolder, e.g. D2012300 (year + day of year).
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        fullpath = os.path.join(self.wrpath, subfolder)
        if not(os.path.exists(fullpath)):
            os.mkdir(fullpath)
        self.setFile += 1
        file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, file)

        # Frequency axis centered at zero.
        # NOTE(review): built from nHeights while the column format uses
        # nFFTPoints -- assumes nHeights == nFFTPoints here; confirm.
        freq = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        fmt = str(self.dataOut.nFFTPoints) + 'E'
        col1 = self.wrObj.setColF(name="freq", format=fmt, array=freq)
        # One power column per channel, converted to dB.
        col2 = self.wrObj.writeData(name="P_Ch1", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3 = self.wrObj.writeData(name="P_Ch2", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4 = self.wrObj.writeData(name="P_Ch3", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5 = self.wrObj.writeData(name="P_Ch4", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6 = self.wrObj.writeData(name="P_Ch5", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7 = self.wrObj.writeData(name="P_Ch6", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8 = self.wrObj.writeData(name="P_Ch7", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9 = self.wrObj.writeData(name="P_Ch8", format=fmt, data=10*numpy.log10(self.dataOut.data_spc[7,:]))

        # Primary HDU image taken from channel 7 (index 6), as before.
        n = self.dataOut.data_spc[6,:]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
        self.wrObj.CFile(a,b)
        self.wrObj.wFile(filename)
        return 1
2531 2531
class FITS:
    """
    Thin convenience wrapper around pyfits for assembling a FITS file:
    build columns, a primary image HDU and a table HDU, then write them.
    """

    # Most recently supplied column metadata / payload (set by the
    # column-building methods below).
    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        pass

    def setColF(self, name, format, array):
        """Build, remember and return a float32 FITS column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=values)
        return self.col1

    def writeHeader(self,):
        # Placeholder: header writing is not implemented.
        pass

    def writeData(self, name, format, data):
        """Build, remember and return a float32 FITS column from *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=values)
        return self.col2

    def cFImage(self, n):
        """Create and return the primary HDU holding image data *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Create and return a binary-table HDU from the nine columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (primary image + table) to be written."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now