##// END OF EJS Templates
Visualiza los archivos encontrados y la fecha de cada uno de ellos.
Miguel Valdez -
r324:a3f0879e8937
parent child
Show More
@@ -1,2637 +1,2656
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether a string (or other value) can be converted to a number.

    Input:
        str : the value to analyze (normally a string)

    Return:
        True : the value is numeric (accepted by float())
        False : it is not
    """
    # NOTE(review): the parameter shadows the builtin "str"; kept unchanged
    # for backward compatibility with existing callers.
    try:
        float( str )
        return True
    except (ValueError, TypeError):
        # narrowed from a bare except: a bare except would also swallow
        # KeyboardInterrupt/SystemExit
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a data file contains data inside the specified date range.

    Inputs:
        filename : full name of a Jicamarca-format data file (.r)

        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970.
        endUTSeconds : end of the selected range, given in seconds
                       counted from 01/01/1970.

    Return:
        Boolean : returns 1 (True) when the file contains data inside the
                  given range, 0 (False) otherwise.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    # only the first basic header is needed: its utc field timestamps the file
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # half-open interval: [startUTSeconds, endUTSeconds)
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisTime = basicHeaderObj.datatime.time()
111 111
112 112 if not(sts):
113 113 print "Skipping the file %s because it has not a valid header" %(filename)
114 114 return 0
115 115
116 116 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 117 return 0
118 118
119 return 1
119 return thisTime
120 120
def getlastFileFromPath(path, ext):
    """
    Filter the folder's files, keeping only those that match the Jicamarca
    name layout "xYYYYDDDSSS.ext", and return the last one.

    Input:
        path : folder that contains the data files
        ext : extension of the files (compared case-insensitively)

    Return:
        The last valid file name (no path) sorted case-insensitively, or
        None when the folder has no valid file.
    """
    validFilelist = []

    # name layout (hex positions):  0 1234 567 89A BCDE
    #                               H YYYY DDD SSS .ext
    for thisFile in os.listdir(path):
        try:
            int(thisFile[1:5])    # year field must be numeric
            int(thisFile[5:8])    # doy field must be numeric
        except ValueError:        # narrowed from a bare except
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        # sorted()[-1] (not max) so case-insensitive ties resolve exactly
        # like the original implementation
        return sorted(validFilelist, key=str.lower)[-1]

    return None
158 158
def checkForRealPath(path, year, doy, set, ext):
    """
    Because Linux is case sensitive, resolve the actual on-disk name of a
    data file by probing every upper/lower-case combination of the folder
    prefix (none/d/D) and the file prefix (d/D for .r, p/P for .pdata).

    Example:
        actual file is .../.../D2009307/D2009307367.ext; the probes are
            .../.../d2009307367.ext
            .../.../D2009307367.ext
            .../.../d2009307/d2009307367.ext
            .../.../d2009307/D2009307367.ext
            .../.../D2009307/d2009307367.ext
            .../.../D2009307/D2009307367.ext

    Return:
        (fullfilename, filename) when one combination exists on disk;
        otherwise (None, lastTriedFilename) — the last name probed, in
        upper case (or None when the extension is unknown).
    """
    filename = None

    lowered = ext.lower()
    if lowered == ".r":          # voltage data
        filePrefixes = ['d', 'D']
    elif lowered == ".pdata":    # spectra data
        filePrefixes = ['p', 'P']
    else:
        return None, filename

    # probe every (dirPrefix, filePrefix) combination in the same order as before
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        else:
            # build the day folder name xYYYYDDD (x = d or D)
            searchPath = os.path.join(path, "%s%04d%03d" % ( dirPrefix, year, doy ))

        for filePrefix in filePrefixes:
            # candidate file name xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % ( filePrefix, year, doy, set, ext )
            fullfilename = os.path.join( searchPath, filename )

            if os.path.exists( fullfilename ):
                return fullfilename, filename

    return None, filename
215 215
def isDoyFolder(folder):
    """
    Return 1 when "folder" is named like a day-of-year folder (xYYYYDDD...),
    0 otherwise. Only requires characters 1-4 (year) and 5-7 (doy) to parse
    as integers, matching the original check.
    """
    try:
        int(folder[1:5])    # year field
        int(folder[5:8])    # doy field
    except (ValueError, TypeError):
        # narrowed from two bare excepts; TypeError keeps non-string input
        # returning 0 as before
        return 0

    return 1
228 228
class JRODataIO:
    """
    Shared state and constants for the JRO data readers and writers.

    Not usable directly: __init__ and run are abstract and raise.
    """

    c = 3E8                          # speed of light [m/s], used to derive ippSeconds

    isConfig = False                 # True once setup() has been run

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0                       # 1 when reading files while they are being written

    dtype = None                     # numpy dtype of the raw complex samples

    pathList = []                    # folders where data files were found

    filenameList = []                # full paths of the files to process

    filename = None                  # file currently opened

    ext = None                       # file extension (".r" or ".pdata")

    flagIsNewFile = 1                # 1 right after a new file is opened

    flagTimeBlock = 0                # 1 when a time gap > maxTimeStep was detected

    flagIsNewBlock = 0               # 1 right after a new block is read/written

    fp = None                        # file object of the current file

    firstHeaderSize = 0              # size in bytes of the complete first header

    basicHeaderSize = 24             # size in bytes of a basic header

    versionFile = 1103               # file-format version stamped into headers

    fileSize = None                  # size in bytes of the current file

    ippSeconds = None                # inter-pulse period in seconds

    fileSizeByHeader = None          # expected file size computed from the headers

    fileIndex = None                 # index of the current file in filenameList

    profileIndex = None              # index of the current profile inside a block

    blockIndex = None                # index of the current block inside a file

    nTotalBlocks = None              # blocks processed across all files

    maxTimeStep = 30                 # seconds; a larger gap sets flagTimeBlock

    lastUTTime = None                # utc of the previous block

    datablock = None                 # decoded data of the current block

    dataOut = None                   # output data object handed to the next stage

    blocksize = None                 # size in bytes of a data block

    def __init__(self):
        # Abstract: subclasses must provide their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: subclasses implement the processing entry point.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the current output data object.
        return self.dataOut
304 304
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files. Handles the file search (offline
    by date/time range, online by polling the newest file in the newest doy
    folder), block sequencing and header bookkeeping. Subclasses implement
    the actual block decoding (readBlock, getData, getBlockDimension).
    """

    nReadBlocks = 0              # blocks read so far from the current file

    delay = 10                   # number of seconds waiting a new file

    nTries = 3                   # quantity of tries while waiting (online)

    nFiles = 3                   # number of subsequent files to probe (online)

    flagNoMoreFiles = 0          # set to 1 when the file list is exhausted

    datetimeList = []            # datetime of each file found by the offline search

    __isFirstTimeOnline = 1      # online: 1 until the first jump-to-last-block is done

    def __init__(self):
        """Abstract constructor: concrete readers must implement it."""

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """Abstract factory for the default output object; implemented by subclasses."""
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: subclasses derive the data-block shape from the headers.
        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files found under "path" for the given date
        range and time-of-day range, together with the datetime of each one.

        Affected:
            self.filenameList
            self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing was found.
        """
        pathList = []
        dateList = []

        if not walk:
            # flat layout: files live directly in "path"
            # NOTE(review): in this branch dateList stays empty, so
            # dateList[i] below raises IndexError — confirm walk=False usage
            pathList.append(path)

        else:
            # doy layout: one xYYYYDDD folder per day under "path"
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                # '?' matches the single prefix letter (d/D/p/P)
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))
                dateList.append(thisDate)

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]
            thisDate = dateList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns the file's datetime.time, or 0
                # when the file is outside the range / has a bad header
                thisTime = isFileinThisTime(filename, startTime, endTime)

                if thisTime == 0:
                    continue

                filenameList.append(filename)
                datetimeList.append(datetime.datetime.combine(thisDate,thisTime))

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Look for the last file of the last folder (the one currently being
        written) and return it together with its name decomposition.

        Input:
            path : folder that contains the data files

            expLabel : name of the sub-experiment (subfolder)

            ext : extension of the files

            walk : when enabled, searches inside the doy subfolders (xYYYYDDD)

        Return:
            directory : the folder where the file was found
            filename : the last file of that folder
            year : the year
            doy : the day of year
            set : the set number of the file
        """
        dirList = []

        if walk:

            # keep only the doy-folder directories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # the last folder (case-insensitive sort) is the current day
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)

        else:
            fullpath = path

        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # decompose xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set



    def __setNextFileOffline(self):
        """
        Open the next verifiable file of self.filenameList.

        Return:
            1 : a file was opened, 0 : the list is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files that are empty or truncated
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the current
        folder; when no valid file shows up, wait a given delay and probe the
        next possible n files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file found yet: wait and probe the next candidates
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1):    # probe the next self.nFiles+1 possible files

                if firstTime_flag:    # the first pass retries self.nTries times
                    tries = self.nTries
                else:
                    tries = 1         # later passes only try once

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1):    # target not found: roll over to the next day's folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag


    def setNextFile(self):
        """Close the current file, open the next one and read its first header."""
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 when a new data block shows up in the current file, 0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen so the size/contents refresh while the file is being written
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def __jumpToLastBlock(self):
        # Online mode, first file only: skip ahead to the last complete block
        # so reading starts near real time.
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()

        # skip the first data block
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        factor = int(csize/neededsize)
        if factor > 0:
            # jump over all the complete (header + block) chunks available
            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0


    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, rolling to the next
        file when the current one is exhausted.

        Return:
            1 : a block is ready to be read
            0 : no more blocks/files
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        # flag discontinuities between consecutive blocks
        deltaTime = self.basicHeaderObj.utc - self.lastUTTime

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1


    def readNextBlock(self):
        # Advance to the next block and decode it (readBlock is subclass-defined).
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        # Read the processing header at the current position (default: self.fp).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar-controller header at the current position.
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header at the current position.
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read a basic header at the current position.
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)


    def __readFirstHeader(self):
        """
        Read the complete header stack at the start of a file and derive the
        sample dtype, the IPP in seconds and the expected file size.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # datatype code 0..5 selects complex int8/int16/int32/int64/float32/float64
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # ipp is presumably in km; 2*1000*ipp/c gives the round-trip time [s]
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()


    def __verifyFile(self, filename, msgFlag=True):
        """
        Return True when the file can be opened and holds at least one full
        (header + data block) worth of bytes; False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # headers not loaded yet: read them from this very file
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True

    def setup(self,
                path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                set=0,
                expLabel = "",
                ext = None,
                online = False,
                delay = 60,
                walk = True):
        """
        Configure the reader: search the files (online or offline) and open
        the first one. Exits the process (sys.exit) when nothing is found.

        Return:
            self.dataOut
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                              startTime=startTime, endTime=endTime,
                                                              set=set, expLabel=expLabel, ext=ext,
                                                              walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                datetime.datetime.combine(startDate,startTime).ctime(),
                                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        # NOTE(review): in online mode pathList/filenameList are never
        # assigned before this point — looks like a latent NameError; confirm
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut

    def getData():
        # Abstract; NOTE(review): declared without "self" — calling it through
        # an instance raises TypeError before reaching the body; confirm
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer():
        # Abstract; NOTE(review): declared without "self" (see getData)
        raise ValueError, "This method has not been implemented"

    def readBlock():
        # Abstract; NOTE(review): declared without "self" (see getData)
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        # 1 when there are no more files to read.
        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):

        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Report progress once per freshly read block.
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())

    def printInfo(self):

        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()


    def run(self, **kwargs):
        # ProcessingUnit entry point: configure on first call, then deliver
        # one data unit per call.
        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
939 958
class JRODataWriter(JRODataIO, Operation):

    """
    Writes processed data to disk files (.r or .pdata). Data is always
    written in whole blocks.
    """

    blockIndex = 0              # block number inside the current file

    path = None                 # destination base folder

    setFile = None              # set counter (SSS field of the file name)

    profilesPerBlock = None     # profiles accumulated per written block

    blocksPerFile = None        # blocks written before rolling to a new file

    nWriteBlocks = 0            # total blocks written so far
958 977
    def __init__(self, dataOut=None):
        # Abstract: concrete writers define their own constructor.
        raise ValueError, "Not implemented"
961 980
962 981
    def hasAllDataInBuffer(self):
        # Abstract: True when a whole block of data is buffered and ready to write.
        raise ValueError, "Not implemented"
965 984
966 985
    def setBlockDimension(self):
        # Abstract: subclasses define the shape of a written data block.
        raise ValueError, "Not implemented"
969 988
970 989
    def writeBlock(self):
        # Abstract: subclasses serialize one data block to self.fp.
        raise ValueError, "No implemented"
973 992
974 993
    def putData(self):
        # Abstract: subclasses accumulate incoming data until a block is full.
        raise ValueError, "No implemented"
977 996
    def getDataHeader(self):
        """
        Get a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
994 1013
995 1014 def getBasicHeader(self):
996 1015
997 1016 self.basicHeaderObj.size = self.basicHeaderSize #bytes
998 1017 self.basicHeaderObj.version = self.versionFile
999 1018 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1000 1019
1001 1020 utc = numpy.floor(self.dataOut.utctime)
1002 1021 milisecond = (self.dataOut.utctime - utc)* 1000.0
1003 1022
1004 1023 self.basicHeaderObj.utc = utc
1005 1024 self.basicHeaderObj.miliSecond = milisecond
1006 1025 self.basicHeaderObj.timeZone = 0
1007 1026 self.basicHeaderObj.dstFlag = 0
1008 1027 self.basicHeaderObj.errorCount = 0
1009 1028
    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header plus the Long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute the derived sizes before writing
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1032 1051
    def __setNewBlock(self):
        """
        Write the First Header when a new file is started; otherwise write
        only a Basic Header.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1
1055 1074
1056 1075
1057 1076 def writeNextBlock(self):
1058 1077 """
1059 1078 Selecciona el bloque siguiente de datos y los escribe en un file
1060 1079
1061 1080 Return:
1062 1081 0 : Si no hizo pudo escribir el bloque de datos
1063 1082 1 : Si no pudo escribir el bloque de datos
1064 1083 """
1065 1084 if not( self.__setNewBlock() ):
1066 1085 return 0
1067 1086
1068 1087 self.writeBlock()
1069 1088
1070 1089 return 1
1071 1090
    def setNextFile(self):
        """
        Determine and open the next file to be written.

        The file name follows the Jicamarca convention
        ``<optchar>YYYYDDDSSS<ext>`` inside a ``dYYYYDDD`` subfolder, where
        SSS is a per-day "set" counter continued from the files already
        present in that subfolder.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file could not be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue the set counter from the last file found
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        #save attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1144 1163
1145 1164 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1146 1165 """
1147 1166 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1148 1167
1149 1168 Inputs:
1150 1169 path : el path destino en el cual se escribiran los files a crear
1151 1170 format : formato en el cual sera salvado un file
1152 1171 set : el setebo del file
1153 1172
1154 1173 Return:
1155 1174 0 : Si no realizo un buen seteo
1156 1175 1 : Si realizo un buen seteo
1157 1176 """
1158 1177
1159 1178 if ext == None:
1160 1179 ext = self.ext
1161 1180
1162 1181 ext = ext.lower()
1163 1182
1164 1183 self.ext = ext
1165 1184
1166 1185 self.path = path
1167 1186
1168 1187 self.setFile = set - 1
1169 1188
1170 1189 self.blocksPerFile = blocksPerFile
1171 1190
1172 1191 self.profilesPerBlock = profilesPerBlock
1173 1192
1174 1193 self.dataOut = dataOut
1175 1194
1176 1195 if not(self.setNextFile()):
1177 1196 print "There isn't a next file"
1178 1197 return 0
1179 1198
1180 1199 self.setBlockDimension()
1181 1200
1182 1201 return 1
1183 1202
1184 1203 def run(self, dataOut, **kwargs):
1185 1204
1186 1205 if not(self.isConfig):
1187 1206
1188 1207 self.setup(dataOut, **kwargs)
1189 1208 self.isConfig = True
1190 1209
1191 1210 self.putData()
1192 1211
1193 1212 class VoltageReader(JRODataReader):
1194 1213 """
1195 1214 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1196 1215 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1197 1216 perfiles*alturas*canales) son almacenados en la variable "buffer".
1198 1217
1199 1218 perfiles * alturas * canales
1200 1219
1201 1220 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1202 1221 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1203 1222 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1204 1223 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1205 1224
1206 1225 Example:
1207 1226
1208 1227 dpath = "/home/myuser/data"
1209 1228
1210 1229 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1211 1230
1212 1231 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1213 1232
1214 1233 readerObj = VoltageReader()
1215 1234
1216 1235 readerObj.setup(dpath, startTime, endTime)
1217 1236
1218 1237 while(True):
1219 1238
1220 1239 #to get one profile
1221 1240 profile = readerObj.getData()
1222 1241
1223 1242 #print the profile
1224 1243 print profile
1225 1244
1226 1245 #If you want to see all datablock
1227 1246 print readerObj.datablock
1228 1247
1229 1248 if readerObj.flagNoMoreFiles:
1230 1249 break
1231 1250
1232 1251 """
1233 1252
1234 1253 ext = ".r"
1235 1254
1236 1255 optchar = "D"
1237 1256 dataOut = None
1238 1257
1239 1258
1240 1259 def __init__(self):
1241 1260 """
1242 1261 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1243 1262
1244 1263 Input:
1245 1264 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1246 1265 almacenar un perfil de datos cada vez que se haga un requerimiento
1247 1266 (getData). El perfil sera obtenido a partir del buffer de datos,
1248 1267 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1249 1268 bloque de datos.
1250 1269 Si este parametro no es pasado se creara uno internamente.
1251 1270
1252 1271 Variables afectadas:
1253 1272 self.dataOut
1254 1273
1255 1274 Return:
1256 1275 None
1257 1276 """
1258 1277
1259 1278 self.isConfig = False
1260 1279
1261 1280 self.datablock = None
1262 1281
1263 1282 self.utc = 0
1264 1283
1265 1284 self.ext = ".r"
1266 1285
1267 1286 self.optchar = "D"
1268 1287
1269 1288 self.basicHeaderObj = BasicHeader(LOCALTIME)
1270 1289
1271 1290 self.systemHeaderObj = SystemHeader()
1272 1291
1273 1292 self.radarControllerHeaderObj = RadarControllerHeader()
1274 1293
1275 1294 self.processingHeaderObj = ProcessingHeader()
1276 1295
1277 1296 self.online = 0
1278 1297
1279 1298 self.fp = None
1280 1299
1281 1300 self.idFile = None
1282 1301
1283 1302 self.dtype = None
1284 1303
1285 1304 self.fileSizeByHeader = None
1286 1305
1287 1306 self.filenameList = []
1288 1307
1289 1308 self.filename = None
1290 1309
1291 1310 self.fileSize = None
1292 1311
1293 1312 self.firstHeaderSize = 0
1294 1313
1295 1314 self.basicHeaderSize = 24
1296 1315
1297 1316 self.pathList = []
1298 1317
1299 1318 self.filenameList = []
1300 1319
1301 1320 self.lastUTTime = 0
1302 1321
1303 1322 self.maxTimeStep = 30
1304 1323
1305 1324 self.flagNoMoreFiles = 0
1306 1325
1307 1326 self.set = 0
1308 1327
1309 1328 self.path = None
1310 1329
1311 1330 self.profileIndex = 2**32-1
1312 1331
1313 1332 self.delay = 3 #seconds
1314 1333
1315 1334 self.nTries = 3 #quantity tries
1316 1335
1317 1336 self.nFiles = 3 #number of files for searching
1318 1337
1319 1338 self.nReadBlocks = 0
1320 1339
1321 1340 self.flagIsNewFile = 1
1322 1341
1323 1342 self.__isFirstTimeOnline = 1
1324 1343
1325 1344 self.ippSeconds = 0
1326 1345
1327 1346 self.flagTimeBlock = 0
1328 1347
1329 1348 self.flagIsNewBlock = 0
1330 1349
1331 1350 self.nTotalBlocks = 0
1332 1351
1333 1352 self.blocksize = 0
1334 1353
1335 1354 self.dataOut = self.createObjByDefault()
1336 1355
1337 1356 def createObjByDefault(self):
1338 1357
1339 1358 dataObj = Voltage()
1340 1359
1341 1360 return dataObj
1342 1361
1343 1362 def __hasNotDataInBuffer(self):
1344 1363 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1345 1364 return 1
1346 1365 return 0
1347 1366
1348 1367
1349 1368 def getBlockDimension(self):
1350 1369 """
1351 1370 Obtiene la cantidad de puntos a leer por cada bloque de datos
1352 1371
1353 1372 Affected:
1354 1373 self.blocksize
1355 1374
1356 1375 Return:
1357 1376 None
1358 1377 """
1359 1378 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1360 1379 self.blocksize = pts2read
1361 1380
1362 1381
1363 1382 def readBlock(self):
1364 1383 """
1365 1384 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1366 1385 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1367 1386 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1368 1387 es seteado a 0
1369 1388
1370 1389 Inputs:
1371 1390 None
1372 1391
1373 1392 Return:
1374 1393 None
1375 1394
1376 1395 Affected:
1377 1396 self.profileIndex
1378 1397 self.datablock
1379 1398 self.flagIsNewFile
1380 1399 self.flagIsNewBlock
1381 1400 self.nTotalBlocks
1382 1401
1383 1402 Exceptions:
1384 1403 Si un bloque leido no es un bloque valido
1385 1404 """
1386 1405
1387 1406 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1388 1407
1389 1408 try:
1390 1409 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1391 1410 except:
1392 1411 print "The read block (%3d) has not enough data" %self.nReadBlocks
1393 1412 return 0
1394 1413
1395 1414 junk = numpy.transpose(junk, (2,0,1))
1396 1415 self.datablock = junk['real'] + junk['imag']*1j
1397 1416
1398 1417 self.profileIndex = 0
1399 1418
1400 1419 self.flagIsNewFile = 0
1401 1420 self.flagIsNewBlock = 1
1402 1421
1403 1422 self.nTotalBlocks += 1
1404 1423 self.nReadBlocks += 1
1405 1424
1406 1425 return 1
1407 1426
1408 1427
1409 1428 def getData(self):
1410 1429 """
1411 1430 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1412 1431 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1413 1432 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1414 1433
1415 1434 Ademas incrementa el contador del buffer en 1.
1416 1435
1417 1436 Return:
1418 1437 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1419 1438 buffer. Si no hay mas archivos a leer retorna None.
1420 1439
1421 1440 Variables afectadas:
1422 1441 self.dataOut
1423 1442 self.profileIndex
1424 1443
1425 1444 Affected:
1426 1445 self.dataOut
1427 1446 self.profileIndex
1428 1447 self.flagTimeBlock
1429 1448 self.flagIsNewBlock
1430 1449 """
1431 1450
1432 1451 if self.flagNoMoreFiles:
1433 1452 self.dataOut.flagNoData = True
1434 1453 print 'Process finished'
1435 1454 return 0
1436 1455
1437 1456 self.flagTimeBlock = 0
1438 1457 self.flagIsNewBlock = 0
1439 1458
1440 1459 if self.__hasNotDataInBuffer():
1441 1460
1442 1461 if not( self.readNextBlock() ):
1443 1462 return 0
1444 1463
1445 1464 self.dataOut.dtype = self.dtype
1446 1465
1447 1466 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1448 1467
1449 1468 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1450 1469
1451 1470 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1452 1471
1453 1472 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1454 1473
1455 1474 self.dataOut.flagTimeBlock = self.flagTimeBlock
1456 1475
1457 1476 self.dataOut.ippSeconds = self.ippSeconds
1458 1477
1459 1478 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1460 1479
1461 1480 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1462 1481
1463 1482 self.dataOut.flagShiftFFT = False
1464 1483
1465 1484 if self.radarControllerHeaderObj.code != None:
1466 1485
1467 1486 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1468 1487
1469 1488 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1470 1489
1471 1490 self.dataOut.code = self.radarControllerHeaderObj.code
1472 1491
1473 1492 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1474 1493
1475 1494 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1476 1495
1477 1496 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1478 1497
1479 1498 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1480 1499
1481 1500 self.dataOut.flagShiftFFT = False
1482 1501
1483 1502
1484 1503 # self.updateDataHeader()
1485 1504
1486 1505 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1487 1506
1488 1507 if self.datablock == None:
1489 1508 self.dataOut.flagNoData = True
1490 1509 return 0
1491 1510
1492 1511 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1493 1512
1494 1513 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1495 1514
1496 1515 self.profileIndex += 1
1497 1516
1498 1517 self.dataOut.flagNoData = False
1499 1518
1500 1519 # print self.profileIndex, self.dataOut.utctime
1501 1520 # if self.profileIndex == 800:
1502 1521 # a=1
1503 1522
1504 1523
1505 1524 return self.dataOut.data
1506 1525
1507 1526
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Writing is always done one
    block at a time.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None


    def __init__(self):
        """
        VoltageWriter initializer.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # A full block of profiles has been accumulated.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Set the dimensional shapes of the sub-blocks that form a data block
        and allocate the write buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # On-disk layout: (profiles, heights, channels).
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # In-memory layout: (channels, profiles, heights).
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))


    def writeBlock(self):
        """
        Write the buffered data block to the current file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # Reorder from (channels, profiles, heights) to the on-disk layout.
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Accumulate one profile into the block buffer and, once the block is
        complete, write it to the file.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available or no more files can be written
            1 : the profile was buffered (and possibly the block written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A time gap starts a fresh file with an empty buffer.
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        """Build the PROCFLAG bitmask describing dtype and processing state."""

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if self.dataOut.dtype matches none of the entries,
        # dtypeValue stays unbound and a NameError is raised below.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "is not None" (not "!= None"): code is a numpy array when present.
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of a Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8] #bytes per component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # x2 because each sample stores a real and an imaginary component.
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # source data is of type Voltage

        # "is not None" (not "!= None"): code is a numpy array when present.
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1772 1791
1773 1792 class SpectraReader(JRODataReader):
1774 1793 """
1775 1794 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1776 1795 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1777 1796 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1778 1797
1779 1798 paresCanalesIguales * alturas * perfiles (Self Spectra)
1780 1799 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1781 1800 canales * alturas (DC Channels)
1782 1801
1783 1802 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1784 1803 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1785 1804 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1786 1805 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1787 1806
1788 1807 Example:
1789 1808 dpath = "/home/myuser/data"
1790 1809
1791 1810 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1792 1811
1793 1812 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1794 1813
1795 1814 readerObj = SpectraReader()
1796 1815
1797 1816 readerObj.setup(dpath, startTime, endTime)
1798 1817
1799 1818 while(True):
1800 1819
1801 1820 readerObj.getData()
1802 1821
1803 1822 print readerObj.data_spc
1804 1823
1805 1824 print readerObj.data_cspc
1806 1825
1807 1826 print readerObj.data_dc
1808 1827
1809 1828 if readerObj.flagNoMoreFiles:
1810 1829 break
1811 1830
1812 1831 """
1813 1832
1814 1833 pts2read_SelfSpectra = 0
1815 1834
1816 1835 pts2read_CrossSpectra = 0
1817 1836
1818 1837 pts2read_DCchannels = 0
1819 1838
1820 1839 ext = ".pdata"
1821 1840
1822 1841 optchar = "P"
1823 1842
1824 1843 dataOut = None
1825 1844
1826 1845 nRdChannels = None
1827 1846
1828 1847 nRdPairs = None
1829 1848
1830 1849 rdPairList = []
1831 1850
1832 1851
    def __init__(self):
        """
        SpectraReader initializer.

        Creates a default Spectra object (self.dataOut) that receives one
        block of data (plus metadata) each time getData() is requested; the
        buffers are refilled from disk when needed.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()
1930 1949
1931 1950
1932 1951 def createObjByDefault(self):
1933 1952
1934 1953 dataObj = Spectra()
1935 1954
1936 1955 return dataObj
1937 1956
    def __hasNotDataInBuffer(self):
        # Spectra files are consumed one whole block per getData() call, so
        # the buffer is always reported as empty (a new block is always read).
        return 1
1940 1959
1941 1960
1942 1961 def getBlockDimension(self):
1943 1962 """
1944 1963 Obtiene la cantidad de puntos a leer por cada bloque de datos
1945 1964
1946 1965 Affected:
1947 1966 self.nRdChannels
1948 1967 self.nRdPairs
1949 1968 self.pts2read_SelfSpectra
1950 1969 self.pts2read_CrossSpectra
1951 1970 self.pts2read_DCchannels
1952 1971 self.blocksize
1953 1972 self.dataOut.nChannels
1954 1973 self.dataOut.nPairs
1955 1974
1956 1975 Return:
1957 1976 None
1958 1977 """
1959 1978 self.nRdChannels = 0
1960 1979 self.nRdPairs = 0
1961 1980 self.rdPairList = []
1962 1981
1963 1982 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1964 1983 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1965 1984 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1966 1985 else:
1967 1986 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1968 1987 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1969 1988
1970 1989 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1971 1990
1972 1991 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1973 1992 self.blocksize = self.pts2read_SelfSpectra
1974 1993
1975 1994 if self.processingHeaderObj.flag_cspc:
1976 1995 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1977 1996 self.blocksize += self.pts2read_CrossSpectra
1978 1997
1979 1998 if self.processingHeaderObj.flag_dc:
1980 1999 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1981 2000 self.blocksize += self.pts2read_DCchannels
1982 2001
1983 2002 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1984 2003
1985 2004
1986 2005 def readBlock(self):
1987 2006 """
1988 2007 Lee el bloque de datos desde la posicion actual del puntero del archivo
1989 2008 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1990 2009 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1991 2010 es seteado a 0
1992 2011
1993 2012 Return: None
1994 2013
1995 2014 Variables afectadas:
1996 2015
1997 2016 self.flagIsNewFile
1998 2017 self.flagIsNewBlock
1999 2018 self.nTotalBlocks
2000 2019 self.data_spc
2001 2020 self.data_cspc
2002 2021 self.data_dc
2003 2022
2004 2023 Exceptions:
2005 2024 Si un bloque leido no es un bloque valido
2006 2025 """
2007 2026 blockOk_flag = False
2008 2027 fpointer = self.fp.tell()
2009 2028
2010 2029 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2011 2030 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2012 2031
2013 2032 if self.processingHeaderObj.flag_cspc:
2014 2033 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2015 2034 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2016 2035
2017 2036 if self.processingHeaderObj.flag_dc:
2018 2037 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2019 2038 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
2020 2039
2021 2040
2022 2041 if not(self.processingHeaderObj.shif_fft):
2023 2042 #desplaza a la derecha en el eje 2 determinadas posiciones
2024 2043 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2025 2044 spc = numpy.roll( spc, shift , axis=2 )
2026 2045
2027 2046 if self.processingHeaderObj.flag_cspc:
2028 2047 #desplaza a la derecha en el eje 2 determinadas posiciones
2029 2048 cspc = numpy.roll( cspc, shift, axis=2 )
2030 2049
2031 2050 # self.processingHeaderObj.shif_fft = True
2032 2051
2033 2052 spc = numpy.transpose( spc, (0,2,1) )
2034 2053 self.data_spc = spc
2035 2054
2036 2055 if self.processingHeaderObj.flag_cspc:
2037 2056 cspc = numpy.transpose( cspc, (0,2,1) )
2038 2057 self.data_cspc = cspc['real'] + cspc['imag']*1j
2039 2058 else:
2040 2059 self.data_cspc = None
2041 2060
2042 2061 if self.processingHeaderObj.flag_dc:
2043 2062 self.data_dc = dc['real'] + dc['imag']*1j
2044 2063 else:
2045 2064 self.data_dc = None
2046 2065
2047 2066 self.flagIsNewFile = 0
2048 2067 self.flagIsNewBlock = 1
2049 2068
2050 2069 self.nTotalBlocks += 1
2051 2070 self.nReadBlocks += 1
2052 2071
2053 2072 return 1
2054 2073
2055 2074
2056 2075 def getData(self):
2057 2076 """
2058 2077 Copia el buffer de lectura a la clase "Spectra",
2059 2078 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2060 2079 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2061 2080
2062 2081 Return:
2063 2082 0 : Si no hay mas archivos disponibles
2064 2083 1 : Si hizo una buena copia del buffer
2065 2084
2066 2085 Affected:
2067 2086 self.dataOut
2068 2087
2069 2088 self.flagTimeBlock
2070 2089 self.flagIsNewBlock
2071 2090 """
2072 2091
2073 2092 if self.flagNoMoreFiles:
2074 2093 self.dataOut.flagNoData = True
2075 2094 print 'Process finished'
2076 2095 return 0
2077 2096
2078 2097 self.flagTimeBlock = 0
2079 2098 self.flagIsNewBlock = 0
2080 2099
2081 2100 if self.__hasNotDataInBuffer():
2082 2101
2083 2102 if not( self.readNextBlock() ):
2084 2103 self.dataOut.flagNoData = True
2085 2104 return 0
2086 2105
2087 2106 # self.updateDataHeader()
2088 2107
2089 2108 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2090 2109
2091 2110 if self.data_dc == None:
2092 2111 self.dataOut.flagNoData = True
2093 2112 return 0
2094 2113
2095 2114 self.dataOut.data_spc = self.data_spc
2096 2115
2097 2116 self.dataOut.data_cspc = self.data_cspc
2098 2117
2099 2118 self.dataOut.data_dc = self.data_dc
2100 2119
2101 2120 self.dataOut.flagTimeBlock = self.flagTimeBlock
2102 2121
2103 2122 self.dataOut.flagNoData = False
2104 2123
2105 2124 self.dataOut.dtype = self.dtype
2106 2125
2107 2126 # self.dataOut.nChannels = self.nRdChannels
2108 2127
2109 2128 self.dataOut.nPairs = self.nRdPairs
2110 2129
2111 2130 self.dataOut.pairsList = self.rdPairList
2112 2131
2113 2132 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2114 2133
2115 2134 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2116 2135
2117 2136 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2118 2137
2119 2138 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2120 2139
2121 2140 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2122 2141
2123 2142 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2124 2143
2125 2144 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2126 2145
2127 2146 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2128 2147
2129 2148 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2130 2149
2131 2150 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2132 2151
2133 2152 self.dataOut.ippSeconds = self.ippSeconds
2134 2153
2135 2154 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2136 2155
2137 2156 # self.profileIndex += 1
2138 2157
2139 2158 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2140 2159
2141 2160 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2142 2161
2143 2162 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2144 2163
2145 2164 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2146 2165
2147 2166 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2148 2167
2149 2168 if self.processingHeaderObj.code != None:
2150 2169
2151 2170 self.dataOut.nCode = self.processingHeaderObj.nCode
2152 2171
2153 2172 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2154 2173
2155 2174 self.dataOut.code = self.processingHeaderObj.code
2156 2175
2157 2176 self.dataOut.flagDecodeData = True
2158 2177
2159 2178 return self.dataOut.data_spc
2160 2179
2161 2180
class SpectraWriter(JRODataWriter):

    """
    Write spectra data out to processed files (.pdata). Data is always
    written one whole block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # Shapes of the sub-blocks (self-spectra, cross-spectra, DC channels)
    # composing one data block; filled in by setBlockDimension().
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    data_spc = None

    data_cspc = None

    data_dc = None

    def __init__(self):
        """
        Initialize the SpectraWriter for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # A spectra block is complete as soon as it arrives.
        return 1


    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks that compose one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the current buffer to the designated file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # shift a fixed number of positions to the right along axis 2
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # FIX: numpy arrays must be tested against None with "is/is not";
        # "!=" triggers an elementwise comparison.
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # FIX: guard the optional buffers -- the reader legitimately leaves
        # data_dc/data_cspc as None, and fill() on None raises AttributeError.
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stage one block of data and then write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : there is no data, or no more files can be written
            1 : one block of data was written to file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            self.data_spc.fill(0)
            # optional buffers may be None -- guard before clearing
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # cross-spectra / DC data may be absent (None) on the incoming object
        self.data_cspc = None if self.dataOut.data_cspc is None else self.dataOut.data_cspc.copy()
        self.data_dc = None if self.dataOut.data_dc is None else self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """Build the PROCFLAG bitmask that describes the output data."""

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if self.dataOut.dtype matches none of the entries,
        # dtypeValue stays unbound and a NameError follows -- confirm the
        # dtype is always one of the six supported kinds.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # FIX: "is not None" for (possible) numpy arrays
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determine the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]   # bytes per component of each dtype
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # FIX: "is not None" for numpy arrays
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header for the output file.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        # NOTE(review): dtype code 0 is labelled "Voltage" -- confirm this is
        # the intended code for Spectra output.
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # needed to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # each self-spectrum is encoded as a (channel, channel) pair
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # FIX: "is not None" for the (possible) code array
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2512 2531
class SpectraHeisWriter():
    """
    Write Spectra-Heis data to FITS files, one file per processed block,
    stored under D<year><doy> subfolders of the configured output path.
    """

    i = 0

    def __init__(self, dataOut):

        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(str):
        """
        Check whether the given string can be converted to a number.

        Input:
            str : string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # FIX: declared as @staticmethod -- the original definition lacked
        # "self", so calling it on an instance would have bound the instance
        # to the "str" parameter.
        try:
            float( str )
            return True
        except:
            return False

    def setup(self, wrpath):
        """Create the output directory if needed and reset the file counter."""

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Write the current block of self.dataOut to a new FITS file.

        Return:
            1 : the file was written
        """
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)

        fullpath = os.path.join( self.wrpath, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
        self.setFile += 1
        # local renamed from "file" to avoid shadowing the builtin
        thisFile = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,subfolder, thisFile)

        # frequency axis. NOTE(review): built from nHeights while the table
        # columns use nFFTPoints -- confirm both refer to the same length.
        freq = numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))

        n=self.dataOut.data_spc[6,:]
        a=self.wrObj.cFImage(n)
        b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
        self.wrObj.CFile(a,b)
        self.wrObj.wFile(filename)
        return 1
2584 2603
class FITS:
    """
    Small helper that assembles a FITS file with pyfits: a primary image
    HDU plus one binary table of float32 columns, then writes it to disk.
    """

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):

        pass

    def setColF(self, name, format, array):
        """Build a float32 pyfits Column from *array* and return it."""
        self.name = name
        self.format = format
        self.array = array
        packed = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=packed)
        return self.col1

    def writeHeader(self,):
        # Header writing is not implemented.
        pass

    def writeData(self, name, format, data):
        """Build a float32 pyfits Column from *data* and return it."""
        self.name = name
        self.format = format
        self.data = data
        packed = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=packed)
        return self.col2

    def cFImage(self, n):
        """Wrap *n* in a primary HDU and return it."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Group the nine columns into a new binary table HDU."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Combine the image HDU and the table HDU into one HDU list."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
General Comments 0
You need to be logged in to leave comments. Login now