##// END OF EJS Templates
Se importa modulo ElementTree a jrodataIO para leer archivo de configuracion FITS
Daniel Valdez -
r352:c7267cf690c5
parent child
Show More
@@ -1,2920 +1,2921
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 from xml.etree.ElementTree import Element, SubElement, ElementTree
13 14 try:
14 15 import pyfits
15 16 except:
16 17 print "pyfits module has not been imported, it should be installed to save files in fits format"
17 18
18 19 from jrodata import *
19 20 from jroheaderIO import *
20 21 from jroprocessing import *
21 22
22 23 LOCALTIME = True #-18000
23 24
def isNumber(str):
    """
    Check whether a string (or any value) can be converted to a number.

    Input:
        str : value to test for numeric convertibility (usually a string)

    Return:
        True  : the value can be converted with float()
        False : it cannot
    """
    try:
        float( str )
        return True
    except (ValueError, TypeError):
        # ValueError: non-numeric text; TypeError: non-string/non-number input.
        # A bare except here would also swallow KeyboardInterrupt/SystemExit.
        return False
42 43
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file contains data inside the given date range.

    Inputs:
        filename       : full path of the data file in Jicamarca format (.r)

        startUTSeconds : start of the selected range, given in seconds
                         counted from 01/01/1970.
        endUTSeconds   : end of the selected range, given in seconds
                         counted from 01/01/1970.

    Return:
        Boolean : 1 if the file's basic-header timestamp falls inside the
                  range, 0 otherwise (including files whose header is invalid).

    Exceptions:
        IOError if the file does not exist or cannot be opened.

    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    # Only the first basic header is needed to get the file's timestamp.
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        # Invalid/unreadable header: treat the file as outside the range.
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # Half-open interval: start inclusive, end exclusive.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
82 83
83 84 def isFileinThisTime(filename, startTime, endTime):
84 85 """
85 86 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
86 87
87 88 Inputs:
88 89 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
89 90
90 91 startTime : tiempo inicial del rango seleccionado en formato datetime.time
91 92
92 93 endTime : tiempo final del rango seleccionado en formato datetime.time
93 94
94 95 Return:
95 96 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
96 97 fecha especificado, de lo contrario retorna False.
97 98
98 99 Excepciones:
99 100 Si el archivo no existe o no puede ser abierto
100 101 Si la cabecera no puede ser leida.
101 102
102 103 """
103 104
104 105
105 106 try:
106 107 fp = open(filename,'rb')
107 108 except:
108 109 raise IOError, "The file %s can't be opened" %(filename)
109 110
110 111 basicHeaderObj = BasicHeader(LOCALTIME)
111 112 sts = basicHeaderObj.read(fp)
112 113 fp.close()
113 114
114 115 thisDatetime = basicHeaderObj.datatime
115 116 thisTime = basicHeaderObj.datatime.time()
116 117
117 118 if not(sts):
118 119 print "Skipping the file %s because it has not a valid header" %(filename)
119 120 return None
120 121
121 122 if not ((startTime <= thisTime) and (endTime > thisTime)):
122 123 return None
123 124
124 125 return thisDatetime
125 126
def getlastFileFromPath(path, ext):
    """
    Filter the entries of `path`, keeping only those matching the
    "xYYYYDDDSSS.ext" naming pattern, and return the last one.

    Input:
        path : folder containing the candidate data files
        ext  : extension of the files to consider (case-insensitive)

    Return:
        The last (lexicographically, case-insensitive) matching filename,
        without the path, or None if no file matches.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:  # renamed from `file` to avoid shadowing the builtin
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except ValueError:
            # Name does not follow the xYYYYDDD... pattern.
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        validFilelist = sorted( validFilelist, key=str.lower )
        return validFilelist[-1]

    return None
163 164
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Because Linux is case sensitive, probe the possible upper/lower-case
    combinations of the folder prefix (d/D, or no subfolder) and the file
    prefix (d/D for .r, p/P for .pdata) to locate the exact path of a file.

    Example:
        actual file is .../.../D2009307/P2009307367.ext

        The function then tries, in order:
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext

    Return:
        (fullfilename, filename) when a combination exists on disk;
        otherwise (None, last filename combination tried) — or (None, None)
        when the extension is not recognized.
    """
    # Pick the file prefix set from the extension (guard clauses).
    lowered = ext.lower()
    if lowered == ".r":          # voltage
        filePrefixes = ['d', 'D']
    elif lowered == ".pdata":    # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    # Scan every (folder prefix, file prefix) combination; return on first hit.
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        elif foldercounter == 0:
            # subfolder xYYYYDDD (x = d or D)
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            searchPath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            # candidate file name xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    # Nothing matched: report the last combination that was tried.
    return None, filename
222 223
def isDoyFolder(folder):
    """
    Return 1 if `folder` is named like a day-of-year folder ("xYYYYDDD..."),
    i.e. characters 1-4 parse as a year and 5-7 as a day number; else 0.
    """
    try:
        # Both fields must be numeric; short names yield int('') -> ValueError.
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except (ValueError, TypeError):
        # Narrowed from two bare excepts: only conversion failures mean
        # "not a DOY folder"; anything else should propagate.
        return 0

    return 1
235 236
class JRODataIO:
    """
    Base class holding the state shared by JRO data readers and writers:
    the four file headers, the open file handle, and block/profile counters.
    Subclasses implement __init__ and run.
    """

    c = 3E8  # speed of light [m/s]; used to derive ippSeconds from the IPP

    isConfig = False  # set True after the first setup()/run() call

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0  # 1 = poll for growing/new files, 0 = fixed offline file list

    dtype = None  # numpy dtype of the raw samples, derived from processFlags

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1  # 1 right after a file is opened, cleared once read

    flagTimeBlock = 0  # 1 when the gap between blocks exceeded maxTimeStep

    flagIsNewBlock = 0

    fp = None  # current open file object

    firstHeaderSize = 0

    basicHeaderSize = 24  # fixed size in bytes of the basic header

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None  # expected file size computed from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30  # seconds; larger inter-block gaps raise flagTimeBlock

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract: subclasses must provide their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: subclasses must implement the processing entry point.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Accessor for the output data object filled by subclasses.
        return self.dataOut
311 312
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca raw data files (.r voltage / .pdata spectra).

    Locates data files either offline (by date/time range, optionally walking
    xYYYYDDD day folders) or online (polling the newest file and waiting for
    new blocks), then reads them block by block, re-reading the headers as
    needed.  Subclasses must implement getData, hasNotDataInBuffer, readBlock
    and getBlockDimension.
    """

    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    path = None

    foldercounter = 0

    flagNoMoreFiles = 0

    # NOTE(review): mutable class-level list — shared across instances until
    # setup() rebinds it per instance; confirm no two readers run concurrently.
    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    def __init__(self):

        """
        Abstract: subclasses must provide their own constructor.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: subclasses create their default dataOut object here.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: subclasses compute the data block shape from the headers.
        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files for the offline mode.

        With walk=True, scans `path` for day folders (xYYYYDDD) between
        startDate and endDate; with walk=False, uses `path` directly.  Each
        candidate file is then filtered by [startTime, endTime) via
        isFileinThisTime.

        Affected:
            self.filenameList, self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing matched.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # Keep only subdirectories that look like day-of-year folders.
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # One ?YYYYDDD* match set per day in the requested range.
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                if len(matchlist) == 0:
                    thisDate += datetime.timedelta(1)
                    continue
                for match in matchlist:
                    pathList.append(os.path.join(path,match,expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns the header datetime, or None when
                # the file is outside [startTime, endTime).
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Find the newest file of the newest folder and return it together with
        its decoded name fields (for the online/polling mode).

        Input:
            path     : folder containing the data files

            expLabel : name of the sub-experiment (subfolder)

            ext      : file extension

            walk     : when True, search inside day-folder subdirectories
                       (doypath); when False, use `path` directly

        Return:
            (fullpath, foldercounter, filename, year, doy, set), or a tuple of
            five None values when no valid file was found.

            NOTE(review): the failure tuple has 5 elements but the success
            tuple has 6 — callers appear to unpack 6 (see setup); confirm.
        """
        dirList = []

        if not walk:
            fullpath = path

        else:
            # Keep only day-of-year subdirectories.
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # Newest folder; a "_NN" suffix encodes the folder counter.
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # Decode xYYYYDDDSSS.ext into its numeric fields.
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set

    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Return: 1 when a file was opened, 0 when the list is exhausted
        (also sets self.flagNoMoreFiles).
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # Skip files without enough data for at least one block.
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Find the next file with enough data to be read, inside a specific
        folder; when no valid file is found, wait a fixed delay and retry over
        the next few candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # Sets roll over at 999; the folder counter advances instead.
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # Look for the first available file.
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # If no file was found, wait and search again.
        if not(fileOk_flag):
            # Probe the next self.nFiles+1 candidate files.
            for nFiles in range(self.nFiles+1):

                # Only the first candidate gets the full self.nTries waits.
                if firstTime_flag:
                    tries = self.nTries
                else:
                    tries = 1

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this
                        # class — presumably set by subclasses; confirm.
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                # After exhausting the candidates, move on to the next day folder.
                if nFiles == (self.nFiles-1):
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag


    def setNextFile(self):
        """Close the current file, open the next one (online or offline) and
        read its first header.  Return 1 on success, 0 when no file is left."""
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        In offline mode it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Reopen and reseek so the size check sees data appended by the
            # acquisition process since the last read.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def __jumpToLastBlock(self):
        # On the very first online read, skip ahead to the last complete block
        # so the reader starts at (near) real time.
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()

        # Skip the first data block.
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
        else:
            return

        # Jump over as many whole (basic header + block) units as remain.
        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        factor = int(csize/neededsize)
        if factor > 0:
            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0


    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, switching files when
        the current one is exhausted.  Return 1 on success, 0 when no more
        data is available.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # Enough data left in this file: just read the next basic header.
        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # Online mode may still receive more data in this file.
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # Flag a discontinuity when the inter-block gap is too large.
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1


    def readNextBlock(self):
        """Advance to and read the next data block.  Return 1 on success."""
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __rdProcessingHeader(self, fp=None):
        # Read the processing header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar-controller header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read the basic header from fp (defaults to the current file).
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)


    def __readFirstHeader(self):
        """
        Read the four headers at the start of a file and derive the sample
        dtype, ippSeconds and the expected file size.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # The DATATYPE bits of processFlags select the raw sample width.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # IPP (km) to seconds: t = 2*ipp*1000 / c.
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()


    def __verifyFile(self, filename, msgFlag=True):
        """
        Return True when `filename` can be opened and holds at least one full
        data block; when the block size is not yet known, read this file's own
        headers to compute it.  msgFlag=False suppresses diagnostics.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # No headers read yet: parse this file's headers to size a block.
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def setup(self,
                path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                set=0,
                expLabel = "",
                ext = None,
                online = False,
                delay = 60,
                walk = True):
        """
        Configure the reader and open the first file.

        online=True polls `path` for the newest file (retrying self.nTries
        times); online=False builds the full file list for the given
        date/time range.  Exits the process when no file is found.

        Return:
            self.dataOut
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # Start one set before the found file: __setNextFileOnline pre-increments.
            self.set = set - 1
            self.path = path
            self.foldercounter = foldercounter

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                            datetime.datetime.combine(startDate,startTime).ctime(),
                            datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut

    def getBasicHeader(self):
        # Copy the per-block basic-header fields into the output object; the
        # profile index advances the timestamp by one IPP per profile.
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

    def getFirstHeader(self):
        # Abstract: subclasses copy the first-header fields into dataOut.
        raise ValueError, "This method has not been implemented"

    def getData():
        # Abstract. NOTE(review): missing `self` parameter — run() calls
        # self.getData(), so subclasses must override this; confirm.
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer():
        # Abstract. NOTE(review): missing `self` parameter — presumably
        # always overridden by subclasses; confirm.
        raise ValueError, "This method has not been implemented"

    def readBlock():
        # Abstract. NOTE(review): missing `self` parameter — presumably
        # always overridden by subclasses; confirm.
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        # True once every file has been consumed.
        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):

        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Report progress only when a fresh block was just read.
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())

    def printInfo(self):
        # Dump all four headers, once per run.
        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        self.__printInfo = False


    def run(self, **kwargs):
        """
        ProcessingUnit entry point: configure on first call, then fetch the
        next chunk of data into self.dataOut.
        """
        if not(self.isConfig):

            #            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
998 999
class JRODataWriter(JRODataIO, Operation):

    """
    This class writes data to processed-data files (.r or .pdata). Data is
    always written one block at a time.
    """

    blockIndex = 0  # number of blocks written into the current file

    path = None

    setFile = None  # set number (SSS) of the file currently being written

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
1017 1018
    def __init__(self, dataOut=None):
        # Abstract: concrete writers must provide their own constructor.
        raise ValueError, "Not implemented"
1020 1021
1021 1022
    def hasAllDataInBuffer(self):
        # Abstract: True when a full block is buffered and ready to write.
        raise ValueError, "Not implemented"
1024 1025
1025 1026
    def setBlockDimension(self):
        # Abstract: subclasses compute the output block shape here.
        raise ValueError, "Not implemented"
1028 1029
1029 1030
    def writeBlock(self):
        # Abstract: subclasses serialize one data block to self.fp here.
        raise ValueError, "No implemented"
1032 1033
1033 1034
    def putData(self):
        # Abstract: subclasses buffer incoming dataOut contents here.
        raise ValueError, "No implemented"
1036 1037
1037 1038
    def setBasicHeader(self):
        """Fill the basic header from the current dataOut state, ready to be
        written in front of the next block."""

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # Split the float timestamp into whole seconds + milliseconds.
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount
1052 1053
    def setFirstHeader(self):
        """
        Obtain a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
1069 1070
1070 1071 def __writeFirstHeader(self):
1071 1072 """
1072 1073 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1073 1074
1074 1075 Affected:
1075 1076 __dataType
1076 1077
1077 1078 Return:
1078 1079 None
1079 1080 """
1080 1081
1081 1082 # CALCULAR PARAMETROS
1082 1083
1083 1084 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1084 1085 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1085 1086
1086 1087 self.basicHeaderObj.write(self.fp)
1087 1088 self.systemHeaderObj.write(self.fp)
1088 1089 self.radarControllerHeaderObj.write(self.fp)
1089 1090 self.processingHeaderObj.write(self.fp)
1090 1091
1091 1092 self.dtype = self.dataOut.dtype
1092 1093
1093 1094 def __setNewBlock(self):
1094 1095 """
1095 1096 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1096 1097
1097 1098 Return:
1098 1099 0 : si no pudo escribir nada
1099 1100 1 : Si escribio el Basic el First Header
1100 1101 """
1101 1102 if self.fp == None:
1102 1103 self.setNextFile()
1103 1104
1104 1105 if self.flagIsNewFile:
1105 1106 return 1
1106 1107
1107 1108 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1108 1109 self.basicHeaderObj.write(self.fp)
1109 1110 return 1
1110 1111
1111 1112 if not( self.setNextFile() ):
1112 1113 return 0
1113 1114
1114 1115 return 1
1115 1116
1116 1117
1117 1118 def writeNextBlock(self):
1118 1119 """
1119 1120 Selecciona el bloque siguiente de datos y los escribe en un file
1120 1121
1121 1122 Return:
1122 1123 0 : Si no hizo pudo escribir el bloque de datos
1123 1124 1 : Si no pudo escribir el bloque de datos
1124 1125 """
1125 1126 if not( self.__setNewBlock() ):
1126 1127 return 0
1127 1128
1128 1129 self.writeBlock()
1129 1130
1130 1131 return 1
1131 1132
1132 1133 def setNextFile(self):
1133 1134 """
1134 1135 Determina el siguiente file que sera escrito
1135 1136
1136 1137 Affected:
1137 1138 self.filename
1138 1139 self.subfolder
1139 1140 self.fp
1140 1141 self.setFile
1141 1142 self.flagIsNewFile
1142 1143
1143 1144 Return:
1144 1145 0 : Si el archivo no puede ser escrito
1145 1146 1 : Si el archivo esta listo para ser escrito
1146 1147 """
1147 1148 ext = self.ext
1148 1149 path = self.path
1149 1150
1150 1151 if self.fp != None:
1151 1152 self.fp.close()
1152 1153
1153 1154 timeTuple = time.localtime( self.dataOut.utctime)
1154 1155 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1155 1156
1156 1157 fullpath = os.path.join( path, subfolder )
1157 1158 if not( os.path.exists(fullpath) ):
1158 1159 os.mkdir(fullpath)
1159 1160 self.setFile = -1 #inicializo mi contador de seteo
1160 1161 else:
1161 1162 filesList = os.listdir( fullpath )
1162 1163 if len( filesList ) > 0:
1163 1164 filesList = sorted( filesList, key=str.lower )
1164 1165 filen = filesList[-1]
1165 1166 # el filename debera tener el siguiente formato
1166 1167 # 0 1234 567 89A BCDE (hex)
1167 1168 # x YYYY DDD SSS .ext
1168 1169 if isNumber( filen[8:11] ):
1169 1170 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1170 1171 else:
1171 1172 self.setFile = -1
1172 1173 else:
1173 1174 self.setFile = -1 #inicializo mi contador de seteo
1174 1175
1175 1176 setFile = self.setFile
1176 1177 setFile += 1
1177 1178
1178 1179 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1179 1180 timeTuple.tm_year,
1180 1181 timeTuple.tm_yday,
1181 1182 setFile,
1182 1183 ext )
1183 1184
1184 1185 filename = os.path.join( path, subfolder, file )
1185 1186
1186 1187 fp = open( filename,'wb' )
1187 1188
1188 1189 self.blockIndex = 0
1189 1190
1190 1191 #guardando atributos
1191 1192 self.filename = filename
1192 1193 self.subfolder = subfolder
1193 1194 self.fp = fp
1194 1195 self.setFile = setFile
1195 1196 self.flagIsNewFile = 1
1196 1197
1197 1198 self.setFirstHeader()
1198 1199
1199 1200 print 'Writing the file: %s'%self.filename
1200 1201
1201 1202 self.__writeFirstHeader()
1202 1203
1203 1204 return 1
1204 1205
1205 1206 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1206 1207 """
1207 1208 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1208 1209
1209 1210 Inputs:
1210 1211 path : el path destino en el cual se escribiran los files a crear
1211 1212 format : formato en el cual sera salvado un file
1212 1213 set : el setebo del file
1213 1214
1214 1215 Return:
1215 1216 0 : Si no realizo un buen seteo
1216 1217 1 : Si realizo un buen seteo
1217 1218 """
1218 1219
1219 1220 if ext == None:
1220 1221 ext = self.ext
1221 1222
1222 1223 ext = ext.lower()
1223 1224
1224 1225 self.ext = ext
1225 1226
1226 1227 self.path = path
1227 1228
1228 1229 self.setFile = set - 1
1229 1230
1230 1231 self.blocksPerFile = blocksPerFile
1231 1232
1232 1233 self.profilesPerBlock = profilesPerBlock
1233 1234
1234 1235 self.dataOut = dataOut
1235 1236
1236 1237 if not(self.setNextFile()):
1237 1238 print "There isn't a next file"
1238 1239 return 0
1239 1240
1240 1241 self.setBlockDimension()
1241 1242
1242 1243 return 1
1243 1244
1244 1245 def run(self, dataOut, **kwargs):
1245 1246
1246 1247 if not(self.isConfig):
1247 1248
1248 1249 self.setup(dataOut, **kwargs)
1249 1250 self.isConfig = True
1250 1251
1251 1252 self.putData()
1252 1253
1253 1254 class VoltageReader(JRODataReader):
1254 1255 """
1255 1256 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1256 1257 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1257 1258 perfiles*alturas*canales) son almacenados en la variable "buffer".
1258 1259
1259 1260 perfiles * alturas * canales
1260 1261
1261 1262 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1262 1263 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1263 1264 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1264 1265 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1265 1266
1266 1267 Example:
1267 1268
1268 1269 dpath = "/home/myuser/data"
1269 1270
1270 1271 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1271 1272
1272 1273 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1273 1274
1274 1275 readerObj = VoltageReader()
1275 1276
1276 1277 readerObj.setup(dpath, startTime, endTime)
1277 1278
1278 1279 while(True):
1279 1280
1280 1281 #to get one profile
1281 1282 profile = readerObj.getData()
1282 1283
1283 1284 #print the profile
1284 1285 print profile
1285 1286
1286 1287 #If you want to see all datablock
1287 1288 print readerObj.datablock
1288 1289
1289 1290 if readerObj.flagNoMoreFiles:
1290 1291 break
1291 1292
1292 1293 """
1293 1294
1294 1295 ext = ".r"
1295 1296
1296 1297 optchar = "D"
1297 1298 dataOut = None
1298 1299
1299 1300
1300 1301 def __init__(self):
1301 1302 """
1302 1303 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1303 1304
1304 1305 Input:
1305 1306 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1306 1307 almacenar un perfil de datos cada vez que se haga un requerimiento
1307 1308 (getData). El perfil sera obtenido a partir del buffer de datos,
1308 1309 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1309 1310 bloque de datos.
1310 1311 Si este parametro no es pasado se creara uno internamente.
1311 1312
1312 1313 Variables afectadas:
1313 1314 self.dataOut
1314 1315
1315 1316 Return:
1316 1317 None
1317 1318 """
1318 1319
1319 1320 self.isConfig = False
1320 1321
1321 1322 self.datablock = None
1322 1323
1323 1324 self.utc = 0
1324 1325
1325 1326 self.ext = ".r"
1326 1327
1327 1328 self.optchar = "D"
1328 1329
1329 1330 self.basicHeaderObj = BasicHeader(LOCALTIME)
1330 1331
1331 1332 self.systemHeaderObj = SystemHeader()
1332 1333
1333 1334 self.radarControllerHeaderObj = RadarControllerHeader()
1334 1335
1335 1336 self.processingHeaderObj = ProcessingHeader()
1336 1337
1337 1338 self.online = 0
1338 1339
1339 1340 self.fp = None
1340 1341
1341 1342 self.idFile = None
1342 1343
1343 1344 self.dtype = None
1344 1345
1345 1346 self.fileSizeByHeader = None
1346 1347
1347 1348 self.filenameList = []
1348 1349
1349 1350 self.filename = None
1350 1351
1351 1352 self.fileSize = None
1352 1353
1353 1354 self.firstHeaderSize = 0
1354 1355
1355 1356 self.basicHeaderSize = 24
1356 1357
1357 1358 self.pathList = []
1358 1359
1359 1360 self.filenameList = []
1360 1361
1361 1362 self.lastUTTime = 0
1362 1363
1363 1364 self.maxTimeStep = 30
1364 1365
1365 1366 self.flagNoMoreFiles = 0
1366 1367
1367 1368 self.set = 0
1368 1369
1369 1370 self.path = None
1370 1371
1371 1372 self.profileIndex = 2**32-1
1372 1373
1373 1374 self.delay = 3 #seconds
1374 1375
1375 1376 self.nTries = 3 #quantity tries
1376 1377
1377 1378 self.nFiles = 3 #number of files for searching
1378 1379
1379 1380 self.nReadBlocks = 0
1380 1381
1381 1382 self.flagIsNewFile = 1
1382 1383
1383 1384 self.__isFirstTimeOnline = 1
1384 1385
1385 1386 self.ippSeconds = 0
1386 1387
1387 1388 self.flagTimeBlock = 0
1388 1389
1389 1390 self.flagIsNewBlock = 0
1390 1391
1391 1392 self.nTotalBlocks = 0
1392 1393
1393 1394 self.blocksize = 0
1394 1395
1395 1396 self.dataOut = self.createObjByDefault()
1396 1397
1397 1398 def createObjByDefault(self):
1398 1399
1399 1400 dataObj = Voltage()
1400 1401
1401 1402 return dataObj
1402 1403
1403 1404 def __hasNotDataInBuffer(self):
1404 1405 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1405 1406 return 1
1406 1407 return 0
1407 1408
1408 1409
1409 1410 def getBlockDimension(self):
1410 1411 """
1411 1412 Obtiene la cantidad de puntos a leer por cada bloque de datos
1412 1413
1413 1414 Affected:
1414 1415 self.blocksize
1415 1416
1416 1417 Return:
1417 1418 None
1418 1419 """
1419 1420 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1420 1421 self.blocksize = pts2read
1421 1422
1422 1423
1423 1424 def readBlock(self):
1424 1425 """
1425 1426 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1426 1427 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1427 1428 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1428 1429 es seteado a 0
1429 1430
1430 1431 Inputs:
1431 1432 None
1432 1433
1433 1434 Return:
1434 1435 None
1435 1436
1436 1437 Affected:
1437 1438 self.profileIndex
1438 1439 self.datablock
1439 1440 self.flagIsNewFile
1440 1441 self.flagIsNewBlock
1441 1442 self.nTotalBlocks
1442 1443
1443 1444 Exceptions:
1444 1445 Si un bloque leido no es un bloque valido
1445 1446 """
1446 1447
1447 1448 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1448 1449
1449 1450 try:
1450 1451 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1451 1452 except:
1452 1453 print "The read block (%3d) has not enough data" %self.nReadBlocks
1453 1454 return 0
1454 1455
1455 1456 junk = numpy.transpose(junk, (2,0,1))
1456 1457 self.datablock = junk['real'] + junk['imag']*1j
1457 1458
1458 1459 self.profileIndex = 0
1459 1460
1460 1461 self.flagIsNewFile = 0
1461 1462 self.flagIsNewBlock = 1
1462 1463
1463 1464 self.nTotalBlocks += 1
1464 1465 self.nReadBlocks += 1
1465 1466
1466 1467 return 1
1467 1468
1468 1469 def getFirstHeader(self):
1469 1470
1470 1471 self.dataOut.dtype = self.dtype
1471 1472
1472 1473 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1473 1474
1474 1475 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1475 1476
1476 1477 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1477 1478
1478 1479 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1479 1480
1480 1481 self.dataOut.ippSeconds = self.ippSeconds
1481 1482
1482 1483 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1483 1484
1484 1485 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1485 1486
1486 1487 self.dataOut.flagShiftFFT = False
1487 1488
1488 1489 if self.radarControllerHeaderObj.code != None:
1489 1490
1490 1491 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1491 1492
1492 1493 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1493 1494
1494 1495 self.dataOut.code = self.radarControllerHeaderObj.code
1495 1496
1496 1497 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1497 1498
1498 1499 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1499 1500
1500 1501 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1501 1502
1502 1503 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1503 1504
1504 1505 self.dataOut.flagShiftFFT = False
1505 1506
1506 1507 def getData(self):
1507 1508 """
1508 1509 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1509 1510 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1510 1511 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1511 1512
1512 1513 Ademas incrementa el contador del buffer en 1.
1513 1514
1514 1515 Return:
1515 1516 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1516 1517 buffer. Si no hay mas archivos a leer retorna None.
1517 1518
1518 1519 Variables afectadas:
1519 1520 self.dataOut
1520 1521 self.profileIndex
1521 1522
1522 1523 Affected:
1523 1524 self.dataOut
1524 1525 self.profileIndex
1525 1526 self.flagTimeBlock
1526 1527 self.flagIsNewBlock
1527 1528 """
1528 1529
1529 1530 if self.flagNoMoreFiles:
1530 1531 self.dataOut.flagNoData = True
1531 1532 print 'Process finished'
1532 1533 return 0
1533 1534
1534 1535 self.flagTimeBlock = 0
1535 1536 self.flagIsNewBlock = 0
1536 1537
1537 1538 if self.__hasNotDataInBuffer():
1538 1539
1539 1540 if not( self.readNextBlock() ):
1540 1541 return 0
1541 1542
1542 1543 self.getFirstHeader()
1543 1544
1544 1545 if self.datablock == None:
1545 1546 self.dataOut.flagNoData = True
1546 1547 return 0
1547 1548
1548 1549 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1549 1550
1550 1551 self.dataOut.flagNoData = False
1551 1552
1552 1553 self.getBasicHeader()
1553 1554
1554 1555 self.profileIndex += 1
1555 1556
1556 1557 return self.dataOut.data
1557 1558
1558 1559
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Data is always written
    block by block.
    """

    ext = ".r"

    optchar = "D"

    # shape of the on-disk block: (profiles, heights, channels)
    shapeBuffer = None


    def __init__(self):
        """
        Initializer of the VoltageWriter class for writing voltage data.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # The buffer is full once every profile of the block was stored.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that compose a block.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))


    def writeBlock(self):
        """
        Write the buffer to the designated file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # buffer is (channels, profiles, heights); files store
        # (profiles, heights, channels)
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Store one block of data and then write it to a file.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if the data of a block was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:

            # discontinuity in time: drop the partial block and open a new file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
#            self.setFirstHeader()

        return 1

    def __getProcessFlags(self):
        # Build the processing-flags bitmask from the output data type and
        # the processing already applied to the data.

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # FIX: "code" may be a numpy array; identity check against None is
        # required (== / != on an array compares elementwise)
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of a Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # factor 2 accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def setFirstHeader(self):

        """
        Get a copy of the First Header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage

        # FIX: identity check; "!= None" on a numpy array is elementwise
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1823 1824
1824 1825 class SpectraReader(JRODataReader):
1825 1826 """
1826 1827 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1827 1828 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1828 1829 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1829 1830
1830 1831 paresCanalesIguales * alturas * perfiles (Self Spectra)
1831 1832 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1832 1833 canales * alturas (DC Channels)
1833 1834
1834 1835 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1835 1836 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1836 1837 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1837 1838 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1838 1839
1839 1840 Example:
1840 1841 dpath = "/home/myuser/data"
1841 1842
1842 1843 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1843 1844
1844 1845 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1845 1846
1846 1847 readerObj = SpectraReader()
1847 1848
1848 1849 readerObj.setup(dpath, startTime, endTime)
1849 1850
1850 1851 while(True):
1851 1852
1852 1853 readerObj.getData()
1853 1854
1854 1855 print readerObj.data_spc
1855 1856
1856 1857 print readerObj.data_cspc
1857 1858
1858 1859 print readerObj.data_dc
1859 1860
1860 1861 if readerObj.flagNoMoreFiles:
1861 1862 break
1862 1863
1863 1864 """
1864 1865
1865 1866 pts2read_SelfSpectra = 0
1866 1867
1867 1868 pts2read_CrossSpectra = 0
1868 1869
1869 1870 pts2read_DCchannels = 0
1870 1871
1871 1872 ext = ".pdata"
1872 1873
1873 1874 optchar = "P"
1874 1875
1875 1876 dataOut = None
1876 1877
1877 1878 nRdChannels = None
1878 1879
1879 1880 nRdPairs = None
1880 1881
1881 1882 rdPairList = []
1882 1883
    def __init__(self):
        """
        Initializer of the SpectraReader class for reading spectra data.

        Inputs:
            dataOut : Spectra object. It is used to store one unit of data
                      every time a request is made (getData). The data is
                      taken from the buffer; if the buffer is empty a new
                      block of data is read.
                      If this parameter is not given, one is created
                      internally.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1 #Always
1982 1983
1983 1984
1984 1985 def createObjByDefault(self):
1985 1986
1986 1987 dataObj = Spectra()
1987 1988
1988 1989 return dataObj
1989 1990
1990 1991 def __hasNotDataInBuffer(self):
1991 1992 return 1
1992 1993
1993 1994
    def getBlockDimension(self):
        """
        Compute the number of points to read for each data block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb lists channel indices in consecutive pairs: an equal
        # pair is a self-spectrum, a different pair is a cross-spectrum
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
            else:
                self.nRdPairs = self.nRdPairs + 1 #pair of different channels
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

#        self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2036 2037
2037 2038
2038 2039 def readBlock(self):
2039 2040 """
2040 2041 Lee el bloque de datos desde la posicion actual del puntero del archivo
2041 2042 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
2042 2043 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
2043 2044 es seteado a 0
2044 2045
2045 2046 Return: None
2046 2047
2047 2048 Variables afectadas:
2048 2049
2049 2050 self.flagIsNewFile
2050 2051 self.flagIsNewBlock
2051 2052 self.nTotalBlocks
2052 2053 self.data_spc
2053 2054 self.data_cspc
2054 2055 self.data_dc
2055 2056
2056 2057 Exceptions:
2057 2058 Si un bloque leido no es un bloque valido
2058 2059 """
2059 2060 blockOk_flag = False
2060 2061 fpointer = self.fp.tell()
2061 2062
2062 2063 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2063 2064 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2064 2065
2065 2066 if self.processingHeaderObj.flag_cspc:
2066 2067 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2067 2068 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2068 2069
2069 2070 if self.processingHeaderObj.flag_dc:
2070 2071 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2071 2072 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
2072 2073
2073 2074
2074 2075 if not(self.processingHeaderObj.shif_fft):
2075 2076 #desplaza a la derecha en el eje 2 determinadas posiciones
2076 2077 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2077 2078 spc = numpy.roll( spc, shift , axis=2 )
2078 2079
2079 2080 if self.processingHeaderObj.flag_cspc:
2080 2081 #desplaza a la derecha en el eje 2 determinadas posiciones
2081 2082 cspc = numpy.roll( cspc, shift, axis=2 )
2082 2083
2083 2084 # self.processingHeaderObj.shif_fft = True
2084 2085
2085 2086 spc = numpy.transpose( spc, (0,2,1) )
2086 2087 self.data_spc = spc
2087 2088
2088 2089 if self.processingHeaderObj.flag_cspc:
2089 2090 cspc = numpy.transpose( cspc, (0,2,1) )
2090 2091 self.data_cspc = cspc['real'] + cspc['imag']*1j
2091 2092 else:
2092 2093 self.data_cspc = None
2093 2094
2094 2095 if self.processingHeaderObj.flag_dc:
2095 2096 self.data_dc = dc['real'] + dc['imag']*1j
2096 2097 else:
2097 2098 self.data_dc = None
2098 2099
2099 2100 self.flagIsNewFile = 0
2100 2101 self.flagIsNewBlock = 1
2101 2102
2102 2103 self.nTotalBlocks += 1
2103 2104 self.nReadBlocks += 1
2104 2105
2105 2106 return 1
2106 2107
2107 2108 def getFirstHeader(self):
2108 2109
2109 2110 self.dataOut.dtype = self.dtype
2110 2111
2111 2112 self.dataOut.nPairs = self.nRdPairs
2112 2113
2113 2114 self.dataOut.pairsList = self.rdPairList
2114 2115
2115 2116 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2116 2117
2117 2118 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2118 2119
2119 2120 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2120 2121
2121 2122 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2122 2123
2123 2124 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2124 2125
2125 2126 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2126 2127
2127 2128 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2128 2129
2129 2130 self.dataOut.ippSeconds = self.ippSeconds
2130 2131
2131 2132 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2132 2133
2133 2134 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2134 2135
2135 2136 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2136 2137
2137 2138 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2138 2139
2139 2140 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2140 2141
2141 2142 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2142 2143
2143 2144 if self.processingHeaderObj.code != None:
2144 2145
2145 2146 self.dataOut.nCode = self.processingHeaderObj.nCode
2146 2147
2147 2148 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2148 2149
2149 2150 self.dataOut.code = self.processingHeaderObj.code
2150 2151
2151 2152 self.dataOut.flagDecodeData = True
2152 2153
2153 2154 def getData(self):
2154 2155 """
2155 2156 Copia el buffer de lectura a la clase "Spectra",
2156 2157 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2157 2158 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2158 2159
2159 2160 Return:
2160 2161 0 : Si no hay mas archivos disponibles
2161 2162 1 : Si hizo una buena copia del buffer
2162 2163
2163 2164 Affected:
2164 2165 self.dataOut
2165 2166
2166 2167 self.flagTimeBlock
2167 2168 self.flagIsNewBlock
2168 2169 """
2169 2170
2170 2171 if self.flagNoMoreFiles:
2171 2172 self.dataOut.flagNoData = True
2172 2173 print 'Process finished'
2173 2174 return 0
2174 2175
2175 2176 self.flagTimeBlock = 0
2176 2177 self.flagIsNewBlock = 0
2177 2178
2178 2179 if self.__hasNotDataInBuffer():
2179 2180
2180 2181 if not( self.readNextBlock() ):
2181 2182 self.dataOut.flagNoData = True
2182 2183 return 0
2183 2184
2184 2185 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2185 2186
2186 2187 if self.data_dc == None:
2187 2188 self.dataOut.flagNoData = True
2188 2189 return 0
2189 2190
2190 2191 self.getBasicHeader()
2191 2192
2192 2193 self.getFirstHeader()
2193 2194
2194 2195 self.dataOut.data_spc = self.data_spc
2195 2196
2196 2197 self.dataOut.data_cspc = self.data_cspc
2197 2198
2198 2199 self.dataOut.data_dc = self.data_dc
2199 2200
2200 2201 self.dataOut.flagNoData = False
2201 2202
2202 2203 return self.dataOut.data_spc
2203 2204
2204 2205
class SpectraWriter(JRODataWriter):
    """
    Writes spectra data to processed files (.pdata). Data is always written
    to disk one block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # shapes of the sub-blocks composing one data block
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # write buffers (cspc/dc may be None when the input has no
    # cross-spectra / DC channels)
    data_spc = None

    data_cspc = None

    data_dc = None

    def __init__(self):
        """
        SpectraWriter initializer.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # one spectra block is written per putData() call, so the buffer is
        # always considered complete
        return 1

    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks (spc, cspc, dc) that compose
        one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)

    def writeBlock(self):
        """
        Write the current buffers to the open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # BUGFIX: identity tests ('is not None') are required below;
        # '!= None' on a numpy array is an elementwise comparison whose
        # truth value is ambiguous.
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # dc/cspc buffers may be absent (None) when the input has no DC
        # channels or cross-spectra
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1

    def putData(self):
        """
        Load one block of data from self.dataOut and write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if one data block was written to file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # cross-spectra and DC channels are optional; the reader sets them
        # to None when the file does not contain them
        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()
        else:
            self.data_cspc = None
        if self.dataOut.data_dc is not None:
            self.data_dc = self.dataOut.data_dc.copy()
        else:
            self.data_dc = None

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        """
        Build the processing-flags bitmask (data type plus applied
        processing steps) stored in the processing header.
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break
        else:
            # previously this fell through to a NameError; fail explicitly
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # identity tests: code/data_dc may be numpy arrays
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags

    def __getBlockSize(self):
        '''
        Compute the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8] # bytes per component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break
        else:
            # previously this fell through to a NameError; fail explicitly
            raise ValueError("Unsupported dtype: %s" % str(self.dataOut.dtype))

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # cross-spectra and DC are complex, hence the factor 2
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        return blocksize

    def setFirstHeader(self):
        """
        Fill the per-file headers (system, radar controller, processing)
        from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            # self-spectra entries: each channel appears twice (ch, ch)
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            # cross-spectra entries: flattened (chA, chB) pairs
            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not carry information about code

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2557 2558
class SpectraHeisWriter(Operation):
    """
    Writes Heis spectra to FITS files: one file per block, one binary-table
    column per channel (power in dB), grouped in a per-day subfolder.
    """

    setFile = None
    idblock = None
    doypath = None
    subfolder = None

    def __init__(self):
        self.wrObj = FITS()
        self.nTotalBlocks = 0
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(str):
        """
        Return True if *str* can be converted to a float, False otherwise.

        BUGFIX: the original definition had neither a self parameter nor
        @staticmethod, so calling it through an instance passed the instance
        itself as *str* and the method always returned False.
        """
        try:
            float( str )
            return True
        except:
            return False

    def setup(self, dataOut, wrpath):
        """
        Create the output root directory if needed and keep references to
        the output path and the data object.
        """

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.dataOut = dataOut

    def putData(self):
        """
        Write the current spectra block to a new FITS file and advance the
        set/block counters.

        Return:
            1 : one file was written
        """
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"

        # create the per-day subfolder on the first call
        if self.doypath is None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            os.mkdir(self.doypath)

        if self.setFile is None:
            self.setFile = 0

        #make the filename: DyyyyDDD_sss.fits
        filen = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,self.subfolder, filen)

        idblock = numpy.array([self.idblock],dtype="int64")
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=time.gmtime(self.dataOut.utctime).tm_year,
                                    month=time.gmtime(self.dataOut.utctime).tm_mon,
                                    day=time.gmtime(self.dataOut.utctime).tm_mday,
                                    hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                    minute=time.gmtime(self.dataOut.utctime).tm_min,
                                    second=time.gmtime(self.dataOut.utctime).tm_sec)

        # frequency axis derived from the height sampling
        c = 3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        # one power column (dB) per channel
        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header,data)

        self.wrObj.wFile(filename)

        #update the counters
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Operation entry point: configure on first call, then write."""

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
2675 2676
class FITS:
    """
    Thin builder around pyfits: accumulates a primary HDU plus one
    binary-table HDU and writes them to disk.
    """
    name = None
    format = None
    array = None
    data = None
    thdulist = None
    prihdr = None
    hdu = None

    def __init__(self):
        # nothing to initialize; attributes are filled by the builder methods
        pass

    def setColF(self, name, format, array):
        """Build and return a float32 pyfits Column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=values)
        return self.col1

    def writeData(self, name, format, data):
        """Build and return a float32 pyfits Column from *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=values)
        return self.col2

    def cFImage(self, idblock, year, month, day, hour, minute, second):
        """Create the primary HDU and stamp the timestamp keywords on it."""
        self.hdu = pyfits.PrimaryHDU(idblock)
        for keyword, value in (("Year", year), ("Month", month), ("Day", day),
                               ("Hour", hour), ("Minute", minute), ("Second", second)):
            self.hdu.header.set(keyword, value)
        return self.hdu

    def Ctable(self, colList):
        """Create a binary-table HDU from a list of pyfits Columns."""
        self.cols = pyfits.ColDefs(colList)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (primary + table) to be written later."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*, replacing any existing file."""
        if os.path.isfile(filename):
            os.remove(filename)
        self.thdulist.writeto(filename)
2738 2739
2739 2740
class ParameterConf:
    """Holds the name/value pair of one <Parameter> XML element."""
    ELEMENTNAME = 'Parameter'

    def __init__(self):
        # empty until readXml populates them
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        """Populate name and value from the element's XML attributes."""
        for attr in ('name', 'value'):
            setattr(self, attr, parmElement.get(attr))

    def getElementName(self):
        """Return the XML tag name this class maps to."""
        return self.ELEMENTNAME
2752 2753
class Metadata:
    """
    Reads a FITS-metadata XML configuration file and collects all of its
    <Parameter> elements as ParameterConf objects.
    """

    def __init__(self, filename):
        # list of ParameterConf objects found in the file
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        """
        Parse *filename* and populate self.parmConfObjList with every
        <Parameter> element found anywhere in the document.
        """
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # Element.getiterator() is deprecated (removed in Python 3.9);
        # iter() is the supported spelling, available since Python 2.7,
        # and behaves identically.
        parmElementList = self.projectElement.iter(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
2771 2772
class FitsWriter(Operation):
    # Writes spectra blocks to FITS files: a primary HDU carrying header
    # keywords read from an XML metadata file, plus one ImageHDU appended
    # per data block. Files roll over after dataBlocksPerFile blocks.

    def __init__(self):
        self.isConfig = False
        self.dataBlocksPerFile = None   # blocks per output file (set in setup)
        self.blockIndex = 0             # blocks written to the current file
        self.flagIsNewFile = 1          # 1 while no block has been written yet
        self.fitsObj = None             # pyfits HDUList of the open file
        self.optchar = 'P'
        self.ext = '.fits'
        self.setFile = 0                # per-day file sequence number

    def setFitsHeader(self, dataOut, metadatafile):
        """
        Create the primary HDU of a new file, filling its header keywords
        from the XML metadata file. Values equal to the special strings
        'fromdata*' are substituted with the corresponding dataOut fields.
        """

        header_data = pyfits.PrimaryHDU()

        metadata4fits = Metadata(metadatafile)
        for parameter in metadata4fits.parmConfObjList:
            parm_name = parameter.name
            parm_value = parameter.value

            # placeholder values are resolved from the data object
            if parm_value == 'fromdatadatetime':
                value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
            elif parm_value == 'fromdataheights':
                value = dataOut.nHeights
            elif parm_value == 'fromdatachannel':
                value = dataOut.nChannels
            elif parm_value == 'fromdatasamples':
                value = dataOut.nFFTPoints
            else:
                value = parm_value

            header_data.header[parm_name] = value

        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)


    def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
        """Store the configuration; called once from run()."""

        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        """Re-open the current file in update mode for appending."""
        self.fitsObj = pyfits.open(self.filename, mode='update')


    def addData(self, data):
        """
        Append one data block as an ImageHDU (stamped with UTCTIME),
        update the block counter in the primary header and flush to disk.
        """
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATA'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex

        self.write()

    def write(self):
        # flush pending changes and close; addData() reopens per block

        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()


    def setNextFile(self):
        """
        Compute the next output filename (PyyyyDDDsss.fits inside a
        dyyyyDDD subfolder), continuing the set number from any files
        already present, and write its primary header.

        Return:
            1 : a new file was set up
        """

        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                # chars 8:11 hold the set number in PyyyyDDDsss.fits
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #initialize the set counter to the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        print 'Writing the file: %s'%self.filename

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        """Append the current spectra block to the open file."""
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0


    def __setNewBlock(self):
        """
        Return 1 if the current file can take another block, rolling over
        to a new file when dataBlocksPerFile has been reached; 0 on failure.
        """

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        """Write one block, switching files first if needed."""
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        """Write the current block, creating the first file if necessary."""
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, **kwargs):
        """Operation entry point: configure on first call, then write."""
        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True
        self.putData()
2919 2920
2920 2921
General Comments 0
You need to be logged in to leave comments. Login now